#include <vector>
#include <string>
#include <map>
#include <iostream>

#include "Defaults.h"
#include "Utils/Args.h"

#include "AdaBoostMHLearner.h"
#include "Classifier.h"
#include "WeakLearners/BaseLearner.h"
#include "IO/ClassMappings.h"

using namespace std;
using namespace MultiBoost;

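/**
 * Show the basic output (banner and build information).
 * Called when no argument is given on the command line.
 */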
void showBase()
{
    cout << "MultiBoost. An obvious name for a multi-class AdaBoost learner." << endl;
    cout << "----------------------------------------------------------------" << endl;
    cout << "Build: " << __DATE__ << " (" << __TIME__ << ") (C) Norman Casagrande 2005" << endl << endl;
    cout << "===> Type -help for help or -static to show the static options" << endl;

    exit(0);
}

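/**
 * Show the help output: the "Parameters" group, the list of specific
 * help sections, and the registered weak learners.
 */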
void showHelp(nor_utils::Args& args, const vector<string>& learnersList)
{
    cout << "MultiBoost. An obvious name for a multi-class AdaBoost learner." << endl;
    cout << "------------------------ HELP SECTION --------------------------" << endl;

    args.printGroup("Parameters");

    cout << endl;
    cout << "For help on a specific group of options, type:" << endl;
    cout << "   -h general: General options" << endl;
    cout << "   -h io: I/O options" << endl;
    cout << "   -h algo: Basic algorithm options" << endl;

    cout << endl;
    cout << "For the options of a specific weak learner, type:" << endl;

    vector<string>::const_iterator it;
    for (it = learnersList.begin(); it != learnersList.end(); ++it)
        cout << "   -h " << *it << endl;

    exit(0);
}

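/**
 * Show the help for a single option group, selected with -h <optiongroup>.
 */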
void showOptionalHelp(nor_utils::Args& args)
{
    string helpType;
    args.getValue("-h", 0, helpType);

    cout << "MultiBoost. An obvious name for a multi-class AdaBoost learner." << endl;
    cout << "----------------------------------------------------------------" << endl;

    if (helpType == "general")
        args.printGroup("General Options");
    else if (helpType == "io")
        args.printGroup("I/O Options");
    else if (helpType == "algo")
        args.printGroup("Basic Algorithm Options");
    else if ( BaseLearner::RegisteredLearners().hasLearner(helpType) )
        args.printGroup(helpType + " Options");
    else
        cerr << "ERROR: Unknown help section <" << helpType << ">" << endl;
}

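/**
 * Show the configuration chosen at compile time (sort type, comment string,
 * and the debug flags).
 */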
void showStaticConfig()
{
    cout << "MultiBoost. An obvious name for a multi-class AdaBoost learner." << endl;
    cout << "------------------------ STATIC CONFIG -------------------------" << endl;

    cout << "- Sort type = ";
#if CONSERVATIVE_SORT
    cout << "CONSERVATIVE (slow)" << endl;
#else
    cout << "NON CONSERVATIVE (fast)" << endl;
#endif

    cout << "Comment: " << COMMENT << endl;
#ifndef NDEBUG
    cout << "Important: NDEBUG not active!!" << endl;
#endif

#if MB_DEBUG
    cout << "MultiBoost debug active (MB_DEBUG=1)!!" << endl;
#endif

    exit(0);
}

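/**
 * The main function: declares the command-line arguments, parses them,
 * and dispatches to training, training+testing, or testing.
 */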
int main(int argc, char* argv[])
{
    nor_utils::Args args;

    args.declareArgument("-help");
    args.declareArgument("-static");

    args.declareArgument("-h", "Help", 1, "<optiongroup>");

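    // Arguments of the main operating modes (training and testing)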
    args.setGroup("Parameters");

    args.declareArgument("-train", "Performs training.", 2, "<dataFile> <nIterations>");
    args.declareArgument("-traintest", "Performs training and test at the same time.", 3, "<trainingDataFile> <testDataFile> <nIterations>");
    args.declareArgument("-test", "Test the model.", 2, "<dataFile> <shypFile>");

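    // General options (verbosity, output of training statistics)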
    args.setGroup("General Options");

    args.declareArgument("-verbose", "Set the verbose level to 0, 1, or 2 (0=no messages, 1=default, 2=all messages).", 1, "<val>");
    args.declareArgument("-outputinfo", "Output information on the performance of the algorithm during training to file <filename>.", 1, "<filename>");

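    // Build the list of the registered weak learners, marking the default one,
    // to show it in the help of -learnertype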
    string learnersComment = "Available learners are:";

    vector<string> learnersList;
    BaseLearner::RegisteredLearners().getList(learnersList);
    vector<string>::const_iterator it;
    for (it = learnersList.begin(); it != learnersList.end(); ++it)
    {
        learnersComment += "\n ** " + *it;

        if ( *it == defaultLearner )
            learnersComment += " (DEFAULT)";
    }

    args.declareArgument("-learnertype", "Change the type of weak learner. " + learnersComment, 1, "<learner>");

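    // I/O options (data file layout, class map, output file names)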
    args.setGroup("I/O Options");

    args.declareArgument("-classend", "The class is the last column instead of the first (or second if -hasfilename is active).");
    args.declareArgument("-classmap", "Specify a file with the list of classes. Use it when the test set has fewer classes than the training set.", 1, "<filename>");
    args.declareArgument("-hasfilename", "The data file has an additional column (the very first) which contains the filename.");
    args.declareArgument("-shypname", "The name of the output strong hypothesis file (default: " + string(SHYP_NAME) + "." + string(SHYP_EXTENSION) + ").", 1, "<filename>");

    args.setGroup("Basic Algorithm Options");

    args.declareArgument("-edgeoffset", "Defines the value of the edge offset (theta) (default: no edge offset).", 1, "<val>");

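    // Let each registered weak learner declare its own options in its own group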
    for (it = learnersList.begin(); it != learnersList.end(); ++it)
    {
        args.setGroup(*it + " Options");

        BaseLearner::RegisteredLearners().getLearner(*it)->declareArguments(args);
    }

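    // Parse the command line; on errors show the banner or the help and exit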
    switch ( args.readArguments(argc, argv) )
    {
        case nor_utils::AOT_NO_ARGUMENTS:
            showBase();
            break;

        case nor_utils::AOT_UNKOWN_ARGUMENT:
            exit(1);
            break;

        case nor_utils::AOT_TOO_FEW_VALUES:
            showHelp(args, learnersList);
            break;
    }

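    // Show the help or the static configuration and exit, if requested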
    if ( args.hasArgument("-help") )
        showHelp(args, learnersList);
    if ( args.hasArgument("-static") )
        showStaticConfig();

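    // Show the help of a single option group if requested with -h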
    if ( args.hasArgument("-h") )
        showOptionalHelp(args);

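    // Read the general options: verbosity level and (optional) class map file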
    int verbose = 1;

    if ( args.hasArgument("-verbose") )
        args.getValue("-verbose", 0, verbose);

    if ( args.hasArgument("-classmap") )
    {
        string classMapName;
        args.getValue("-classmap", 0, classMapName);
        ClassMappings::loadClassMapFile(classMapName);
    }

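    // -train: train on a single data file for the given number of iterations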
    if ( args.hasArgument("-train") )
    {
        string trainFileName;
        args.getValue("-train", 0, trainFileName);

        int numIterations;
        args.getValue("-train", 1, numIterations);

        AdaBoostLearner learner(args, verbose);
        learner.run(numIterations, trainFileName);
    }
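    // -traintest: perform training and test at the same time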
    else if ( args.hasArgument("-traintest") )
    {
        string trainFileName;
        args.getValue("-traintest", 0, trainFileName);

        string testFileName;
        args.getValue("-traintest", 1, testFileName);

        int numIterations;
        args.getValue("-traintest", 2, numIterations);

        AdaBoostLearner learner(args, verbose);
        learner.run(numIterations, trainFileName, testFileName);
    }
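    // -test: classify the data file with a previously learned strong hypothesis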
    else if ( args.hasArgument("-test") )
    {
        string testFileName;
        args.getValue("-test", 0, testFileName);

        string shypFile;
        args.getValue("-test", 1, shypFile);

        Classifier classifier(args, verbose);
        classifier.run(testFileName, shypFile);
    }

    return 0;
}