// testImageNetBinary.cpp — ImageNet binary-classification test for gp-hik-core
/**
* @file testImageNetBinary.cpp
* @brief perform ImageNet tests with binary classification
* @author Erik Rodner
* @date 01/04/2012
*/
#include <core/basics/Config.h>

#ifdef NICE_USELIB_MATIO

#include <iostream>
#include <map>
#include <set>
#include <string>

#include <core/matlabAccess/MatFileIO.h>
//----------
#include <vislearning/cbaselib/ClassificationResults.h>
#include <vislearning/baselib/ProgressBar.h>
#include <vislearning/matlabAccessHighLevel/ImageNetData.h>
//----------
#include <gp-hik-core/FMKGPHyperparameterOptimization.h>
#include <gp-hik-core/parameterizedFunctions/PFAbsExp.h>
#include <gp-hik-core/parameterizedFunctions/PFExp.h>
#include <gp-hik-core/parameterizedFunctions/PFWeightedDim.h>
#include <gp-hik-core/tools.h>

using namespace std;
using namespace NICE;
using namespace OBJREC;
  23. /**
  24. test the basic functionality of fast-hik hyperparameter optimization
  25. */
  26. int main (int argc, char **argv)
  27. {
  28. std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
  29. Config conf ( argc, argv );
  30. string resultsfile = conf.gS("main", "results", "results.txt" );
  31. int positiveClass = conf.gI("main", "positive_class");
  32. cerr << "Positive class is " << positiveClass << endl;
  33. conf.sD( "FMKGPHyperparameterOptimization", "parameter_upper_bound", 5.0 );
  34. conf.sD( "FMKGPHyperparameterOptimization", "parameter_lower_bound", 1.0 );
  35. std::string pf_s = conf.gS("main", "transform", "absexp");
  36. if ( pf_s == "absexp" )
  37. conf.sS( "FMKGPHyperparameterOptimization", "transform", "absexp" );
  38. else if ( pf_s == "exp" )
  39. conf.sS( "FMKGPHyperparameterOptimization", "transform", "exp" );
  40. else if ( pf_s == "weighted" )
  41. {
  42. conf.sS( "FMKGPHyperparameterOptimization", "transform", "weightedDim" );
  43. conf.sI( "FMKGPHyperparameterOptimization", "pf_dim", conf.gI("main", "dimension") );
  44. }
  45. else
  46. fthrow(Exception, "Parameterized function type " << pf_s << " not yet implemented");
  47. std::cerr << "Transformation type: " << pf_s << std::endl;
  48. double noise = conf.gD("GPHIKClassifier", "noise", 0.1);
  49. FMKGPHyperparameterOptimization hypopt ( &conf );
  50. sparse_t data;
  51. NICE::Vector y;
  52. cerr << "Reading ImageNet data ..." << endl;
  53. bool imageNetLocal = conf.gB("main", "imageNetLocal" , false);
  54. string imageNetPath;
  55. if (imageNetLocal)
  56. imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  57. else
  58. imageNetPath = "/home/dbv/datasets/ilsvrc2010/devkit-1.0/"; // /home/dbv/bilder/imagenet/devkit-1.0/";
  59. ImageNetData imageNet ( imageNetPath + "demo/" );
  60. imageNet.getBatchData ( data, y, "train", "training" );
  61. uint n = y.size();
  62. cerr << "Performing hyperparameter optimization ... " << endl;
  63. set<int> positives;
  64. set<int> negatives;
  65. map< int, set<int> > mysets;
  66. for ( uint i = 0 ; i < n; i++ )
  67. mysets[ y[i] ].insert ( i );
  68. if ( mysets[ positiveClass ].size() == 0 )
  69. fthrow(Exception, "Class " << positiveClass << " is not available.");
  70. // add our positive examples
  71. for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
  72. positives.insert ( *i );
  73. int Nneg = conf.gI("main", "nneg", 1 );
  74. for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
  75. {
  76. int classno = k->first;
  77. if ( classno == positiveClass )
  78. continue;
  79. const set<int> & s = k->second;
  80. uint ind = 0;
  81. for ( set<int>::const_iterator i = s.begin(); (i != s.end() && ind < Nneg); i++,ind++ )
  82. negatives.insert ( *i );
  83. }
  84. cerr << "Number of positive examples: " << positives.size() << endl;
  85. cerr << "Number of negative examples: " << negatives.size() << endl;
  86. std::cerr << "hypopt.optimize( data, y, positives, negatives ) " << std::endl;
  87. hypopt.optimizeBinary ( data, y, positives, negatives, noise );
  88. // ------------------------------ TESTING ------------------------------
  89. cerr << "Reading ImageNet test data files (takes some seconds)..." << endl;
  90. imageNet.preloadData ( "val", "testing" );
  91. imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );
  92. ClassificationResults results;
  93. cerr << "Classification step ... with " << imageNet.getNumPreloadedExamples() << " examples" << endl;
  94. ProgressBar pb;
  95. for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
  96. {
  97. pb.update ( imageNet.getNumPreloadedExamples() );
  98. const SparseVector & svec = imageNet.getPreloadedExample ( i );
  99. SparseVector scores;
  100. // classification step
  101. int classno = hypopt.classify ( svec, scores );
  102. // building the result
  103. ClassificationResult r ( classno, scores );
  104. // set ground truth label
  105. r.classno_groundtruth = (((int)imageNet.getPreloadedLabel ( i )) == positiveClass) ? 1 : 0;
  106. results.push_back ( r );
  107. }
  108. cerr << "Writing results to " << resultsfile << endl;
  109. results.writeWEKA ( resultsfile, 0 );
  110. double perfvalue = results.getBinaryClassPerformance( ClassificationResults::PERF_AUC );
  111. cerr << "Performance: " << perfvalue << endl;
  112. return 0;
  113. }
  114. #else
  115. int main (int argc, char **argv)
  116. {
  117. std::cerr << "MatIO library is missing in your system - this program will have no effect. " << std::endl;
  118. }
  119. #endif