testImageNetBinary.cpp

/**
* @file testImageNetBinary.cpp
* @brief perform ImageNet tests with binary classification
* @author Erik Rodner
* @date 01/04/2012
*/
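/*
 * All settings are read via NICE::Config. The following is only a sketch of a
 * config file covering the keys used below; the ini-style "[section] key = value"
 * layout is an assumption about core/basics/Config.h, so check that header for
 * the exact syntax:
 *
 *   [main]
 *   positive_class = 1      (class id treated as the positive class)
 *   results = results.txt   (WEKA-style output file)
 *   transform = absexp      (one of: absexp, exp, weighted)
 *   dimension = 1000        (feature dimensionality; only read for transform = weighted)
 *   nneg = 1                (negatives taken from each remaining class)
 *   imageNetLocal = false   (selects between the two hard-coded data paths)
 *
 *   [GPHIKClassifier]
 *   noise = 0.1             (noise value passed to optimizeBinary)
 */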
#include <core/basics/Config.h>

#ifdef NICE_USELIB_MATIO

#include <core/matlabAccess/MatFileIO.h>

//----------

#include <vislearning/cbaselib/ClassificationResults.h>
#include <vislearning/baselib/ProgressBar.h>
#include <vislearning/matlabAccessHighLevel/ImageNetData.h>

//----------

#include <gp-hik-core/FMKGPHyperparameterOptimization.h>
#include <gp-hik-core/parameterizedFunctions/PFAbsExp.h>
#include <gp-hik-core/parameterizedFunctions/PFExp.h>
#include <gp-hik-core/parameterizedFunctions/PFWeightedDim.h>
#include <gp-hik-core/tools.h>

using namespace std;
using namespace NICE;
using namespace OBJREC;

/**
    test the basic functionality of fast-hik hyperparameter optimization
*/
int main (int argc, char **argv)
{
  std::set_terminate(__gnu_cxx::__verbose_terminate_handler);

  Config conf ( argc, argv );
  string resultsfile = conf.gS("main", "results", "results.txt" );
  int positiveClass = conf.gI("main", "positive_class");

  cerr << "Positive class is " << positiveClass << endl;

  ParameterizedFunction *pf;
  string pf_s = conf.gS("main", "transform", "absexp");

  if ( pf_s == "absexp" )
    pf = new PFAbsExp( 1.0 );
  else if ( pf_s == "exp" )
    pf = new PFExp ( 1.0 );
  else if ( pf_s == "weighted" )
    pf = new PFWeightedDim ( conf.gI("main", "dimension"), 0.0, 5.0 );
  else
    fthrow(Exception, "Parameterized function type " << pf_s << " not yet implemented");

  double noise = conf.gD("GPHIKClassifier", "noise", 0.1);
  FMKGPHyperparameterOptimization hypopt ( &conf, pf );

  sparse_t data;
  NICE::Vector y;

  cerr << "Reading ImageNet data ..." << endl;
  bool imageNetLocal = conf.gB("main", "imageNetLocal", false);
  string imageNetPath;
  if (imageNetLocal)
    imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  else
    imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";

  ImageNetData imageNet ( imageNetPath + "demo/" );
  imageNet.getBatchData ( data, y, "train", "training" );
  uint n = y.size();

  cerr << "Performing hyperparameter optimization ... " << endl;
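
  // Build the binary task in a one-vs-rest manner: all examples of
  // positiveClass become positives, and the first nneg examples of every
  // other class become negatives.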
  set<int> positives;
  set<int> negatives;

  map< int, set<int> > mysets;
  for ( uint i = 0 ; i < n; i++ )
    mysets[ y[i] ].insert ( i );

  if ( mysets[ positiveClass ].size() == 0 )
    fthrow(Exception, "Class " << positiveClass << " is not available.");

  // add our positive examples
  for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
    positives.insert ( *i );

  int Nneg = conf.gI("main", "nneg", 1 );
  for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
  {
    int classno = k->first;
    if ( classno == positiveClass )
      continue;
    const set<int> & s = k->second;
    uint ind = 0;
    for ( set<int>::const_iterator i = s.begin(); (i != s.end()) && (ind < (uint)Nneg); i++, ind++ )
      negatives.insert ( *i );
  }

  cerr << "Number of positive examples: " << positives.size() << endl;
  cerr << "Number of negative examples: " << negatives.size() << endl;

  std::cerr << "hypopt.optimizeBinary( data, y, positives, negatives, noise )" << std::endl;
  hypopt.optimizeBinary ( data, y, positives, negatives, noise );

  // ------------------------------ TESTING ------------------------------
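  // Evaluate the binary model on the ImageNet validation data: every
  // preloaded example is scored with the learned model, and its ground-truth
  // label is mapped to 1 (positive class) or 0 (any other class).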
  cerr << "Reading ImageNet test data files (takes some seconds)..." << endl;
  imageNet.preloadData ( "val", "testing" );
  imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );

  ClassificationResults results;
  cerr << "Classification step ... with " << imageNet.getNumPreloadedExamples() << " examples" << endl;

  ProgressBar pb;
  for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
  {
    pb.update ( imageNet.getNumPreloadedExamples() );

    const SparseVector & svec = imageNet.getPreloadedExample ( i );
    SparseVector scores;

    // classification step
    int classno = hypopt.classify ( svec, scores );

    // building the result
    ClassificationResult r ( classno, scores );

    // set ground truth label
    r.classno_groundtruth = (((int)imageNet.getPreloadedLabel ( i )) == positiveClass) ? 1 : 0;
    results.push_back ( r );
  }
  cerr << "Writing results to " << resultsfile << endl;
  results.writeWEKA ( resultsfile, 0 );
  double perfvalue = results.getBinaryClassPerformance( ClassificationResults::PERF_AUC );
  cerr << "Performance: " << perfvalue << endl;

  delete pf;

  return 0;
}

#else

int main (int argc, char **argv)
{
  std::cerr << "MatIO library is missing in your system - this program will have no effect." << std::endl;
  return 0;
}

#endif