// testImageNetBinary.cpp
  1. /**
  2. * @file testImageNetBinary.cpp
  3. * @brief perform ImageNet tests with binary classification
  4. * @author Erik Rodner
  5. * @date 01/04/2012
  6. */
// C++ standard library (include what you use: set, map, string, cerr)
#include <iostream>
#include <map>
#include <set>
#include <string>
//----------
#include <core/basics/Config.h>
#include <core/matlabAccess/MatFileIO.h>
//----------
#include <vislearning/cbaselib/ClassificationResults.h>
#include <vislearning/baselib/ProgressBar.h>
#include <vislearning/matlabAccessHighLevel/ImageNetData.h>
//----------
#include <gp-hik-core/FMKGPHyperparameterOptimization.h>
#include <gp-hik-core/parameterizedFunctions/PFAbsExp.h>
#include <gp-hik-core/parameterizedFunctions/PFExp.h>
#include <gp-hik-core/parameterizedFunctions/PFWeightedDim.h>
#include <gp-hik-core/tools.h>

using namespace std;
using namespace NICE;
using namespace OBJREC;
  22. /**
  23. test the basic functionality of fast-hik hyperparameter optimization
  24. */
  25. int main (int argc, char **argv)
  26. {
  27. std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
  28. Config conf ( argc, argv );
  29. string resultsfile = conf.gS("main", "results", "results.txt" );
  30. int positiveClass = conf.gI("main", "positive_class");
  31. cerr << "Positive class is " << positiveClass << endl;
  32. ParameterizedFunction *pf;
  33. string pf_s = conf.gS("main", "transform", "absexp");
  34. if ( pf_s == "absexp" )
  35. pf = new PFAbsExp( 1.0 );
  36. else if ( pf_s == "exp" )
  37. pf = new PFExp ( 1.0 );
  38. else if ( pf_s == "weighted" )
  39. pf = new PFWeightedDim ( conf.gI("main", "dimension"), 0.0, 5.0 );
  40. else
  41. fthrow(Exception, "Parameterized function type " << pf_s << " not yet implemented");
  42. double noise = conf.gD("GPHIKClassifier", "noise", 0.1);
  43. FMKGPHyperparameterOptimization hypopt ( &conf, pf );
  44. sparse_t data;
  45. NICE::Vector y;
  46. cerr << "Reading ImageNet data ..." << endl;
  47. bool imageNetLocal = conf.gB("main", "imageNetLocal" , false);
  48. string imageNetPath;
  49. if (imageNetLocal)
  50. imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  51. else
  52. imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";
  53. ImageNetData imageNet ( imageNetPath + "demo/" );
  54. imageNet.getBatchData ( data, y, "train", "training" );
  55. uint n = y.size();
  56. cerr << "Performing hyperparameter optimization ... " << endl;
  57. set<int> positives;
  58. set<int> negatives;
  59. map< int, set<int> > mysets;
  60. for ( uint i = 0 ; i < n; i++ )
  61. mysets[ y[i] ].insert ( i );
  62. if ( mysets[ positiveClass ].size() == 0 )
  63. fthrow(Exception, "Class " << positiveClass << " is not available.");
  64. // add our positive examples
  65. for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
  66. positives.insert ( *i );
  67. int Nneg = conf.gI("main", "nneg", 1 );
  68. for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
  69. {
  70. int classno = k->first;
  71. if ( classno == positiveClass )
  72. continue;
  73. const set<int> & s = k->second;
  74. uint ind = 0;
  75. for ( set<int>::const_iterator i = s.begin(); (i != s.end() && ind < Nneg); i++,ind++ )
  76. negatives.insert ( *i );
  77. }
  78. cerr << "Number of positive examples: " << positives.size() << endl;
  79. cerr << "Number of negative examples: " << negatives.size() << endl;
  80. std::cerr << "hypopt.optimize( data, y, positives, negatives ) " << std::endl;
  81. hypopt.optimizeBinary ( data, y, positives, negatives, noise );
  82. // ------------------------------ TESTING ------------------------------
  83. cerr << "Reading ImageNet test data files (takes some seconds)..." << endl;
  84. imageNet.preloadData ( "val", "testing" );
  85. imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );
  86. ClassificationResults results;
  87. cerr << "Classification step ... with " << imageNet.getNumPreloadedExamples() << " examples" << endl;
  88. ProgressBar pb;
  89. for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
  90. {
  91. pb.update ( imageNet.getNumPreloadedExamples() );
  92. const SparseVector & svec = imageNet.getPreloadedExample ( i );
  93. SparseVector scores;
  94. // classification step
  95. int classno = hypopt.classify ( svec, scores );
  96. // building the result
  97. ClassificationResult r ( classno, scores );
  98. // set ground truth label
  99. r.classno_groundtruth = (((int)imageNet.getPreloadedLabel ( i )) == positiveClass) ? 1 : 0;
  100. results.push_back ( r );
  101. }
  102. cerr << "Writing results to " << resultsfile << endl;
  103. results.writeWEKA ( resultsfile, 0 );
  104. double perfvalue = results.getBinaryClassPerformance( ClassificationResults::PERF_AUC );
  105. cerr << "Performance: " << perfvalue << endl;
  106. delete pf;
  107. return 0;
  108. }