saveImageNetBinary.cpp

/**
 * @file saveImageNetBinary.cpp
 * @brief write ImageNet data for a binary classification task to sparse text files
 * @author Erik Rodner
 * @date 01/04/2012
 */
#include <core/basics/Config.h>
#include <core/matlabAccess/MatFileIO.h>

//----------

#include <vislearning/cbaselib/ClassificationResults.h>
#include <vislearning/baselib/ProgressBar.h>
#include <vislearning/matlabAccessHighLevel/ImageNetData.h>

//----------

#include <gp-hik-core/FeatureMatrixT.h>
#include <gp-hik-core/tools.h>

using namespace std;
using namespace NICE;
using namespace OBJREC;
/**
 * Select positive and negative training examples for a single ImageNet class
 * and write them, together with the validation data, to LIBSVM-style text
 * files (train.txt and test.txt).
 */
int main (int argc, char **argv)
{
  std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
  Config conf ( argc, argv );
  string resultsfile = conf.gS("main", "results", "results.txt" );
  int positiveClass = conf.gI("main", "positive_class");

  cerr << "Positive class is " << positiveClass << endl;

  sparse_t data;
  NICE::Vector yl;
  cerr << "Reading ImageNet data ..." << endl;

  bool imageNetLocal = conf.gB("main", "imageNetLocal", false);
  string imageNetPath;
  if ( imageNetLocal )
    imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  else
    imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";

  ImageNetData imageNet ( imageNetPath + "demo/" );
  imageNet.getBatchData ( data, yl, "train", "training" );
  uint n = yl.size();

  cerr << "Selecting positive and negative training examples ..." << endl;
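  // group the indices of the training examples by their class label;
  // the requested positive class forms the positive set below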
  set<int> positives;
  set<int> negatives;

  map< int, set<int> > mysets;
  for ( uint i = 0 ; i < n; i++ )
    mysets[ yl[i] ].insert ( i );

  if ( mysets[ positiveClass ].size() == 0 )
    fthrow(Exception, "Class " << positiveClass << " is not available.");

  // add our positive examples
  for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
    positives.insert ( *i );
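  // every class other than the positive one contributes at most "nneg"
  // examples to the negative set (a one-vs-rest style binary problem)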
  int Nneg = conf.gI("main", "nneg", 1 );
  for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
  {
    int classno = k->first;
    if ( classno == positiveClass )
      continue;
    const set<int> & s = k->second;
    uint ind = 0;
    for ( set<int>::const_iterator i = s.begin(); ( i != s.end() ) && ( (int)ind < Nneg ); i++, ind++ )
      negatives.insert ( *i );
  }

  cerr << "Number of positive examples: " << positives.size() << endl;
  cerr << "Number of negative examples: " << negatives.size() << endl;
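  // build the binary label vector and a map from each selected example's
  // original index to its column in the feature matrix (positives: +1, negatives: -1)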
  map<int, int> examples;
  Vector y ( yl.size() );
  int ind = 0;
  for ( uint i = 0 ; i < yl.size(); i++ )
  {
    if ( positives.find(i) != positives.end() ) {
      y[ examples.size() ] = 1.0;
      examples.insert( pair<int, int> ( i, ind ) );
      ind++;
    } else if ( negatives.find(i) != negatives.end() ) {
      y[ examples.size() ] = -1.0;
      examples.insert( pair<int, int> ( i, ind ) );
      ind++;
    }
  }
  y.resize( examples.size() );

  cerr << "Examples: " << examples.size() << endl;
  cerr << "Putting everything in a feature matrix structure ..." << endl;
  FeatureMatrixT<double> fm ( data, examples, 1000 );

  cerr << "Writing file ..." << endl;
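  // train.txt is written in a LIBSVM-style sparse text format:
  //   <label> <featureIndex>:<value> ...
  // with 1-based feature indices, zero entries omitted, and labels written
  // as 1 (positive class) or 0 (negative)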
  ofstream ofs ( "train.txt", ios::out );
  if ( !ofs.good() )
    fthrow(Exception, "Unable to write to train.txt" );

  // writing features
  for ( uint i = 0 ; i < fm.get_n(); i++ )
  {
    ofs << ( (y[i] == 1.0) ? 1 : 0 );
    for ( uint k = 0 ; k < fm.get_d(); k++ )
    {
      double val = fm(k,i);
      if ( val != 0 )
      {
        ofs << " " << k+1 << ":" << val;
      }
    }
    ofs << endl;
  }
  ofs.close();
  // ------------------------------ TESTING ------------------------------
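  // preload the validation set as test data, attach the official ground-truth
  // labels, and write test.txt in the same sparse format (label 1 for the
  // positive class, 0 otherwise)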
  cerr << "Reading ImageNet test data files (takes some seconds)..." << endl;
  imageNet.preloadData ( "val", "testing" );
  imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );

  ofstream ofs_test ( "test.txt", ios::out );
  if ( !ofs_test.good() )
    fthrow(Exception, "Unable to write to test.txt" );

  for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
  {
    const SparseVector & svec = imageNet.getPreloadedExample ( i );
    int classno_groundtruth = ( ((int)imageNet.getPreloadedLabel ( i )) == positiveClass ) ? 1 : 0;

    ofs_test << classno_groundtruth;
    for ( SparseVector::const_iterator k = svec.begin(); k != svec.end(); k++ )
      ofs_test << " " << k->first + 1 << ":" << k->second;
    ofs_test << endl;
  }
  ofs_test.close();

  return 0;
}