saveImageNetBinary.cpp

/**
 * @file saveImageNetBinary.cpp
 * @brief save ImageNet data for a binary classification task in a sparse text format
 * @author Erik Rodner
 * @date 01/04/2012
 */
#include <iostream>
#include <fstream>
#include <map>
#include <set>

#include <core/basics/Config.h>

#ifdef NICE_USELIB_MATIO
#include <core/matlabAccess/MatFileIO.h>

//----------
#include <vislearning/cbaselib/ClassificationResults.h>
#include <vislearning/baselib/ProgressBar.h>
#include <vislearning/matlabAccessHighLevel/ImageNetData.h>

//----------
#include <gp-hik-core/FeatureMatrixT.h>
#include <gp-hik-core/tools.h>

using namespace std;
using namespace NICE;
using namespace OBJREC;
/**
 * convert the ImageNet binary classification task into sparse text files (train.txt / test.txt)
 */
int main (int argc, char **argv)
{
  std::set_terminate(__gnu_cxx::__verbose_terminate_handler);

  Config conf ( argc, argv );
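  // configuration keys read from section "main": results, positive_class,
  // imageNetLocal, and nneg (number of negative examples taken from each class)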
  string resultsfile = conf.gS("main", "results", "results.txt" );
  int positiveClass = conf.gI("main", "positive_class");

  cerr << "Positive class is " << positiveClass << endl;

  sparse_t data;
  NICE::Vector yl;
  cerr << "Reading ImageNet data ..." << endl;

  bool imageNetLocal = conf.gB("main", "imageNetLocal", false);
  string imageNetPath;
  if (imageNetLocal)
    imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  else
    imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";

  ImageNetData imageNet ( imageNetPath + "demo/" );
  imageNet.getBatchData ( data, yl, "train", "training" );
  uint n = yl.size();
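  // group the training example indices by class label; the positive class forms
  // the positive set, and every other class contributes up to 'nneg' negatives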
  cerr << "Selecting positive and negative examples ... " << endl;
  set<int> positives;
  set<int> negatives;

  map< int, set<int> > mysets;
  for ( uint i = 0 ; i < n; i++ )
    mysets[ yl[i] ].insert ( i );

  if ( mysets[ positiveClass ].size() == 0 )
    fthrow(Exception, "Class " << positiveClass << " is not available.");

  // add our positive examples
  for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
    positives.insert ( *i );

  int Nneg = conf.gI("main", "nneg", 1 );
  for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
  {
    int classno = k->first;
    if ( classno == positiveClass )
      continue;

    const set<int> & s = k->second;
    uint ind = 0;
    for ( set<int>::const_iterator i = s.begin(); (i != s.end()) && (ind < Nneg); i++, ind++ )
      negatives.insert ( *i );
  }

  cerr << "Number of positive examples: " << positives.size() << endl;
  cerr << "Number of negative examples: " << negatives.size() << endl;
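  // build the binary label vector and a map from each original example index to
  // the position of that example in the reduced (positive + negative) training set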
  map<int, int> examples;
  Vector y ( yl.size() );
  int ind = 0;
  for ( uint i = 0 ; i < yl.size(); i++ )
  {
    if ( positives.find(i) != positives.end() ) {
      y[ examples.size() ] = 1.0;
      examples.insert( pair<int, int> ( i, ind ) );
      ind++;
    } else if ( negatives.find(i) != negatives.end() ) {
      y[ examples.size() ] = -1.0;
      examples.insert( pair<int, int> ( i, ind ) );
      ind++;
    }
  }
  y.resize( examples.size() );

  cerr << "Examples: " << examples.size() << endl;
  cerr << "Putting everything in a feature matrix structure ..." << endl;
  FeatureMatrixT<double> fm ( data, examples, 1000 );

  cerr << "Writing file ..." << endl;
  ofstream ofs ( "train.txt", ios::out );
  if ( !ofs.good() )
    fthrow(Exception, "Unable to write to train.txt" );

  // writing features
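  // each output line has the form "<label> <dim+1>:<value> ...": the binary label
  // (1 for the positive class, 0 otherwise) followed by the non-zero feature
  // entries with 1-based indices (sparse, LIBSVM-style format)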
  for ( uint i = 0 ; i < fm.get_n(); i++ )
  {
    ofs << ( (y[i] == 1.0) ? 1 : 0 );
    for ( uint k = 0 ; k < fm.get_d(); k++ )
    {
      double val = fm(k,i);
      if ( val != 0 )
      {
        ofs << " " << k+1 << ":" << val;
      }
    }
    ofs << endl;
  }
  ofs.close();
  // ------------------------------ TESTING ------------------------------
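  // the validation set is written in the same sparse format; the ground-truth
  // label becomes 1 if it matches the chosen positive class and 0 otherwise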
  cerr << "Reading ImageNet test data files (takes some seconds)..." << endl;
  imageNet.preloadData ( "val", "testing" );
  imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );

  ofstream ofs_test ( "test.txt", ios::out );
  if ( !ofs_test.good() )
    fthrow(Exception, "Unable to write to test.txt" );

  for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
  {
    const SparseVector & svec = imageNet.getPreloadedExample ( i );
    int classno_groundtruth = ( ((int)imageNet.getPreloadedLabel ( i )) == positiveClass ) ? 1 : 0;

    ofs_test << classno_groundtruth;
    for ( SparseVector::const_iterator k = svec.begin(); k != svec.end(); k++ )
      ofs_test << " " << k->first+1 << ":" << k->second;
    ofs_test << endl;
  }
  ofs_test.close();

  return 0;
}
#else
int main (int argc, char **argv)
{
  std::cerr << "The MatIO library is not available on this system - this program has no effect." << std::endl;
  return 0;
}
#endif