/**
 * @file testImageNetBinary.cpp
 * @brief perform ImageNet tests with binary tasks for OCC
 * @author Alexander Lütz
 * @date 23-05-2012 (dd-mm-yyyy)
 */
#include <iostream>

#include "core/basics/Config.h"

#ifdef NICE_USELIB_MATIO

#include "vislearning/cbaselib/ClassificationResults.h"
#include "vislearning/baselib/ProgressBar.h"
#include "core/matlabAccess/MatFileIO.h"

#include "vislearning/matlabAccessHighLevel/ImageNetData.h"

#include "vislearning/classifier/kernelclassifier/KCGPOneClass.h"
#include "vislearning/classifier/kernelclassifier/KCGPApproxOneClass.h"

#include "vislearning/math/kernels/KernelData.h"
#include "vislearning/math/kernels/Kernel.h"
#include "vislearning/math/kernels/KernelRBF.h"
#include "vislearning/math/kernels/KernelExp.h"

// #include "fast-hik/tools.h"

using namespace std;
using namespace NICE;
using namespace OBJREC;
/**
    test one-class classification (OCC) on binary ImageNet tasks
*/
int main (int argc, char **argv)
{
  std::set_terminate(__gnu_cxx::__verbose_terminate_handler);

  Config conf ( argc, argv );
  string resultsfile = conf.gS("main", "results", "results.txt" );
  int positiveClass = conf.gI("main", "positive_class");

  std::cerr << "Positive class is " << positiveClass << std::endl;
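  // One-vs-all evaluation setup: examples of positiveClass are later scored as
  // the target class (ground-truth label 1), everything else as 0
  // (see the testing loop below).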
  sparse_t data;
  NICE::Vector y;

  std::cerr << "Reading ImageNet data ..." << std::endl;
  bool imageNetLocal = conf.gB("main", "imageNetLocal", false);
  string imageNetPath;
  if (imageNetLocal)
    imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
  else
    imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";

  ImageNetData imageNet ( imageNetPath + "demo/" );
  // imageNet.getBatchData ( data, y, "train", "training" );

  LabeledSetVector train;
  imageNet.loadDataAsLabeledSetVector( train );

  //set up the kernel function
  double rbf_sigma = conf.gD("main", "rbf_sigma", -2.0 );
  KernelRBF kernelFunction ( rbf_sigma, 0.0 );
  //KernelExp kernelFunction ( rbf_sigma, 0.0, 0.0 );
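  // Note (assumption): KernelRBF appears to take its bandwidth parameter in log
  // scale (with the second argument a log noise term), which would explain the
  // negative default of -2.0 for rbf_sigma; check KernelRBF.h before tuning it.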
  //set up our OC-classifier
  string classifierName = conf.gS("main", "classifier", "KCGPApproxOneClass");

  KernelClassifier *classifier;
  if ( classifierName == "KCGPApproxOneClass" )
  {
    classifier = new KCGPApproxOneClass ( &conf, &kernelFunction );
  }
  else if ( classifierName == "KCGPOneClass" )
  {
    classifier = new KCGPOneClass ( &conf, &kernelFunction );
  }
  else //default
  {
    classifier = new KCGPApproxOneClass ( &conf, &kernelFunction );
  }
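  // KCGPApproxOneClass and KCGPOneClass are both GP-based one-class classifiers;
  // the "Approx" variant presumably trades some accuracy for a cheaper
  // approximation of the GP computations (assumption based on the class names).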
  //and perform the training
  classifier->teach( train );
  // uint n = y.size();
  //
  // set<int> positives;
  // set<int> negatives;
  //
  // map< int, set<int> > mysets;
  // for ( uint i = 0 ; i < n; i++ )
  //   mysets[ y[i] ].insert ( i );
  //
  // if ( mysets[ positiveClass ].size() == 0 )
  //   fthrow(Exception, "Class " << positiveClass << " is not available.");
  //
  // // add our positive examples
  // for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
  //   positives.insert ( *i );
  //
  // int Nneg = conf.gI("main", "nneg", 1 );
  // for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
  // {
  //   int classno = k->first;
  //   if ( classno == positiveClass )
  //     continue;
  //   const set<int> & s = k->second;
  //   uint ind = 0;
  //   for ( set<int>::const_iterator i = s.begin(); (i != s.end() && ind < Nneg); i++,ind++ )
  //     negatives.insert ( *i );
  // }
  // std::cerr << "Number of positive examples: " << positives.size() << std::endl;
  // std::cerr << "Number of negative examples: " << negatives.size() << std::endl;
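  // (The commented-out block above sketches an explicit positive/negative
  // training split; it is not needed for the one-class setup used here and is
  // kept only for reference.)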
  // ------------------------------ TESTING ------------------------------

  std::cerr << "Reading ImageNet test data files (takes some seconds)..." << std::endl;
  imageNet.preloadData ( "val", "testing" );
  imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );
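  // The external file provides the multi-class ground-truth labels of the
  // validation images; they are binarized against positiveClass in the loop below.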
  ClassificationResults results;
  std::cerr << "Classification step ... with " << imageNet.getNumPreloadedExamples() << " examples" << std::endl;

  ProgressBar pb;
  for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
  {
    pb.update ( imageNet.getNumPreloadedExamples() );

    const SparseVector & svec = imageNet.getPreloadedExample ( i );
    NICE::Vector vec;
    svec.convertToVectorT( vec );

    // classification step
    ClassificationResult r = classifier->classify ( vec );

    // set ground truth label
    r.classno_groundtruth = ( ((int)imageNet.getPreloadedLabel ( i )) == positiveClass ) ? 1 : 0;

    results.push_back ( r );
  }
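  // results now holds one classification score and one binary ground-truth
  // label per test example; the WEKA export and the AUC computation below
  // operate on exactly this list.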
  std::cerr << "Writing results to " << resultsfile << std::endl;
  results.writeWEKA ( resultsfile, 0 );

  double perfvalue = results.getBinaryClassPerformance( ClassificationResults::PERF_AUC );
  std::cerr << "Performance: " << perfvalue << std::endl;

  //don't waste memory
  delete classifier;

  return 0;
}
#else
int main (int argc, char **argv)
{
  std::cerr << "The MatIO library is missing on your system - this program will have no effect." << std::endl;
  return 0;
}
#endif