
Prepared everything for ImageNet tests of the GP approximation; only the training step is missing.

Alexander Luetz, 13 years ago
Commit 6d685edb44
4 changed files with 169 additions and 5 deletions
  1. classifier/kernelclassifier/KCGPApproxOneClass.h (+1, -1)
  2. progs/ImagenetBinary.conf (+13, -0)
  3. progs/libdepend.inc (+8, -4)
  4. progs/testImageNetBinary.cpp (+147, -0)

+ 1 - 1
classifier/kernelclassifier/KCGPApproxOneClass.h

@@ -59,7 +59,7 @@ class KCGPApproxOneClass : public KernelClassifier
 
   void restore(std::istream&, int);
   void store(std::ostream&, int) const;
-  void clear();
+  void clear(){};
   
 };
 

+ 13 - 0
progs/ImagenetBinary.conf

@@ -0,0 +1,13 @@
+[main]
+positive_class = 1
+
+# whether to use Erik's folder (only works on dionysos)
+imageNetLocal = false
+
+# standard setting with one negative example for each category
+# nneg = 1
+
+
+[OneClassGP]
+detection_mode = variance
+static_noise = 0.1
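
Note: besides the keys set above, testImageNetBinary.cpp (below) also reads results, rbf_sigma, classifier, and nneg from the [main] section. A minimal sketch of a fuller ImagenetBinary.conf, filling those keys with the defaults hard-coded in the program (not part of this commit):

[main]
positive_class = 1
imageNetLocal = false
nneg = 1
results = results.txt
rbf_sigma = -2.0
classifier = KCGPApproxOneClass

[OneClassGP]
detection_mode = variance
static_noise = 0.1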

+ 8 - 4
progs/libdepend.inc

@@ -1,4 +1,8 @@
-$(call PKG_DEPEND_EXT,OPENMP)
-$(call PKG_DEPEND_EXT,OPENCV)
-$(call PKG_DEPEND_INT,objrec/iclassifier)
-$(call PKG_DEPEND_INT,vislearning/classifier)
+$(call PKG_DEPEND_INT,core/basics)
+$(call PKG_DEPEND_INT,vislearning/classifier/kernelclassifier)
+$(call PKG_DEPEND_INT,vislearning/math)
+$(call PKG_DEPEND_INT,vislearning/baselib)
+$(call PKG_DEPEND_INT,vislearning/cbaselib)
+$(call PKG_DEPEND_INT,fast-hik)
+
+

+ 147 - 0
progs/testImageNetBinary.cpp

@@ -0,0 +1,147 @@
+/** 
+* @file testImageNetBinary.cpp
+* @brief perform ImageNet tests with binary tasks for OCC
+* @author Alexander Lütz
+* @date 23-05-2012 (dd-mm-yyyy)
+
+*/
+#include "core/basics/Config.h"
+
+#include "vislearning/cbaselib/ClassificationResults.h"
+#include "vislearning/baselib/ProgressBar.h"
+
+#include "fast-hik/tools.h"
+#include "fast-hik/MatFileIO.h"
+#include "fast-hik/ImageNetData.h"
+
+#include "vislearning/classifier/kernelclassifier/KCGPApproxOneClass.h"
+#include "vislearning/classifier/kernelclassifier/KCGPOneClass.h"
+
+#include "vislearning/math/kernels/KernelData.h"
+#include "vislearning/math/kernels/Kernel.h"
+#include "vislearning/math/kernels/KernelRBF.h"
+#include "vislearning/math/kernels/KernelExp.h"
+
+
+using namespace std;
+using namespace NICE;
+using namespace OBJREC;
+
+
+/** 
+    perform binary ImageNet tests with one-class GP classifiers (approximate and exact)
+*/
+int main (int argc, char **argv)
+{   
+  std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+
+  Config conf ( argc, argv );
+  string resultsfile = conf.gS("main", "results", "results.txt" );
+  int positiveClass = conf.gI("main", "positive_class");
+
+  std::cerr << "Positive class is " << positiveClass << std::endl;
+  
+  sparse_t data;
+  NICE::Vector y;
+  
+  std::cerr << "Reading ImageNet data ..." << std::endl;
+  bool imageNetLocal = conf.gB("main", "imageNetLocal" , false);
+  string imageNetPath;
+  if (imageNetLocal)
+    imageNetPath = "/users2/rodner/data/imagenet/devkit-1.0/";
+  else
+    imageNetPath = "/home/dbv/bilder/imagenet/devkit-1.0/";
+
+  ImageNetData imageNet ( imageNetPath + "demo/" );
+
+  imageNet.getBatchData ( data, y, "train", "training" );
+  
+  //set up the kernel function
+  double rbf_sigma = conf.gD("main", "rbf_sigma", -2.0 );
+  KernelRBF kernelFunction ( rbf_sigma, 0.0 );
+    //KernelExp kernelFunction ( rbf_sigma, 0.0, 0.0 );
+
+  //set up our OC-classifier
+  string classifierName = conf.gS("main", "classifier", "KCGPApproxOneClass");
+  
+  KernelClassifier *classifier;
+  if(strcmp("KCGPApproxOneClass",classifierName.c_str())==0)
+  {
+    classifier = new KCGPApproxOneClass ( &conf, &kernelFunction );
+  }
+  else if (strcmp("KCGPOneClass",classifierName.c_str())==0) {
+    classifier = new KCGPOneClass ( &conf, &kernelFunction );
+  }
+  else{ //default
+    classifier = new KCGPApproxOneClass ( &conf, &kernelFunction );
+  }
+  //and perform the training
+  //TODO how do we get a LabelSetSelection-Object out of our matlab data struct?
+//   classifier->teach( train );    
+
+  uint n = y.size();
+  
+  set<int> positives;
+  set<int> negatives;
+
+  map< int, set<int> > mysets;
+  for ( uint i = 0 ; i < n; i++ )
+    mysets[ y[i] ].insert ( i );
+
+  if ( mysets[ positiveClass ].size() == 0 ) 
+    fthrow(Exception, "Class " << positiveClass << " is not available.");
+
+  // add our positive examples
+  for ( set<int>::const_iterator i = mysets[positiveClass].begin(); i != mysets[positiveClass].end(); i++ )
+    positives.insert ( *i );
+
+  int Nneg = conf.gI("main", "nneg", 1 );
+  for ( map<int, set<int> >::const_iterator k = mysets.begin(); k != mysets.end(); k++ )
+  {
+    int classno = k->first;
+    if ( classno == positiveClass )
+      continue;
+    const set<int> & s = k->second;
+    uint ind = 0;
+    for ( set<int>::const_iterator i = s.begin(); (i != s.end() && ind < Nneg); i++,ind++  )
+      negatives.insert ( *i );
+  }
+  std::cerr << "Number of positive examples: " << positives.size() << std::endl;
+  std::cerr << "Number of negative examples: " << negatives.size() << std::endl;
+
+  // ------------------------------ TESTING ------------------------------
+ 
+  std::cerr << "Reading ImageNet test data files (takes some seconds)..." << std::endl;
+  imageNet.preloadData ( "val", "testing" );
+  imageNet.loadExternalLabels ( imageNetPath + "data/ILSVRC2010_validation_ground_truth.txt" );
+ 
+  ClassificationResults results;
+  std::cerr << "Classification step ... with " << imageNet.getNumPreloadedExamples() << " examples" << std::endl;
+  ProgressBar pb;
+  for ( uint i = 0 ; i < (uint)imageNet.getNumPreloadedExamples(); i++ )
+  {
+    pb.update ( imageNet.getNumPreloadedExamples() );
+
+    const SparseVector & svec = imageNet.getPreloadedExample ( i );
+    NICE::Vector vec;
+    svec.convertToVectorT( vec );
+
+    // classification step
+    ClassificationResult r = classifier->classify ( vec );
+    
+    // set ground truth label
+    r.classno_groundtruth = (((int)imageNet.getPreloadedLabel ( i )) == positiveClass) ? 1 : 0;
+    results.push_back ( r );
+  }
+
+  std::cerr << "Writing results to " << resultsfile << std::endl;
+  results.writeWEKA ( resultsfile, 0 );
+  double perfvalue = results.getBinaryClassPerformance( ClassificationResults::PERF_AUC );
+
+  std::cerr << "Performance: " << perfvalue << std::endl;
+  
+  //don't waste memory
+  delete classifier;
+  
+  return 0;
+}
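
For reference, the training step the commit message declares missing would presumably collect the selected positive examples into a LabeledSetVector and hand it to the classifier, which is what the commented-out classifier->teach( train ) call and the TODO hint at. A minimal sketch, written as it could sit inside testImageNetBinary.cpp (so the using directives above apply); it assumes the usual LabeledSetVector::add(classno, vector) and KernelClassifier::teach(LabeledSetVector) interfaces from vislearning, and a purely hypothetical helper exampleAsVector for the sparse-matlab-struct-to-dense-vector conversion the TODO still leaves open:

#include "vislearning/cbaselib/LabeledSet.h"

// hypothetical helper: extract example i from the matlab sparse struct as a
// dense vector -- exactly the conversion the TODO above still asks about
NICE::Vector exampleAsVector ( const sparse_t & data, uint i );

void trainOneClass ( KernelClassifier * classifier,
                     const sparse_t & data,
                     const set<int> & positives )
{
  LabeledSetVector train;

  // one-class setting: only the positive class enters training,
  // the sampled negatives are kept for the evaluation step
  for ( set<int>::const_iterator i = positives.begin(); i != positives.end(); i++ )
  {
    NICE::Vector vec = exampleAsVector ( data, *i );
    train.add ( 1, vec );
  }

  classifier->teach ( train );
}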