Erik Rodner 9 лет назад
Родитель
Commit
69d24d7cc1
2 изменённых файла с 110 добавлениями и 106 удалениями
  1. 8 6
      classifier/kernelclassifier/progs/laplaceTests.cpp
  2. 102 100
      progs/evaluateCompleteBoWPipeline.cpp

+ 8 - 6
classifier/kernelclassifier/progs/laplaceTests.cpp

@@ -1,4 +1,4 @@
-/** 
+/**
 * @file laplaceTests.cpp
 * @brief Laplace Approximation Tests
 * @author Erik Rodner
@@ -27,15 +27,17 @@ using namespace std;
 using namespace OBJREC;
 using namespace NICE;
 
-/** 
-    Laplace Approximation Tests 
+/**
+    Laplace Approximation Tests
 */
 int main (int argc, char **argv)
-{   
+{
+#ifdef __GLIBCXX__
     std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
 
     Config conf ( argc, argv );
-    
+
 	LabeledSetVector train;
 	train.read ( conf.gS("main", "set" ), LabeledSetVector::FILEFORMAT_NOINDEX );
 
@@ -86,6 +88,6 @@ int main (int argc, char **argv)
 	}
 
 	showImage ( img );
-    
+
     return 0;
 }

+ 102 - 100
progs/evaluateCompleteBoWPipeline.cpp

@@ -32,7 +32,7 @@
 // vislearning -- BoW codebooks
 #include "vislearning/features/simplefeatures/CodebookPrototypes.h"
 #include "vislearning/features/simplefeatures/BoWFeatureConverter.h"
-// 
+//
 // vislearning -- local features
 #include <vislearning/features/localfeatures/GenericLFSelection.h>
 //
@@ -46,41 +46,43 @@ using namespace OBJREC;
 
 /**
  a complete BoW pipeline
- 
+
  possibly, we can make use of objrec/progs/testClassifier.cpp
 */
 int main( int argc, char **argv )
 {
+#ifdef __GLIBCXX__
   std::set_terminate( __gnu_cxx::__verbose_terminate_handler );
+#endif
 
   Config * conf = new Config ( argc, argv );
-  
+
   const bool writeClassificationResults = conf->gB( "main", "writeClassificationResults", true );
   const std::string resultsfile = conf->gS( "main", "resultsfile", "/tmp/results.txt" );
-  
+
   ResourceStatistics rs;
-  
+
   // ========================================================================
   //                            TRAINING STEP
   // ========================================================================
 
   MultiDataset md( conf );
-  const LabeledSet *trainFiles = md["train"];  
-   
+  const LabeledSet *trainFiles = md["train"];
+
   //**********************************************
   //
   //     FEATURE EXTRACTION FOR TRAINING IMAGES
   //
-  //**********************************************  
-  
+  //**********************************************
+
   std::cerr << "FEATURE EXTRACTION FOR TRAINING IMAGES" << std::endl;
-  
+
   OBJREC::LocalFeatureRepresentation * featureExtractor = OBJREC::GenericLFSelection::selectLocalFeatureRep ( conf, "features", OBJREC::GenericLFSelection::TRAINING );
-  
+
   //collect features in a single data structure
-  NICE::VVector featuresFromAllTrainingImages;  
+  NICE::VVector featuresFromAllTrainingImages;
   featuresFromAllTrainingImages.clear();
-  
+
   //determine how many training images we actually use to easily allocate the correct amount of memory afterwards
   int numberOfTrainingImages ( 0 );
   for(LabeledSet::const_iterator classIt = trainFiles->begin() ; classIt != trainFiles->end() ; classIt++)
@@ -88,51 +90,51 @@ int main( int argc, char **argv )
     numberOfTrainingImages += classIt->second.size();
     std::cerr << "number of examples for this class: " << classIt->second.size() << std::endl;
   }
-  
-  
+
+
   //okay, this is redundant - but I see no way to do it more easy right now...
   std::vector<NICE::VVector> featuresOfImages ( numberOfTrainingImages );
   //this again is somehow redundant, but we need the labels lateron for easy access - change this to a better solution :)
   NICE::VectorT<int> labelsTrain ( numberOfTrainingImages, 0 );
-  
+
   int imgCnt ( 0 );
-   
+
   // the corresponding nasty makro: LOOP_ALL_S( *trainFiles )
   for(LabeledSet::const_iterator classIt = trainFiles->begin() ; classIt != trainFiles->end() ; classIt++)
   {
-    for ( std::vector<ImageInfo *>::const_iterator imgIt = classIt->second.begin(); 
-          imgIt != classIt->second.end(); 
+    for ( std::vector<ImageInfo *>::const_iterator imgIt = classIt->second.begin();
+          imgIt != classIt->second.end();
           imgIt++, imgCnt++
-        ) 
+        )
     {
       // the corresponding nasty makro: EACH_INFO( classno, info );
       int classno ( classIt->first );
-      const ImageInfo imgInfo = *(*imgIt);      
-      
-      std::string filename = imgInfo.img();      
-      
+      const ImageInfo imgInfo = *(*imgIt);
+
+      std::string filename = imgInfo.img();
+
       NICE::ColorImage img( filename );
-  
+
       //compute features
-      
+
       //variables to store feature information
       NICE::VVector features;
       NICE::VVector positions;
-  
+
       Globals::setCurrentImgFN ( filename );
-      featureExtractor->extractFeatures ( img, features, positions );  
-              
+      featureExtractor->extractFeatures ( img, features, positions );
+
       //normalization :)
       for ( NICE::VVector::iterator i = features.begin();
             i != features.end();
             i++)
-      {              
+      {
         i->normalizeL1();
-      }  
-            
+      }
+
       //collect them all in a larger data structure
       featuresFromAllTrainingImages.append( features );
-      
+
       //and store it as well in the data struct that additionally keeps the information which features belong to which image
       //TODO this can be made more clever!
 //       featuresOfImages.push_back( features );
@@ -140,87 +142,87 @@ int main( int argc, char **argv )
       labelsTrain[imgCnt] = classno;
     }
   }
-  
-  
-  
+
+
+
   //**********************************************
   //
   //             CODEBOOK CREATION
   //
-  //**********************************************    
-  
+  //**********************************************
+
   std::cerr << "CODEBOOK CREATION" << std::endl;
-  
+
   OBJREC::ClusterAlgorithm * clusterAlgo = OBJREC::GenericClusterAlgorithmSelection::selectClusterAlgorithm ( conf );
-   
+
   NICE::VVector prototypes;
-  
+
   std::vector<double> weights;
   std::vector<int> assignments;
-  
+
   std::cerr << "call cluster of cluster algo " << std::endl;
   clusterAlgo->cluster( featuresFromAllTrainingImages, prototypes, weights, assignments );
-  
+
   std::cerr << "create new codebook with the computed prototypes" << std::endl;
   OBJREC::CodebookPrototypes * codebook = new OBJREC::CodebookPrototypes ( prototypes );
-  
-  
+
+
   //**********************************************
   //
   //             VECTOR QUANTIZATION OF
   //           FEATURES OF TRAINING IMAGES
   //
-  //**********************************************  
-  
-  OBJREC::BoWFeatureConverter * bowConverter = new OBJREC::BoWFeatureConverter ( conf, codebook );  
-  
+  //**********************************************
+
+  OBJREC::BoWFeatureConverter * bowConverter = new OBJREC::BoWFeatureConverter ( conf, codebook );
+
   OBJREC::LabeledSetVector trainSet;
-  
+
   NICE::VVector histograms ( featuresOfImages.size() /* number of vectors*/, 0 /* dimension of vectors*/ ); //the internal vectors will be resized within calcHistogram
   NICE::VVector::iterator histogramIt = histograms.begin();
   NICE::VectorT<int>::const_iterator labelsIt = labelsTrain.begin();
-  
+
   for (std::vector<NICE::VVector>::const_iterator imgIt = featuresOfImages.begin(); imgIt != featuresOfImages.end(); imgIt++, histogramIt++, labelsIt++)
   {
     bowConverter->calcHistogram ( *imgIt, *histogramIt );
     bowConverter->normalizeHistogram ( *histogramIt );
-    
+
     //NOTE perhaps we should use add_reference here
     trainSet.add( *labelsIt, *histogramIt );
   }
-  
-  
+
+
   //**********************************************
   //
   //             CLASSIFIER TRAINING
   //
-  //**********************************************  
-  
+  //**********************************************
+
   std::string classifierType = conf->gS( "main", "classifierType", "GPHIK" );
   OBJREC::VecClassifier * classifier = OBJREC::GenericClassifierSelection::selectVecClassifier( conf, classifierType );
-    
+
   //this method adds the training data to the temporary knowledge of our classifier
   classifier->teach( trainSet );
   //now the actual training step starts (e.g., parameter estimation, ... )
   classifier->finishTeaching();
-  
-  
+
+
   // ========================================================================
   //                            TEST STEP
   // ========================================================================
-  
+
   const LabeledSet *testFiles = md["test"];
 
   delete featureExtractor;
-  featureExtractor = OBJREC::GenericLFSelection::selectLocalFeatureRep ( conf, "features", OBJREC::GenericLFSelection::TESTING );  
-  
+  featureExtractor = OBJREC::GenericLFSelection::selectLocalFeatureRep ( conf, "features", OBJREC::GenericLFSelection::TESTING );
+
   NICE::Matrix confusionMat ( testFiles->size() /* number of classes for testing*/, trainFiles->size() /* number of classes in training */, 0.0 );
   NICE::Timer t;
-  
+
   ClassificationResults results;
-  
+
   ProgressBar pbClasses;
-  
+
   // the corresponding nasty makro: LOOP_ALL_S( *testFiles )
   for(LabeledSet::const_iterator classIt = testFiles->begin() ; classIt != testFiles->end() ; classIt++)
   {
@@ -228,63 +230,63 @@ int main( int argc, char **argv )
     pbClasses.update ( testFiles->size() );
     std::cerr << "\nStart next class " << std::endl;
     ProgressBar pbClassExamples;
-    for ( std::vector<ImageInfo *>::const_iterator imgIt = classIt->second.begin(); 
-          imgIt != classIt->second.end(); 
+    for ( std::vector<ImageInfo *>::const_iterator imgIt = classIt->second.begin();
+          imgIt != classIt->second.end();
           imgIt++, imgCnt++
-        ) 
-    {  
+        )
+    {
       pbClassExamples.update ( classIt->second.size() );
-    
+
       // the corresponding nasty makro: EACH_INFO( classno, info );
       int classno ( classIt->first );
-      const ImageInfo imgInfo = *(*imgIt);   
-      
+      const ImageInfo imgInfo = *(*imgIt);
+
       std::string filename = imgInfo.img();
-  
+
       //**********************************************
       //
       //     FEATURE EXTRACTION FOR TEST IMAGES
       //
-      //**********************************************  
-      
+      //**********************************************
+
       NICE::ColorImage img( filename );
-  
+
       //compute features
-      
+
       //variables to store feature information
       NICE::VVector features;
       NICE::VVector positions;
-  
+
       Globals::setCurrentImgFN ( filename );
-      featureExtractor->extractFeatures ( img, features, positions );  
-        
+      featureExtractor->extractFeatures ( img, features, positions );
+
       //normalization :)
       for ( NICE::VVector::iterator i = features.begin();
             i != features.end();
             i++)
-      {              
+      {
         i->normalizeL1();
-      }        
-      
+      }
+
       //**********************************************
       //
       //             VECTOR QUANTIZATION OF
       //           FEATURES OF TEST IMAGES
       //
-      //********************************************** 
-      
+      //**********************************************
+
       NICE::Vector histogramOfCurrentImg;
       bowConverter->calcHistogram ( features, histogramOfCurrentImg );
       bowConverter->normalizeHistogram ( histogramOfCurrentImg );
-      
+
       //**********************************************
       //
       //             CLASSIFIER EVALUATION
       //
-      //**********************************************   
-            
+      //**********************************************
+
       uint classno_groundtruth = classno;
-      
+
       t.start();
       ClassificationResult r = classifier->classify ( histogramOfCurrentImg );
       t.stop();
@@ -295,25 +297,25 @@ int main( int argc, char **argv )
       if ( writeClassificationResults )
       {
         results.push_back( r );
-      }      
-      
+      }
+
       confusionMat( classno_groundtruth, classno_estimated ) += 1;
     }
   }
-  
+
   confusionMat.normalizeRowsL1();
   std::cerr << confusionMat << std::endl;
 
-  std::cerr << "average recognition rate: " << confusionMat.trace()/confusionMat.rows() << std::endl;  
-  
+  std::cerr << "average recognition rate: " << confusionMat.trace()/confusionMat.rows() << std::endl;
+
   if ( writeClassificationResults )
   {
-    double avgRecogResults  ( results.getAverageRecognitionRate () );    
+    double avgRecogResults  ( results.getAverageRecognitionRate () );
     std::cerr << "average recognition rate according to classificationResults: " << avgRecogResults << std::endl;
     results.writeWEKA ( resultsfile, 0 );
-  }     
-  
- 
-  
+  }
+
+
+
    return 0;
-}
+}