
minor changes on classification stuff

Alexander Freytag, 12 years ago
parent commit c2ef1337f9

+ 35 - 2
cbaselib/MultiDataset.cpp

@@ -135,13 +135,27 @@ void MultiDataset::selectExamples ( const std::string & examples_command,
 /** MultiDataset ------- constructor */
 MultiDataset::MultiDataset( const Config *conf , LabeledSetFactory *pSetFactory)
 {
+  //read all blocks from our config file
   std::set<string> blocks;
   conf->getAllBlocks ( blocks );
+  
+#ifdef DEBUG_MultiDataset  
+  std::cerr << "found the following config blocks: " << std::endl;
+  for ( std::set<string>::const_iterator blockIt = blocks.begin(); blockIt != blocks.end(); blockIt++)
+  {
+    std::cerr << *blockIt << " ";
+  }
+  std::cerr << std::endl;
+#endif  
 
   lfl.setFactory( pSetFactory );
 
+  //for every dataset (e.g., train and test), we store a single config file
   map<string, Config> dsconfs;
+  //for every dataset (e.g., train and test), we store the position of the file directory
   map<string, string> dirs;
+  
+  //first of all, remove all blocks which do not correspond to specified datasets, i.e., that do not contain a "dataset" entry
   for ( set<string>::iterator i = blocks.begin();
         i != blocks.end();  )
   {
@@ -165,7 +179,17 @@ MultiDataset::MultiDataset( const Config *conf , LabeledSetFactory *pSetFactory)
       i++;
     }
   }
+  
+#ifdef DEBUG_MultiDataset  
+  std::cerr << "found the following datasets within all config blocks: " << std::endl;
+  for ( std::set<string>::const_iterator blockIt = blocks.begin(); blockIt != blocks.end(); blockIt++)
+  {
+    std::cerr << *blockIt << " ";
+  }
+  std::cerr << std::endl;  
+#endif
 
+  //is there a dataset specified that contains images for both training and testing?
   if ( blocks.find("traintest") != blocks.end() )
   {
     LabeledSet ls_base;
@@ -217,11 +241,13 @@ MultiDataset::MultiDataset( const Config *conf , LabeledSetFactory *pSetFactory)
     datasets["train"] = ls_train;
   }
 
+  //now read files for every specified dataset (e.g., train and test)
   for ( set<string>::const_iterator i = blocks.begin();
         i != blocks.end();
         i++ )
   {
     std::string name = *i;
+    std::cerr << "read: " << name << std::endl;
     if ( classnames.find(name) != classnames.end() )
       continue;
 
@@ -254,8 +280,11 @@ MultiDataset::MultiDataset( const Config *conf , LabeledSetFactory *pSetFactory)
 #endif
         classnames[name].readFromConfig ( dsconfs[name], classselection );
     }
-		
-
+    
+#ifdef DEBUG_MultiDataset
+    std::cerr << "we set up everything to read this dataset - so now call lfl.get" << std::endl;
+#endif
+    
     lfl.get (   dirs[name],
                 dsconfs[name],
                 classnames[name],
@@ -270,6 +299,10 @@ MultiDataset::MultiDataset( const Config *conf , LabeledSetFactory *pSetFactory)
     fprintf (stderr, "MultiDataset: all information about %s set obtained ! (size %d)\n", name.c_str(), ls_base.count() );
 #endif
 
+#ifdef DEBUG_MultiDataset    
+    std::cerr << "we now call selectExamples to pick only a subset if desired" << std::endl;
+#endif
+    
     std::string examples = conf->gS(name, "examples", "all *" );
     selectExamples ( examples, ls_base, ls, dummy, classnames[name] );
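
For reference, the dataset blocks the extended constructor scans for look roughly like this. A minimal sketch, assuming the usual [block] / key = value layout of NICE config files; the directory paths are placeholders, and only the keys actually referenced above are shown: a block is kept only if it contains a "dataset" entry, "examples" defaults to "all *", and an optional "traintest" block provides images for both splits.

[train]
dataset = /path/to/train/data
examples = all *

[test]
dataset = /path/to/test/data
examples = all *

[traintest]
dataset = /path/to/all/data

The new diagnostic output in this constructor is only compiled in when DEBUG_MultiDataset is defined, e.g. by passing -DDEBUG_MultiDataset at compile time.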
 

+ 5 - 1
classifier/genericClassifierSelection.h

@@ -87,7 +87,11 @@ class GenericClassifierSelection
         classifier = new VCNearestClassMean( conf, new NICE::EuclidianDistance<double>() );
 #endif
       }
-      // all Feature Pool Classifiers
+      ////////////////////////////////////////
+      //                                    //
+      //    all Feature Pool Classifiers    //
+      //                                    //
+      ////////////////////////////////////////
 //       else if ( classifier_type == "GPHIK" ) {
 //         FeaturePoolClassifier *fpc = new GPHIKClassifierNICE ( conf, "GPHIK" );
 //         classifier = new VCFeaturePool ( conf, fpc );

+ 2 - 2
featureLearning/FeatureLearningPrototypes.cpp

@@ -82,9 +82,9 @@ void FeatureLearningPrototypes::setFeatureExtractor( const bool & _setForTrainin
   if ( opSiftImpl == "NICE" )
   {
     if ( _setForTraining )
-      cSIFT = new OBJREC::LFonHSG ( this->conf, "HSGtrain" );
+      cSIFT = new OBJREC::LFonHSG ( this->conf, "HSGTrain" );
     else
-      cSIFT = new OBJREC::LFonHSG ( this->conf, "HSGtest" );
+      cSIFT = new OBJREC::LFonHSG ( this->conf, "HSGTest" );
   }
   else if ( opSiftImpl == "VANDESANDE" )
   {
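
Note that this rename is user-visible: the LFonHSG extractor (used when opSiftImpl is "NICE") now reads its settings from capitalized config sections. A minimal sketch of the affected block names, with the keys omitted since they are not changed by this commit:

[HSGTrain]
...

[HSGTest]
...

Config files that still use the old lowercase names "HSGtrain" and "HSGtest" have to be updated accordingly.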

+ 6 - 7
progs/evaluateCompleteBoWPipeline.cpp

@@ -38,7 +38,7 @@
 #include <vislearning/features/localfeatures/LFColorWeijer.h>
 #include <vislearning/features/localfeatures/LFReadCache.h>
 #include <vislearning/features/localfeatures/LFWriteCache.h>
-#include <vislearning/features/localfeatures/GenericLocalFeatureSelection.h>
+#include <vislearning/features/localfeatures/GenericLFSelection.h>
 //
 // vislearning -- clustering methods
 #include <vislearning/math/cluster/ClusterAlgorithm.h>
@@ -48,11 +48,6 @@
 #include <vislearning/math/cluster/GMM.h>
 //
 
-
-//
-#include <vl/generic.h>
-#include <vl/dsift.h>
-
 using namespace std;
 using namespace NICE;
 using namespace OBJREC;
@@ -191,7 +186,10 @@ int main( int argc, char **argv )
   //
   //**********************************************  
   
-  LocalFeatureRepresentation * featureExtractor = setFeatureExtractor( conf );
+  std::cerr << "FEATURE EXTRACTION FOR TRAINING IMAGES" << std::endl;
+  
+  OBJREC::LocalFeatureRepresentation * featureExtractor = OBJREC::GenericLFSelection::selectLocalFeatureRep ( conf, "features", OBJREC::GenericLFSelection::TRAINING );
+//   LocalFeatureRepresentation * featureExtractor = setFeatureExtractor( conf );
   
   //collect features in a single data structure
   NICE::VVector featuresFromAllTrainingImages;  
@@ -246,6 +244,7 @@ int main( int argc, char **argv )
   //
   //**********************************************    
   
+  std::cerr << "CODEBOOK CREATION" << std::endl;
   OBJREC::ClusterAlgorithm * clusterAlgo = setClusterAlgo( conf );
    
   NICE::VVector prototypes;
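
For context, the feature extraction for the training images is now obtained through the generic local-feature factory instead of the program-local setFeatureExtractor helper. A minimal, self-contained sketch of that usage; only GenericLFSelection.h, selectLocalFeatureRep, the "features" section, and the TRAINING specifier are taken from the diff above, while the Config header path, its argc/argv constructor, and passing conf as a pointer are assumptions:

#include <core/basics/Config.h>   // assumed header path for NICE::Config
#include <vislearning/features/localfeatures/GenericLFSelection.h>   // also brings in LocalFeatureRepresentation (assumed)

int main ( int argc, char **argv )
{
  // assumed: NICE::Config can be constructed from the command line arguments
  NICE::Config * conf = new NICE::Config ( argc, argv );

  // select the local feature extractor for the training images,
  // configured from the "features" section (as in the hunk above)
  OBJREC::LocalFeatureRepresentation * featureExtractor =
      OBJREC::GenericLFSelection::selectLocalFeatureRep ( conf, "features", OBJREC::GenericLFSelection::TRAINING );

  // ... run the feature extraction on the training images here ...

  delete featureExtractor;
  delete conf;
  return 0;
}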