
consistency for new position of FPCGPHIK, no dependency on gp-hik-exp anymore

Alexander Freytag 9 years ago
parent
commit
301a79ad93

+ 1 - 1
classifier/GenericFPClassifierSelection.h

@@ -12,7 +12,7 @@
 #include "vislearning/classifier/fpclassifier/randomforest/FPCRandomForests.h"
 #include "vislearning/classifier/fpclassifier/randomforest/FPCDecisionTree.h"
 #include "vislearning/classifier/fpclassifier/logisticregression/FPCSMLR.h"
-#include "vislearning/classifier/fpclassifier/FPCGPHIK.h"
+#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"
 
 
 

+ 106 - 48
classifier/fpclassifier/FPCGPHIK.cpp → classifier/fpclassifier/gphik/FPCGPHIK.cpp

@@ -20,58 +20,54 @@ using namespace NICE;
 using namespace OBJREC;
 
 
-FPCGPHIK::FPCGPHIK( const Config *conf, const string & confSection ) 
+void FPCGPHIK::init ( const NICE::Config *conf, const std::string & s_confSection )
 {
-  this->verbose = conf->gB(confSection, "verbose", false);
-  this->useSimpleBalancing = conf->gB(confSection, "use_simple_balancing", false);
-  this->minSamples = conf->gI(confSection, "min_samples", -1);
-  this->performOptimizationAfterIncrement = conf->gB(confSection, "performOptimizationAfterIncrement", true);
+  this->verbose = conf->gB( s_confSection, "verbose", false );
+  this->useSimpleBalancing = conf->gB( s_confSection, "use_simple_balancing", false );
+  this->minSamples = conf->gI( s_confSection, "min_samples", -1 );
+  this->performOptimizationAfterIncrement = conf->gB( s_confSection, "performOptimizationAfterIncrement", false );
   
-  classifier = new GPHIKClassifier(conf, confSection);
+  this->classifier = new GPHIKClassifier(conf, s_confSection);
 }
 
-FPCGPHIK::~FPCGPHIK()
+FPCGPHIK::FPCGPHIK( ) 
 {
-  if ( classifier != NULL )
-    delete classifier;
+  this->classifier = NULL;
 }
 
-ClassificationResult FPCGPHIK::classify ( Example & pe )
-{  
-
-  NICE::SparseVector *svec;// = pe.svec;  
-  
-  // was only a NICE::Vector given?
-  // Than we had to allocate a new NICE::SparseVector and converted the given NICE::Vector into it.
-  bool newvec = false;  
+FPCGPHIK::FPCGPHIK( const Config *conf, const string & confSection ) 
+{
+  this->classifier = NULL;
   
-  if ( pe.svec != NULL )
+  // if no config file was given, we either restore the classifier from an external file, or run ::init with 
+  // an empty config (thereby using default values) when calling the train-method
+  if ( conf != NULL )
   {
-    svec = pe.svec;
+    this->init(conf, confSection);
   }
-  else
-  {
-    NICE::Vector x;
-
-    x = * ( pe.vec );
-
-    svec = new NICE::SparseVector ( x );
+}
 
-    svec->setDim ( x.size() );
+FPCGPHIK::~FPCGPHIK()
+{
+  if ( this->classifier != NULL )
+    delete this->classifier;
+  this->classifier = NULL;
+}
 
-    newvec = true;
-  }  
-    
-  ClassificationResult result ( this->classify( svec )  );
-    
-  if ( newvec )
-    delete svec;
+ClassificationResult FPCGPHIK::classify ( Example & pe )
+{
+  const SparseVector *svec = pe.svec;
 
-  return result;  
+  if ( svec == NULL )
+    fthrow(Exception, "FPCGPHIK requires example.svec (SparseVector stored in an Example struct)");
+  return this->classify( svec );
 }
 
 ClassificationResult FPCGPHIK::classify ( const NICE::SparseVector * example )
 {
+  if ( this->classifier == NULL )
+    fthrow(Exception, "Classifier not trained yet -- aborting!" );
+  
   NICE::SparseVector scores;
   int result;
   
@@ -112,6 +108,13 @@ ClassificationResult FPCGPHIK::classify ( const NICE::SparseVector * example )
 /** training process */
 void FPCGPHIK::train ( FeaturePool & fp, Examples & examples )
 {
+  if ( this->classifier == NULL )
+  {
+    std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
+    NICE::Config tmpConfEmpty ;
+    this->init ( &tmpConfEmpty );
+  }  
+  
   // we completely ignore the feature pool :)
   //
   initRand(0);
@@ -177,10 +180,7 @@ void FPCGPHIK::train ( FeaturePool & fp, Examples & examples )
 /** training process */
 void FPCGPHIK::train ( const std::vector< const SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
 {
-  
-  std::cerr << "call internal train method " << std::endl;
   classifier->train(examples, binLabels);
-  std::cerr << "training done" << std::endl;
 }
 
 void FPCGPHIK::clear ()
@@ -210,18 +210,62 @@ void FPCGPHIK::predictUncertainty( const NICE::SparseVector * example, double &
   classifier->predictUncertainty(example, uncertainty);
 }
 
-//---------------------------------------------------------------------
-//                           protected methods
-//---------------------------------------------------------------------
+///////////////////// INTERFACE PERSISTENT /////////////////////
+// interface specific methods for store and restore
+///////////////////// INTERFACE PERSISTENT ///////////////////// 
 void FPCGPHIK::restore ( std::istream & is, int format )
 {
   if (is.good())
-  {
-    classifier->restore(is, format);  
-    
+  {    
     std::string tmp;
-    is >> tmp; //"performOptimizationAfterIncrement: "
-    is >> this->performOptimizationAfterIncrement;
+    is >> tmp; //class name 
+    
+    if ( ! this->isStartTag( tmp, "FPCGPHIK" ) )
+    {
+      std::cerr << " WARNING - attempt to restore FPCGPHIK, but start flag " << tmp << " does not match! Aborting... " << std::endl;
+      throw;
+    } 
+    
+    is.precision (numeric_limits<double>::digits10 + 1);
+    
+    bool b_endOfBlock ( false ) ;
+    
+    while ( !b_endOfBlock )
+    {
+      is >> tmp; // start of block 
+      
+      if ( this->isEndTag( tmp, "FPCGPHIK" ) )
+      {
+        b_endOfBlock = true;
+        continue;
+      } 
+
+      tmp = this->removeStartTag( tmp );
+
+      if ( tmp.compare("classifier") == 0 )
+      {
+        if ( classifier == NULL )
+          classifier = new NICE::GPHIKClassifier();
+        
+        //then, load everything that we stored explicitly,
+        // including precomputed matrices, LUTs, eigenvalues, ... and all that stuff
+        classifier->restore(is, format);  
+          
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
+      else if ( tmp.compare("performOptimizationAfterIncrement") == 0 )
+      {
+        is >> performOptimizationAfterIncrement;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else
+      {
+        std::cerr << "WARNING -- unexpected FPCGPHIK object -- " << tmp << " -- for restoration... aborting" << std::endl;
+        throw;
+      }
+    } // while-loop
   }
   else
   {
@@ -235,9 +279,19 @@ void FPCGPHIK::store ( std::ostream & os, int format ) const
   {
     os.precision (numeric_limits<double>::digits10 + 1);
     
+    // show starting point
+    os << this->createStartTag( "FPCGPHIK" ) << std::endl;        
+    
+    os << this->createStartTag( "classifier" ) << std::endl;
     classifier->store(os, format);
+    os << this->createEndTag( "classifier" ) << std::endl;
     
-    os << "performOptimizationAfterIncrement: " << performOptimizationAfterIncrement << std::endl;
+    os << this->createStartTag( "performOptimizationAfterIncrement" ) << std::endl;  
+    os << performOptimizationAfterIncrement << std::endl;
+    os << this->createEndTag( "performOptimizationAfterIncrement" ) << std::endl;     
+    
+    // done
+    os << this->createEndTag( "FPCGPHIK" ) << std::endl;
   }
   else
   {
@@ -245,6 +299,10 @@ void FPCGPHIK::store ( std::ostream & os, int format ) const
   }
 }
 
+///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+// interface specific methods for incremental extensions
+///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+
 void FPCGPHIK::addExample( const Example & pe, const double & label)
 {
   const SparseVector *svec = pe.svec;
@@ -258,7 +316,7 @@ void FPCGPHIK::addMultipleExamples( Examples & newExamples)
     return;
   
   // (multi-class) label vector
-  Vector y ( newExamples.size() );
+  NICE::Vector y ( newExamples.size() );
 
   // flat structure of our training data
   std::vector< const SparseVector * > sparseExamples;
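
The reworked persistency in this file wraps the embedded GPHIKClassifier and the performOptimizationAfterIncrement flag in explicit start/end tags, so a trained object can be serialized and later rebuilt from the new empty constructor without any Config. A minimal round-trip sketch, not part of the commit; the file name, the format value 0, and the OBJREC namespace prefix are assumptions based on the surrounding code:

#include <fstream>
#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"

void persistencyRoundTrip ( const OBJREC::FPCGPHIK & trainedClassifier )
{
  // write the tagged representation introduced by this commit to disk
  std::ofstream ofs ( "fpcgphik.model" );
  trainedClassifier.store ( ofs, 0 /* format */ );
  ofs.close ();

  // rebuild from the empty constructor -- restore() recreates the internal
  // GPHIKClassifier including precomputed matrices, LUTs, eigenvalues, ...
  OBJREC::FPCGPHIK restoredClassifier;
  std::ifstream ifs ( "fpcgphik.model" );
  restoredClassifier.restore ( ifs, 0 );
  ifs.close ();
}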

+ 33 - 3
classifier/fpclassifier/FPCGPHIK.h → classifier/fpclassifier/gphik/FPCGPHIK.h

@@ -33,6 +33,12 @@ class FPCGPHIK : public FeaturePoolClassifier
 
   protected:
     
+    /////////////////////////
+    /////////////////////////
+    // PROTECTED VARIABLES //
+    /////////////////////////
+    /////////////////////////    
+    
     NICE::GPHIKClassifier * classifier;
     
     /** verbose flag for useful output*/
@@ -44,10 +50,27 @@ class FPCGPHIK : public FeaturePoolClassifier
     
     /** When adding new examples, do we want to run a whole optimization of all involved hyperparameters? default: true*/
     bool performOptimizationAfterIncrement;
+    
+    /////////////////////////
+    /////////////////////////
+    //  PROTECTED METHODS  //
+    /////////////////////////
+    /////////////////////////    
+    
+    /** 
+    * @brief Setup internal variables and objects used
+    * @author Alexander Freytag
+    * @param conf Config file to specify variable settings
+    * @param s_confSection
+    */    
+    void init ( const NICE::Config *conf, const std::string & s_confSection = "GPHIKClassifier" );
 
   public:
 
     /** simple constructor */
+    FPCGPHIK( );
+    
+    /** default constructor */
     FPCGPHIK( const NICE::Config *conf, const std::string & confSection = "GPHIKClassifier" );
       
     /** simple destructor */
@@ -86,20 +109,27 @@ class FPCGPHIK : public FeaturePoolClassifier
 
     virtual FeaturePoolClassifier *clone () const;
     
-    /** prediction of classification uncertainty */
+    /** 
+     * @brief prediction of classification uncertainty
+     * @date 19-06-2012 (dd-mm-yyyy)
+     * @author Alexander Freytag
+     * @param pe example for which the classification uncertainty shall be predicted, given in a sparse representation
+     * @param uncertainty contains the resulting classification uncertainty
+     */
     void predictUncertainty( OBJREC::Example & pe, double & uncertainty );
+    
     /** 
      * @brief prediction of classification uncertainty
      * @date 19-06-2012 (dd-mm-yyyy)
      * @author Alexander Freytag
      * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
-     * @param uncertainties contains the resulting classification uncertainties (1 entry for standard setting, m entries for binary-balanced setting)
+     * @param uncertainty contains the resulting classification uncertainty
      */       
     void predictUncertainty( const NICE::SparseVector * example, double & uncertainty );
     
     ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
     // interface specific methods for incremental extensions
-    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////       
+    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////    
     
     void addExample( const OBJREC::Example & pe, const double & label);
     virtual void addMultipleExamples( OBJREC::Examples & newExamples);
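
The header now pairs an empty constructor with the config-driven one and moves the actual setup into the protected init() method; a classifier created without a Config initializes itself with default values on the first call to train(). A short usage sketch of both construction paths, not part of the commit; the config file name, the Config include path, and all variable names are assumptions:

#include "core/basics/Config.h"
#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"

void constructionVariants ( OBJREC::FeaturePool & fp, OBJREC::Examples & examples )
{
  // variant 1: explicit configuration, section name defaults to "GPHIKClassifier"
  NICE::Config conf ( "gphik.conf" );
  OBJREC::FPCGPHIK configuredClassifier ( &conf, "GPHIKClassifier" );
  configuredClassifier.train ( fp, examples );

  // variant 2: empty constructor -- train() falls back to init() with an
  // empty Config, i.e., default settings for all parameters
  OBJREC::FPCGPHIK deferredClassifier;
  deferredClassifier.train ( fp, examples );
}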

+ 8 - 0
classifier/fpclassifier/gphik/Makefile

@@ -0,0 +1,8 @@
+#TARGETS_FROM:=$(notdir $(patsubst %/,%,$(shell pwd)))/$(TARGETS_FROM)
+#$(info recursively going up: $(TARGETS_FROM) ($(shell pwd)))
+
+all:
+
+%:
+	$(MAKE) TARGETS_FROM=$(notdir $(patsubst %/,%,$(shell pwd)))/$(TARGETS_FROM) -C .. $@
+

+ 103 - 0
classifier/fpclassifier/gphik/Makefile.inc

@@ -0,0 +1,103 @@
+# LIBRARY-DIRECTORY-MAKEFILE
+# conventions:
+# - all subdirectories containing a "Makefile.inc" are considered sublibraries
+#   exception: "progs/" and "tests/" subdirectories!
+# - all ".C", ".cpp" and ".c" files in the current directory are linked to a
+#   library
+# - the library depends on all sublibraries 
+# - the library name is created with $(LIBNAME), i.e. it will be somehow
+#   related to the directory name and with the extension .a
+#   (e.g. lib1/sublib -> lib1_sublib.a)
+# - the library will be added to the default build list ALL_LIBRARIES
+
+# --------------------------------
+# - remember the last subdirectory
+#
+# set the variable $(SUBDIR) correctly to the current subdirectory. this
+# variable can be used throughout the current makefile.inc. The many 
+# SUBDIR_before, _add, and everything are only required so that we can recover
+# the previous content of SUBDIR before exiting the makefile.inc
+
+SUBDIR_add:=$(dir $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)))
+SUBDIR_before:=$(SUBDIR)
+SUBDIR:=$(strip $(SUBDIR_add))
+SUBDIR_before_$(SUBDIR):=$(SUBDIR_before)
+ifeq "$(SUBDIR)" "./"
+SUBDIR:=
+endif
+
+# ------------------------
+# - include subdirectories
+#
+# note the variables $(SUBDIRS_OF_$(SUBDIR)) are required later on to recover
+# the dependencies automatically. if you handle dependencies on your own, you
+# can also dump the $(SUBDIRS_OF_$(SUBDIR)) variable, and include the
+# makefile.inc of the subdirectories on your own...
+
+SUBDIRS_OF_$(SUBDIR):=$(patsubst %/Makefile.inc,%,$(wildcard $(SUBDIR)*/Makefile.inc))
+include $(SUBDIRS_OF_$(SUBDIR):%=%/Makefile.inc)
+
+# ----------------------------
+# - include local dependencies
+#
+# you can specify libraries needed by the individual objects or by the whole
+# directory. the object specific additional libraries are only considered
+# when compiling the specific object files
+# TODO: update documentation...
+
+-include $(SUBDIR)libdepend.inc
+
+$(foreach d,$(filter-out %progs %tests,$(SUBDIRS_OF_$(SUBDIR))),$(eval $(call PKG_DEPEND_INT,$(d))))
+
+# ---------------------------
+# - objects in this directory
+#
+# the use of the variable $(OBJS) is not mandatory. it is mandatory however
+# to update $(ALL_OBJS) in a way that it contains the path and name of
+# all objects. otherwise we can not include the appropriate .d files.
+
+OBJS:=$(patsubst %.cpp,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.cpp))) \
+      $(patsubst %.C,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.C))) \
+	  $(shell grep -ls Q_OBJECT $(SUBDIR)*.h | sed -e's@^@/@;s@.*/@$(OBJDIR)moc_@;s@\.h$$@.o@') \
+      $(patsubst %.c,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.c)))
+ALL_OBJS += $(OBJS)
+
+# ----------------------------
+# - binaries in this directory
+#
+# output of binaries in this directory. none of the variables has to be used.
+# but everything you add to $(ALL_LIBRARIES) and $(ALL_BINARIES) will be
+# compiled with `make all`. be sure again to add the files with full path.
+
+LIBRARY_BASENAME:=$(call LIBNAME,$(SUBDIR))
+ifneq "$(SUBDIR)" ""
+ALL_LIBRARIES+=$(LIBDIR)$(LIBRARY_BASENAME).$(LINK_FILE_EXTENSION)
+endif
+
+# ---------------------
+# - binary dependencies
+#
+# there is no way of determining the binary dependencies automatically, so we
+# follow conventions. the current library depends on all sublibraries.
+# all other dependencies have to be added manually by specifying, that the
+# current .pc file depends on some other .pc file. binaries depending on
+# libraries should exclusively use the .pc files as well.
+
+ifeq "$(SKIP_BUILD_$(OBJDIR))" "1"
+$(LIBDIR)$(LIBRARY_BASENAME).a:
+else
+$(LIBDIR)$(LIBRARY_BASENAME).a:$(OBJS) \
+	$(call PRINT_INTLIB_DEPS,$(PKGDIR)$(LIBRARY_BASENAME).a,.$(LINK_FILE_EXTENSION))
+endif
+
+$(PKGDIR)$(LIBRARY_BASENAME).pc: \
+	$(call PRINT_INTLIB_DEPS,$(PKGDIR)$(LIBRARY_BASENAME).pc,.pc)
+
+# -------------------
+# - subdir management
+#
+# as the last step, always add this line to correctly recover the subdirectory
+# of the makefile including this one!
+
+SUBDIR:=$(SUBDIR_before_$(SUBDIR))
+

+ 0 - 0
classifier/fpclassifier/tests/Makefile.inc → classifier/fpclassifier/gphik/tests/Makefile.inc


+ 1 - 1
classifier/fpclassifier/tests/TestFPCGPHIK.cpp → classifier/fpclassifier/gphik/tests/TestFPCGPHIK.cpp

@@ -16,7 +16,7 @@
 
 //----------
 
-#include "vislearning/classifier/fpclassifier/FPCGPHIK.h"
+#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"
 
 //----------
 

+ 0 - 0
classifier/fpclassifier/tests/TestFPCGPHIK.h → classifier/fpclassifier/gphik/tests/TestFPCGPHIK.h


+ 0 - 0
classifier/fpclassifier/tests/sparse20x30matrixM.mat → classifier/fpclassifier/gphik/tests/sparse20x30matrixM.mat


+ 0 - 0
classifier/fpclassifier/tests/sparse3x3matrixA.mat → classifier/fpclassifier/gphik/tests/sparse3x3matrixA.mat


+ 0 - 0
classifier/fpclassifier/tests/toyExample1.data → classifier/fpclassifier/gphik/tests/toyExample1.data


+ 0 - 0
classifier/fpclassifier/tests/toyExample2.data → classifier/fpclassifier/gphik/tests/toyExample2.data


+ 0 - 0
classifier/fpclassifier/tests/toyExampleLargeLargeScale.data → classifier/fpclassifier/gphik/tests/toyExampleLargeLargeScale.data


+ 0 - 0
classifier/fpclassifier/tests/toyExampleLargeScale.data → classifier/fpclassifier/gphik/tests/toyExampleLargeScale.data


+ 1 - 1
classifier/genericClassifierSelection.h

@@ -40,7 +40,7 @@
 #include "vislearning/classifier/fpclassifier/randomforest/FPCRandomForests.h"
 #include "vislearning/classifier/fpclassifier/randomforest/FPCDecisionTree.h"
 #include "vislearning/classifier/fpclassifier/logisticregression/FPCSMLR.h"
-#include "vislearning/classifier/fpclassifier/FPCGPHIK.h"
+#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"
 
 //vislearning -- classifier combinations
 #include "vislearning/classifier/classifiercombination/VCPreRandomForest.h"