浏览代码

completed refactoring wrt new persistent layout

Alexander Freytag 11 年之前
父节点
当前提交
37d92063a8
共有 9 个文件被更改,包括 1089 次插入602 次删除
  1. 658 183
      FMKGPHyperparameterOptimization.cpp
  2. 32 26
      FMKGPHyperparameterOptimization.h
  3. 125 173
      GPHIKClassifier.cpp
  4. 14 12
      GPHIKClassifier.h
  5. 142 182
      GPHIKRegression.cpp
  6. 30 20
      GPHIKRegression.h
  7. 60 1
      Quantization.cpp
  8. 26 5
      Quantization.h
  9. 2 0
      tests/TestGPHIKRegression.cpp

文件差异内容过多而无法显示
+ 658 - 183
FMKGPHyperparameterOptimization.cpp


+ 32 - 26
FMKGPHyperparameterOptimization.h

@@ -75,14 +75,18 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
     /** object performing feature quantization */
     NICE::Quantization *q;
     
+    
+    /** upper bound for hyper parameters (ParameterizedFunction) to optimize */
+    double parameterUpperBound;
+    
+    /** lower bound for hyper parameters (ParameterizedFunction) to optimize */
+    double parameterLowerBound;
+    
     /** the parameterized function we use within the minimum kernel */
     NICE::ParameterizedFunction *pf;
 
-    /** method for solving linear equation systems - needed to compute K^-1 \times y */
-    IterativeLinearSolver *linsolver;
     
-    /** Max. number of iterations the iterative linear solver is allowed to run */
-    int ils_max_iterations;    
+   
     
     /** Simple type definition for precomputation matrices used for fast classification */
     typedef VVector PrecomputedType;
@@ -109,33 +113,38 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
     //! container for multiple kernel matrices (e.g., a data-containing kernel matrix (GMHIKernel) and a noise matrix (IKMNoise) )
     NICE::IKMLinearCombination * ikmsum;    
     
+    //////////////////////////////////////////////
+    //           Iterative Linear Solver        //
+    //////////////////////////////////////////////
+    
+    /** method for solving linear equation systems - needed to compute K^-1 \times y */
+    IterativeLinearSolver *linsolver;    
+    
+    /** Max. number of iterations the iterative linear solver is allowed to run */
+    int ils_max_iterations;    
   
     /////////////////////////////////////
     // optimization related parameters //
     /////////////////////////////////////
     
-    enum {
+    enum OPTIMIZATIONTECHNIQUE{
       OPT_GREEDY = 0,
       OPT_DOWNHILLSIMPLEX,
       OPT_NONE
     };
 
     /** specify the optimization method used (see corresponding enum) */
-    int optimizationMethod;
+    OPTIMIZATIONTECHNIQUE optimizationMethod;
     
     //! whether or not to optimize noise with the GP likelihood
     bool optimizeNoise;     
     
-    /** upper bound for hyper parameters to optimize */
-    double parameterUpperBound;
-    
-    /** lower bound for hyper parameters to optimize */
-    double parameterLowerBound;
-    
         // specific to greedy optimization
     /** step size used in grid based greedy optimization technique */
     double parameterStepSize;
     
+     
+    
         // specific to downhill simplex optimization
     /** Max. number of iterations the downhill simplex optimizer is allowed to run */
     int downhillSimplexMaxIterations;
@@ -147,7 +156,9 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
     double downhillSimplexParamTol;
     
     
-      // likelihood computation related variables
+    //////////////////////////////////////////////
+    // likelihood computation related variables //
+    //////////////////////////////////////////////  
 
     /** whether to compute the exact likelihood by computing the exact kernel matrix (not recommended - only for debugging/comparison purpose) */
     bool verifyApproximation;
@@ -256,27 +267,29 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
     /**
     * @brief simple constructor
     * @author Alexander Freytag
+    * @param b_performRegression
     */
     FMKGPHyperparameterOptimization( const bool & b_performRegression );
 
     /**
     * @brief recommended constructor, only calls this->initialize with same input arguments
+    * @author Alexander Freytag
+    * @param conf
+    * @param confSection
     *
-    * @param pf pointer to a parameterized function used within the minimum kernel min(f(x_i), f(x_j)) (will not be deleted)
-    * @param noise GP label noise
-    * @param fmk pointer to a pre-initialized structure (will be deleted)
     */
     FMKGPHyperparameterOptimization( const Config *conf, const std::string & confSection = "GPHIKClassifier" );
     
     
     /**
     * @brief recommended constructor, only calls this->initialize with same input arguments
+    * @author Alexander Freytag
     *
-    * @param pf pointer to a parameterized function used within the minimum kernel min(f(x_i), f(x_j)) (will not be deleted)
-    * @param noise GP label noise
+    * @param conf
     * @param fmk pointer to a pre-initialized structure (will be deleted)
+    * @param confSection
     */
-    FMKGPHyperparameterOptimization( const Config *conf, ParameterizedFunction *_pf, FastMinKernel *_fmk, const std::string & confSection = "GPHIKClassifier" );
+    FMKGPHyperparameterOptimization( const Config *conf, FastMinKernel *_fmk, const std::string & confSection = "GPHIKClassifier" );
       
     /**
     * @brief standard destructor
@@ -318,13 +331,6 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
      * @date 05-02-2014 (dd-mm-yyyy)
      */
     void setPerformRegression ( const bool & b_performRegression );
-
-      /**
-     * @brief Set the ParameterizedFunction object. Only allowed if not trained. Otherwise, exceptions will be thrown...
-     * @author Alexander Freytag
-     * @date 05-02-2014 (dd-mm-yyyy)
-     */
-    void setParameterizedFunction ( ParameterizedFunction *pf );
     
     /**
      * @brief Set the FastMinKernel object. Only allowed if not trained. Otherwise, exceptions will be thrown...

+ 125 - 173
GPHIKClassifier.cpp

@@ -15,9 +15,6 @@
 
 // gp-hik-core includes
 #include "GPHIKClassifier.h"
-#include "gp-hik-core/parameterizedFunctions/PFAbsExp.h"
-#include "gp-hik-core/parameterizedFunctions/PFExp.h"
-#include "gp-hik-core/parameterizedFunctions/PFMKL.h"
 
 using namespace std;
 using namespace NICE;
@@ -36,32 +33,48 @@ using namespace NICE;
 /////////////////////////////////////////////////////
 /////////////////////////////////////////////////////
 GPHIKClassifier::GPHIKClassifier( ) 
-{
-  //default settings, may be overwritten lateron
-  gphyper = NULL;
-  pf = NULL;
-  //just a default value
-  uncertaintyPredictionForClassification = false;
-  
+{  
+  this->b_isTrained = false;  
   this->confSection = "";
   
+  this->gphyper = new NICE::FMKGPHyperparameterOptimization();
+  
+  // in order to be sure about all necessary variables be setup with default values, we
+  // run initFromConfig with an empty config
+  NICE::Config tmpConfEmpty ;
+  this->initFromConfig ( &tmpConfEmpty, this->confSection );  
+  
+
 }
 
-GPHIKClassifier::GPHIKClassifier( const Config *conf, const string & s_confSection ) 
+GPHIKClassifier::GPHIKClassifier( const Config *conf, const string & s_confSection )
 {
-  //default settings, may be overwritten lateron
-  gphyper = NULL;
-  pf = NULL;
-  //just a default value
-  uncertaintyPredictionForClassification = false;
+  ///////////
+  // same code as in empty constructor - duplication can be avoided with C++11 allowing for constructor delegation
+  ///////////
+  
+  this->b_isTrained = false;  
+  this->confSection = "";
   
-  this->confSection = s_confSection;
+  this->gphyper = new NICE::FMKGPHyperparameterOptimization();
+  
+  ///////////
+  // here comes the new code part different from the empty constructor
+  ///////////
+  
+  this->confSection = s_confSection;  
   
   // if no config file was given, we either restore the classifier from an external file, or run ::init with 
  // an empty config (using default values thereby) when calling the train-method
   if ( conf != NULL )
   {
-    this->initFromConfig(conf, confSection);
+    this->initFromConfig( conf, confSection );
+  }
+  else
+  {
+    // if no config was given, we create an empty one
+    NICE::Config tmpConfEmpty ;
+    this->initFromConfig ( &tmpConfEmpty, this->confSection );      
   }
 }
 
@@ -69,40 +82,12 @@ GPHIKClassifier::~GPHIKClassifier()
 {
   if ( gphyper != NULL )
     delete gphyper;
-  
-  if (pf != NULL)
-    delete pf;
-
 }
 
 void GPHIKClassifier::initFromConfig(const Config *conf, const string & s_confSection)
 { 
-  double parameterUpperBound = conf->gD(confSection, "parameter_upper_bound", 5.0 );
-  double parameterLowerBound = conf->gD(confSection, "parameter_lower_bound", 1.0 );  
-
   this->noise = conf->gD(confSection, "noise", 0.01);
 
-  string transform = conf->gS(confSection, "transform", "absexp" );
-  
-  if (pf == NULL)
-  {
-    if ( transform == "absexp" )
-    {
-      this->pf = new PFAbsExp( 1.0, parameterLowerBound, parameterUpperBound );
-    } else if ( transform == "exp" ) {
-      this->pf = new PFExp( 1.0, parameterLowerBound, parameterUpperBound );
-    }else if ( transform == "MKL" ) {
-      //TODO generic, please :) load from a separate file or something like this!
-      std::set<int> steps; steps.insert(4000); steps.insert(6000); //specific for VISAPP
-      this->pf = new PFMKL( steps, parameterLowerBound, parameterUpperBound );
-    } else {
-      fthrow(Exception, "Transformation type is unknown " << transform);
-    }
-  }
-  else
-  {
-    //we already know the pf from the restore-function
-  }
   this->confSection = confSection;
   this->verbose = conf->gB(confSection, "verbose", false);
   this->debug = conf->gB(confSection, "debug", false);
@@ -117,38 +102,34 @@ void GPHIKClassifier::initFromConfig(const Config *conf, const string & s_confSe
     this->varianceApproximation = APPROXIMATE_ROUGH;
     
     //no additional eigenvalue is needed here at all.
-    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
-    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0 );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );    
   }
   else if ( (s_varianceApproximation.compare("approximate_fine") == 0) || ((s_varianceApproximation.compare("2") == 0)) )
   {
-    this->varianceApproximation = APPROXIMATE_FINE;
+    this->varianceApproximation = APPROXIMATE_FINE;    
     
     //security check - compute at least one eigenvalue for this approximation strategy
     this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( std::max( conf->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
-    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", std::max( conf->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
   }
   else if ( (s_varianceApproximation.compare("exact") == 0)  || ((s_varianceApproximation.compare("3") == 0)) )
   {
     this->varianceApproximation = EXACT;
     
     //no additional eigenvalue is needed here at all.
-    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 1 );
-    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );    
-    //TODO check why here 1, and 0 above    
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
   }
   else
   {
     this->varianceApproximation = NONE;
     
     //no additional eigenvalue is needed here at all.
-    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 1 );
-    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
   } 
   
   if ( this->verbose )
     std::cerr << "varianceApproximationStrategy: " << s_varianceApproximation  << std::endl;
   
+  //NOTE init all member pointer variables here as well
   this->gphyper->initFromConfig ( conf, confSection /*possibly delete the handing of confSection*/);
 }
 
@@ -158,7 +139,7 @@ void GPHIKClassifier::initFromConfig(const Config *conf, const string & s_confSe
 
 std::set<int> GPHIKClassifier::getKnownClassNumbers ( ) const
 {
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Classifier not trained yet -- aborting!" );  
   
   return gphyper->getKnownClassNumbers();
@@ -183,7 +164,7 @@ void GPHIKClassifier::classify ( const NICE::Vector * example,  int & result, Sp
 
 void GPHIKClassifier::classify ( const SparseVector * example,  int & result, SparseVector & scores, double & uncertainty ) const
 {
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Classifier not trained yet -- aborting!" );
   
   scores.clear();
@@ -215,7 +196,7 @@ void GPHIKClassifier::classify ( const SparseVector * example,  int & result, Sp
 
 void GPHIKClassifier::classify ( const NICE::Vector * example,  int & result, SparseVector & scores, double & uncertainty ) const
 {
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Classifier not trained yet -- aborting!" );  
   
   scores.clear();
@@ -259,44 +240,16 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
     std::cerr << "GPHIKClassifier::train" << std::endl;
   }
   
-//   if ( this->conf == NULL )
-  if ( this->pf == NULL ) // pf will only be set in ::init or in ::restore
-  {
-    std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
-    NICE::Config tmpConfEmpty ;
-    this->initFromConfig ( &tmpConfEmpty, this->confSection );
-  }
-
   Timer t;
   t.start();
+  
   FastMinKernel *fmk = new FastMinKernel ( examples, noise, this->debug );
+  gphyper->setFastMinKernel ( fmk ); 
   
   t.stop();
   if (verbose)
     std::cerr << "Time used for setting up the fmk object: " << t.getLast() << std::endl;  
-  
- /* if (gphyper != NULL)
-     delete gphyper;
- */ 
-  if ( gphyper == NULL )
-  {
-    // security check, which should always be skipped since gphyper is created in this->init
-    NICE::Config tmpConfEmpty ;
-    gphyper = new FMKGPHyperparameterOptimization ( &tmpConfEmpty, confSection ); 
-  }
-  
-  if ( ( varianceApproximation != APPROXIMATE_FINE) )
-  {
-    //TODO check whether this is easier then the version above
-    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
-    //conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0);
-  }
-   
-  //those two methods replace the previous call of the "full" constructor
-  gphyper->setParameterizedFunction ( pf );
-  gphyper->setFastMinKernel ( fmk );  
-//   gphyper->init (pf, fmk ); 
-//   gphyper = new FMKGPHyperparameterOptimization ( conf, pf, fmk, confSection ); 
+ 
 
   if (verbose)
     cerr << "Learning ..." << endl;
@@ -332,6 +285,8 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
     }
   }
 
+  //indicate that we finished training successfully
+  this->b_isTrained = true;
 
   // clean up all examples ??
   if (verbose)
@@ -355,33 +310,17 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
   
   if (verbose)
     std::cerr << "GPHIKClassifier::train" << std::endl;
-  
-//   if ( this->conf == NULL )
-  if ( this->pf == NULL ) // pf will only be set in ::init or in ::restore
-  {
-    std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
-    NICE::Config tmpConfEmpty ;
-    this->initFromConfig ( &tmpConfEmpty, this->confSection );
-  }  
-
+ 
   Timer t;
   t.start();
+  
   FastMinKernel *fmk = new FastMinKernel ( examples, noise, this->debug );
+  gphyper->setFastMinKernel ( fmk );  
+  
   t.stop();
   if (verbose)
     std::cerr << "Time used for setting up the fmk object: " << t.getLast() << std::endl;  
-  
-//   if (gphyper != NULL)
-//      delete gphyper;
-  if ( gphyper == NULL )
-  {
-    // security check, which should always be skipped since gphyper is created in this->init
-    NICE::Config tmpConfEmpty ;
-    gphyper = new FMKGPHyperparameterOptimization ( &tmpConfEmpty, confSection ); 
-  }
-  //those two methods replace the previous call of the "full" constructor
-  gphyper->setParameterizedFunction ( pf );
-  gphyper->setFastMinKernel ( fmk );
+
 
 
   if (verbose)
@@ -417,6 +356,9 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
     }
   }
 
+  //indicate that we finished training successfully
+  this->b_isTrained = true;
+
   // clean up all examples ??
   if (verbose)
     std::cerr << "Learning finished" << std::endl;
@@ -455,8 +397,6 @@ void GPHIKClassifier::predictUncertainty( const NICE::SparseVector * example, do
     default:
     {
       fthrow(Exception, "GPHIKClassifier - your settings disabled the variance approximation needed for uncertainty prediction.");
-//       uncertainty = numeric_limits<double>::max();
-//       break;
     }
   }
 }
@@ -487,8 +427,6 @@ void GPHIKClassifier::predictUncertainty( const NICE::Vector * example, double &
     default:
     {
       fthrow(Exception, "GPHIKClassifier - your settings disabled the variance approximation needed for uncertainty prediction.");
-//       uncertainty = numeric_limits<double>::max();
-//       break;
     }
   }
 }
@@ -521,12 +459,6 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
       throw;
     }   
     
-    if (pf != NULL)
-    {
-      delete pf;
-      pf = NULL;
-    }
-    //
     if (gphyper != NULL)
     {
       delete gphyper;
@@ -558,33 +490,6 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
         is >> tmp; // end of block 
         tmp = this->removeEndTag ( tmp );
       }
-      else if ( tmp.compare("pf") == 0 )
-      {
-      
-        is >> tmp; // start of block 
-        if ( this->isEndTag( tmp, "pf" ) )
-        {
-          std::cerr << " ParameterizedFunction object can not be restored. Aborting..." << std::endl;
-          throw;
-        } 
-        
-        std::string transform = this->removeStartTag ( tmp );
-        
-
-        if ( transform == "PFAbsExp" )
-        {
-          this->pf = new PFAbsExp ();
-        } else if ( transform == "PFExp" ) {
-          this->pf = new PFExp ();
-        } else {
-          fthrow(Exception, "Transformation type is unknown " << transform);
-        }
-        
-        pf->restore(is, format);
-        
-        is >> tmp; // end of block 
-        tmp = this->removeEndTag ( tmp );
-      } 
       else if ( tmp.compare("gphyper") == 0 )
       {
         if ( gphyper == NULL )
@@ -596,20 +501,51 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
           
         is >> tmp; // end of block 
         tmp = this->removeEndTag ( tmp );
-      }       
+      }   
+      else if ( tmp.compare("b_isTrained") == 0 )
+      {
+        is >> b_isTrained;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
+      else if ( tmp.compare("noise") == 0 )
+      {
+        is >> noise;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else if ( tmp.compare("verbose") == 0 )
+      {
+        is >> verbose;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else if ( tmp.compare("debug") == 0 )
+      {
+        is >> debug;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else if ( tmp.compare("uncertaintyPredictionForClassification") == 0 )
+      {
+        is >> uncertaintyPredictionForClassification;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
+      else if ( tmp.compare("varianceApproximation") == 0 )
+      {
+        unsigned int ui_varianceApproximation;
+        is >> ui_varianceApproximation;        
+        varianceApproximation = static_cast<VarianceApproximation> ( ui_varianceApproximation );
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
       else
       {
       std::cerr << "WARNING -- unexpected GPHIKClassifier object -- " << tmp << " -- for restoration... aborting" << std::endl;
       throw;
       }
     }
-
-    //load every settings as well as default options
-    //TODO check that this is not needed anymore!!!
-//     std::cerr << "run this->init" << std::endl;
-//     this->init(confCopy, confSection);    
-//     std::cerr << "run gphyper->initialize" << std::endl;
-//     gphyper->initialize ( confCopy, pf, NULL, confSection );
   }
   else
   {
@@ -619,10 +555,7 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
 }
 
 void GPHIKClassifier::store ( std::ostream & os, int format ) const
-{
-  if (gphyper == NULL)
-     fthrow(Exception, "Classifier not trained yet -- aborting!" );  
-  
+{ 
   if (os.good())
   {
     // show starting point
@@ -633,17 +566,43 @@ void GPHIKClassifier::store ( std::ostream & os, int format ) const
     os << this->createStartTag( "confSection" ) << std::endl;
     os << confSection << std::endl;
     os << this->createEndTag( "confSection" ) << std::endl; 
-    
-    os << this->createStartTag( "pf" ) << std::endl;
-    pf->store(os, format);
-    os << this->createEndTag( "pf" ) << std::endl; 
    
     os << this->createStartTag( "gphyper" ) << std::endl;
     //store the underlying data
     //will be done in gphyper->store(of,format)
     //store the optimized parameter values and all that stuff
     gphyper->store(os, format);
-    os << this->createEndTag( "gphyper" ) << std::endl;   
+    os << this->createEndTag( "gphyper" ) << std::endl; 
+    
+    
+    /////////////////////////////////////////////////////////
+    // store variables which we previously set via config    
+    /////////////////////////////////////////////////////////
+    os << this->createStartTag( "b_isTrained" ) << std::endl;
+    os << b_isTrained << std::endl;
+    os << this->createEndTag( "b_isTrained" ) << std::endl; 
+    
+    os << this->createStartTag( "noise" ) << std::endl;
+    os << noise << std::endl;
+    os << this->createEndTag( "noise" ) << std::endl;
+    
+    
+    os << this->createStartTag( "verbose" ) << std::endl;
+    os << verbose << std::endl;
+    os << this->createEndTag( "verbose" ) << std::endl; 
+    
+    os << this->createStartTag( "debug" ) << std::endl;
+    os << debug << std::endl;
+    os << this->createEndTag( "debug" ) << std::endl; 
+    
+    os << this->createStartTag( "uncertaintyPredictionForClassification" ) << std::endl;
+    os << uncertaintyPredictionForClassification << std::endl;
+    os << this->createEndTag( "uncertaintyPredictionForClassification" ) << std::endl;
+    
+    os << this->createStartTag( "varianceApproximation" ) << std::endl;
+    os << varianceApproximation << std::endl;
+    os << this->createEndTag( "varianceApproximation" ) << std::endl;     
+  
     
     
     // done
@@ -662,13 +621,6 @@ void GPHIKClassifier::clear ()
     delete gphyper;
     gphyper = NULL;
   }
-  
-  if (pf != NULL)
-  {
-    delete pf;
-    pf = NULL;
-  }
-
 }
 
 ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////
@@ -681,7 +633,7 @@ void GPHIKClassifier::addExample( const NICE::SparseVector * example,
 			   )
 {
   
-  if ( this->gphyper == NULL )
+  if ( ! this->b_isTrained )
   {
     //call train method instead
     std::cerr << "Classifier not initially trained yet -- run initial training instead of incremental extension!"  << std::endl;
@@ -708,7 +660,7 @@ void GPHIKClassifier::addMultipleExamples( const std::vector< const NICE::Sparse
   if ( newExamples.size() < 1)
     return;
 
-  if ( this->gphyper == NULL )
+  if ( ! this->b_isTrained )
   {
     //call train method instead
     std::cerr << "Classifier not initially trained yet -- run initial training instead of incremental extension!"  << std::endl;

+ 14 - 12
GPHIKClassifier.h

@@ -20,7 +20,7 @@
 // gp-hik-core includes
 #include "gp-hik-core/FMKGPHyperparameterOptimization.h"
 #include "gp-hik-core/OnlineLearnable.h"
-#include "gp-hik-core/parameterizedFunctions/ParameterizedFunction.h"
+
 
 namespace NICE {
   
@@ -41,30 +41,32 @@ class GPHIKClassifier : public NICE::Persistent, public NICE::OnlineLearnable
     /////////////////////////
     /////////////////////////
     
-    // output/debug related settings
+    ///////////////////////////////////
+    // output/debug related settings //   
+    ///////////////////////////////////
     
     /** verbose flag for useful output*/
     bool verbose;
     /** debug flag for several outputs useful for debugging*/
     bool debug;
     
-    // general specifications
+    //////////////////////////////////////
+    //      general specifications      //
+    //////////////////////////////////////
     
     /** Header in configfile where variable settings are stored */
-    std::string confSection;
-//     /** Configuration file specifying variable settings */
-//     NICE::Config *confCopy; 
+    std::string confSection;    
+    
+    //////////////////////////////////////
+    // classification related variables //
+    //////////////////////////////////////    
+    /** memorize whether the classifier was already trained*/
+    bool b_isTrained;
     
-    // internal objects 
     
     /** Main object doing all the jobs: training, classification, optimization, ... */
     NICE::FMKGPHyperparameterOptimization *gphyper;    
     
-    /** Possibility for transforming feature values, parameters can be optimized */
-    NICE::ParameterizedFunction *pf;    
-    
-    
-    
     
     /** Gaussian label noise for model regularization */
     double noise;

+ 142 - 182
GPHIKRegression.cpp

@@ -27,123 +27,116 @@ using namespace NICE;
 /////////////////////////////////////////////////////
 /////////////////////////////////////////////////////
 
-void GPHIKRegression::init(const Config *conf, const string & s_confSection)
+
+
+/////////////////////////////////////////////////////
+/////////////////////////////////////////////////////
+//                 PUBLIC METHODS
+/////////////////////////////////////////////////////
+/////////////////////////////////////////////////////
+GPHIKRegression::GPHIKRegression( ) 
 {
-  //copy the given config to have it accessible lateron
-  if ( this->confCopy != conf )
-  {
-    if ( this->confCopy != NULL )
-      delete this->confCopy;
-    
-    this->confCopy = new Config ( *conf );
-    //we do not want to read until end of file for restoring    
-    this->confCopy->setIoUntilEndOfFile(false);        
-  }
+  this->b_isTrained = false;  
+  this->confSection = "";
   
-
+  this->gphyper = new NICE::FMKGPHyperparameterOptimization();
   
-  double parameterUpperBound = confCopy->gD(confSection, "parameter_upper_bound", 5.0 );
-  double parameterLowerBound = confCopy->gD(confSection, "parameter_lower_bound", 1.0 );  
-
-  this->noise = confCopy->gD(confSection, "noise", 0.01);
+  // in order to be sure about all necessary variables be setup with default values, we
+  // run initFromConfig with an empty config
+  NICE::Config tmpConfEmpty ;
+  this->initFromConfig ( &tmpConfEmpty, this->confSection ); 
+  
+  //indicate that we perform regression here
+  this->gphyper->setPerformRegression ( true );  
+}
 
-  string transform = confCopy->gS(confSection, "transform", "absexp" );
+GPHIKRegression::GPHIKRegression( const Config *conf, const string & s_confSection ) 
+{
+  ///////////
+  // same code as in empty constructor - duplication can be avoided with C++11 allowing for constructor delegation
+  ///////////  
+  this->b_isTrained = false;  
+  this->confSection = "";
+  
+  this->gphyper = new NICE::FMKGPHyperparameterOptimization();
+  
+  ///////////
+  // here comes the new code part different from the empty constructor
+  /////////// 
   
-  if (pf == NULL)
+  this->confSection = s_confSection;  
+  
+  // if no config file was given, we either restore the classifier from an external file, or run ::init with 
+  // an empty config (using default values thereby) when calling the train-method
+  if ( conf != NULL )
   {
-    if ( transform == "absexp" )
-    {
-      this->pf = new PFAbsExp( 1.0, parameterLowerBound, parameterUpperBound );
-    } else if ( transform == "exp" ) {
-      this->pf = new PFExp( 1.0, parameterLowerBound, parameterUpperBound );
-    }else if ( transform == "MKL" ) {
-      //TODO generic, please :) load from a separate file or something like this!
-      std::set<int> steps; steps.insert(4000); steps.insert(6000); //specific for VISAPP
-      this->pf = new PFMKL( steps, parameterLowerBound, parameterUpperBound );
-    } else {
-      fthrow(Exception, "Transformation type is unknown " << transform);
-    }
+    this->initFromConfig( conf, confSection );
   }
   else
   {
-    //we already know the pf from the restore-function
+    // if no config was given, we create an empty one
+    NICE::Config tmpConfEmpty ;
+    this->initFromConfig ( &tmpConfEmpty, this->confSection );      
   }
+  
+  //indicate that we perform regression here
+  this->gphyper->setPerformRegression ( true );    
+}
+
+GPHIKRegression::~GPHIKRegression()
+{
+  if ( gphyper != NULL )
+    delete gphyper;
+}
+
+void GPHIKRegression::initFromConfig(const Config *conf, const string & s_confSection)
+{
+
+  this->noise = conf->gD(confSection, "noise", 0.01);
+
   this->confSection = confSection;
-  this->verbose = confCopy->gB(confSection, "verbose", false);
-  this->debug = confCopy->gB(confSection, "debug", false);
-  this->uncertaintyPredictionForRegression = confCopy->gB( confSection, "uncertaintyPredictionForRegression", false );
+  this->verbose = conf->gB(confSection, "verbose", false);
+  this->debug = conf->gB(confSection, "debug", false);
+  this->uncertaintyPredictionForRegression = conf->gB( confSection, "uncertaintyPredictionForRegression", false );
   
 
    
   //how do we approximate the predictive variance for regression uncertainty?
-  string s_varianceApproximation = confCopy->gS(confSection, "varianceApproximation", "approximate_fine"); //default: fine approximative uncertainty prediction
+  string s_varianceApproximation = conf->gS(confSection, "varianceApproximation", "approximate_fine"); //default: fine approximative uncertainty prediction
   if ( (s_varianceApproximation.compare("approximate_rough") == 0) || ((s_varianceApproximation.compare("1") == 0)) )
   {
     this->varianceApproximation = APPROXIMATE_ROUGH;
     
     //no additional eigenvalue is needed here at all.
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0 );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );     
   }
   else if ( (s_varianceApproximation.compare("approximate_fine") == 0) || ((s_varianceApproximation.compare("2") == 0)) )
   {
     this->varianceApproximation = APPROXIMATE_FINE;
     
     //security check - compute at least one eigenvalue for this approximation strategy
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", std::max( confCopy->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( std::max( conf->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
   }
   else if ( (s_varianceApproximation.compare("exact") == 0)  || ((s_varianceApproximation.compare("3") == 0)) )
   {
     this->varianceApproximation = EXACT;
     
     //no additional eigenvalue is needed here at all.
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );    
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
   }
   else
   {
     this->varianceApproximation = NONE;
     
     //no additional eigenvalue is needed here at all.
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
   } 
   
   if ( this->verbose )
     std::cerr << "varianceApproximationStrategy: " << s_varianceApproximation  << std::endl;
-}
-
-/////////////////////////////////////////////////////
-/////////////////////////////////////////////////////
-//                 PUBLIC METHODS
-/////////////////////////////////////////////////////
-/////////////////////////////////////////////////////
-GPHIKRegression::GPHIKRegression( const Config *conf, const string & s_confSection ) 
-{
-  //default settings, may be overwritten lateron
-  gphyper = NULL;
-  pf = NULL;
-  confCopy = NULL;
-  //just a default value
-  uncertaintyPredictionForRegression = false;
-  
-  this->confSection = s_confSection;
-  
-  // if no config file was given, we either restore the classifier from an external file, or run ::init with 
-  // an emtpy config (using default values thereby) when calling the train-method
-  if ( conf != NULL )
-  {
-    this->init(conf, confSection);
-  }
-}
-
-GPHIKRegression::~GPHIKRegression()
-{
-  if ( gphyper != NULL )
-    delete gphyper;
   
-  if (pf != NULL)
-    delete pf;
-
-  if ( confCopy != NULL )
-    delete confCopy;
+  //NOTE init all member pointer variables here as well
+  this->gphyper->initFromConfig ( conf, confSection /*possibly delete the handing of confSection*/);  
 }
 
 ///////////////////// ///////////////////// /////////////////////
@@ -170,7 +163,7 @@ void GPHIKRegression::estimate ( const NICE::Vector * example,  double & result
 
 void GPHIKRegression::estimate ( const SparseVector * example,  double & result, double & uncertainty ) const
 {
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Regression object not trained yet -- aborting!" );
   
   NICE::SparseVector scores;
@@ -206,7 +199,7 @@ void GPHIKRegression::estimate ( const SparseVector * example,  double & result,
 
 void GPHIKRegression::estimate ( const NICE::Vector * example,  double & result, double & uncertainty ) const
 {
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Regression object not trained yet -- aborting!" );  
   
   NICE::SparseVector scores;
@@ -253,34 +246,17 @@ void GPHIKRegression::train ( const std::vector< const NICE::SparseVector *> & e
   {
     std::cerr << "GPHIKRegression::train" << std::endl;
   }
-  
-  if ( this->confCopy == NULL )
-  {
-    std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
-    NICE::Config tmpConfEmpty ;
-    this->init ( &tmpConfEmpty, this->confSection );
-  }
 
   Timer t;
   t.start();
+  
   FastMinKernel *fmk = new FastMinKernel ( examples, noise, this->debug );
+  gphyper->setFastMinKernel ( fmk );
   
   t.stop();
   if (verbose)
     std::cerr << "Time used for setting up the fmk object: " << t.getLast() << std::endl;  
   
-  if (gphyper != NULL)
-     delete gphyper;
-  
-  
-  if ( ( varianceApproximation != APPROXIMATE_FINE) )
-    confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0);
-
-  // add flag for gphyper that only regression is performed
-  // thereby, all the binary-label-stuff should be skipped :)  
-  confCopy->sB ( confSection, "b_performRegression", true );
-  gphyper = new FMKGPHyperparameterOptimization ( confCopy, pf, fmk, confSection ); 
-
   if (verbose)
     cerr << "Learning ..." << endl;
 
@@ -315,7 +291,9 @@ void GPHIKRegression::train ( const std::vector< const NICE::SparseVector *> & e
     }
   }
 
-
+  //indicate that we finished training successfully
+  this->b_isTrained = true;
+  
   // clean up all examples ??
   if (verbose)
     std::cerr << "Learning finished" << std::endl;
@@ -331,7 +309,7 @@ GPHIKRegression *GPHIKRegression::clone () const
   
 void GPHIKRegression::predictUncertainty( const NICE::SparseVector * example, double & uncertainty ) const
 {  
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Regression object not trained yet -- aborting!" );  
   
   switch (varianceApproximation)    
@@ -360,7 +338,7 @@ void GPHIKRegression::predictUncertainty( const NICE::SparseVector * example, do
 
 void GPHIKRegression::predictUncertainty( const NICE::Vector * example, double & uncertainty ) const
 {  
-  if (gphyper == NULL)
+  if ( ! this->b_isTrained )
      fthrow(Exception, "Regression object not trained yet -- aborting!" );  
   
   switch (varianceApproximation)    
@@ -415,16 +393,6 @@ void GPHIKRegression::restore ( std::istream & is, int format )
       throw;
     }   
     
-    if (pf != NULL)
-    {
-      delete pf;
-      pf = NULL;
-    }
-    if ( confCopy != NULL )
-    {
-      delete confCopy;
-      confCopy = NULL;
-    }
     if (gphyper != NULL)
     {
       delete gphyper;
@@ -456,49 +424,6 @@ void GPHIKRegression::restore ( std::istream & is, int format )
         is >> tmp; // end of block 
         tmp = this->removeEndTag ( tmp );
       }
-      else if ( tmp.compare("pf") == 0 )
-      {
-      
-        is >> tmp; // start of block 
-        if ( this->isEndTag( tmp, "pf" ) )
-        {
-          std::cerr << " ParameterizedFunction object can not be restored. Aborting..." << std::endl;
-          throw;
-        } 
-        
-        std::string transform = this->removeStartTag ( tmp );
-        
-
-        if ( transform == "PFAbsExp" )
-        {
-          this->pf = new PFAbsExp ();
-        } else if ( transform == "PFExp" ) {
-          this->pf = new PFExp ();
-        } else {
-          fthrow(Exception, "Transformation type is unknown " << transform);
-        }
-        
-        pf->restore(is, format);
-        
-        is >> tmp; // end of block 
-        tmp = this->removeEndTag ( tmp );
-      } 
-      else if ( tmp.compare("ConfigCopy") == 0 )
-      {
-        // possibly obsolete safety checks
-        if ( confCopy == NULL )
-          confCopy = new Config;
-        confCopy->clear();
-        
-        
-        //we do not want to read until the end of the file
-        confCopy->setIoUntilEndOfFile( false );
-        //load every options we determined explicitely
-        confCopy->restore(is, format);
-        
-        is >> tmp; // end of block 
-        tmp = this->removeEndTag ( tmp );
-      }
       else if ( tmp.compare("gphyper") == 0 )
       {
         if ( gphyper == NULL )
@@ -510,20 +435,51 @@ void GPHIKRegression::restore ( std::istream & is, int format )
           
         is >> tmp; // end of block 
         tmp = this->removeEndTag ( tmp );
-      }       
+      }
+      else if ( tmp.compare("b_isTrained") == 0 )
+      {
+        is >> b_isTrained;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
+      else if ( tmp.compare("noise") == 0 )
+      {
+        is >> noise;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else if ( tmp.compare("verbose") == 0 )
+      {
+        is >> verbose;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else if ( tmp.compare("debug") == 0 )
+      {
+        is >> debug;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
+      else if ( tmp.compare("uncertaintyPredictionForRegression") == 0 )
+      {
+        is >> uncertaintyPredictionForRegression;
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else if ( tmp.compare("varianceApproximation") == 0 )
+      {
+        unsigned int ui_varianceApproximation;
+        is >> ui_varianceApproximation;        
+        varianceApproximation = static_cast<VarianceApproximation> ( ui_varianceApproximation );
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
       else
       {
       std::cerr << "WARNING -- unexpected GPHIKRegression object -- " << tmp << " -- for restoration... aborting" << std::endl;
       throw;
       }
     }
-
-    //load every settings as well as default options
-    std::cerr << "run this->init" << std::endl;
-    this->init(confCopy, confSection);    
-    std::cerr << "run gphyper->initialize" << std::endl;
-    //TODO!
-    gphyper->initFromConfig ( confCopy, confSection ); //pf, NULL, confSection );
   }
   else
   {
@@ -548,16 +504,6 @@ void GPHIKRegression::store ( std::ostream & os, int format ) const
     os << confSection << std::endl;
     os << this->createEndTag( "confSection" ) << std::endl; 
     
-    os << this->createStartTag( "pf" ) << std::endl;
-    pf->store(os, format);
-    os << this->createEndTag( "pf" ) << std::endl; 
-
-    os << this->createStartTag( "ConfigCopy" ) << std::endl;
-    //we do not want to read until end of file for restoring    
-    confCopy->setIoUntilEndOfFile(false);
-    confCopy->store(os,format);
-    os << this->createEndTag( "ConfigCopy" ) << std::endl; 
-    
     os << this->createStartTag( "gphyper" ) << std::endl;
     //store the underlying data
     //will be done in gphyper->store(of,format)
@@ -565,6 +511,32 @@ void GPHIKRegression::store ( std::ostream & os, int format ) const
     gphyper->store(os, format);
     os << this->createEndTag( "gphyper" ) << std::endl;   
     
+    os << this->createStartTag( "b_isTrained" ) << std::endl;
+    os << b_isTrained << std::endl;
+    os << this->createEndTag( "b_isTrained" ) << std::endl; 
+    
+    os << this->createStartTag( "noise" ) << std::endl;
+    os << noise << std::endl;
+    os << this->createEndTag( "noise" ) << std::endl;
+    
+    
+    os << this->createStartTag( "verbose" ) << std::endl;
+    os << verbose << std::endl;
+    os << this->createEndTag( "verbose" ) << std::endl; 
+    
+    os << this->createStartTag( "debug" ) << std::endl;
+    os << debug << std::endl;
+    os << this->createEndTag( "debug" ) << std::endl; 
+    
+    os << this->createStartTag( "uncertaintyPredictionForRegression" ) << std::endl;
+    os << uncertaintyPredictionForRegression << std::endl;
+    os << this->createEndTag( "uncertaintyPredictionForRegression" ) << std::endl;
+    
+    os << this->createStartTag( "varianceApproximation" ) << std::endl;
+    os << varianceApproximation << std::endl;
+    os << this->createEndTag( "varianceApproximation" ) << std::endl;     
+      
+    
     
     // done
     os << this->createEndTag( "GPHIKRegression" ) << std::endl;    
@@ -582,18 +554,6 @@ void GPHIKRegression::clear ()
     delete gphyper;
     gphyper = NULL;
   }
-  
-  if (pf != NULL)
-  {
-    delete pf;
-    pf = NULL;
-  }
-
-  if ( confCopy != NULL )
-  {
-    delete confCopy; 
-    confCopy = NULL;
-  } 
 }
 
 ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////
@@ -606,7 +566,7 @@ void GPHIKRegression::addExample( const NICE::SparseVector * example,
 			   )
 {
   
-  if ( this->gphyper == NULL )
+  if ( ! this->b_isTrained )
   {
     //call train method instead
     std::cerr << "Regression object not initially trained yet -- run initial training instead of incremental extension!"  << std::endl;

+ 30 - 20
GPHIKRegression.h

@@ -41,31 +41,35 @@ class GPHIKRegression : public NICE::Persistent, public NICE::OnlineLearnable
     /////////////////////////
     /////////////////////////
     
-    // output/debug related settings
+    ///////////////////////////////////
+    // output/debug related settings //   
+    ///////////////////////////////////
     
     /** verbose flag for useful output*/
     bool verbose;
     /** debug flag for several outputs useful for debugging*/
     bool debug;
     
-    // general specifications
+    
+    //////////////////////////////////////
+    //      general specifications      //
+    //////////////////////////////////////
     
     /** Header in configfile where variable settings are stored */
     std::string confSection;
-    /** Configuration file specifying variable settings */
-    NICE::Config *confCopy; 
+    
+    //////////////////////////////////////
+    // classification related variables //
+    //////////////////////////////////////     
+    /** memorize whether the classifier was already trained*/    
+    bool b_isTrained;      
     
     // internal objects 
     
     /** Main object doing all the jobs: training, regression, optimization, ... */
     NICE::FMKGPHyperparameterOptimization *gphyper;    
-    
-    /** Possibility for transforming feature values, parameters can be optimized */
-    NICE::ParameterizedFunction *pf;    
-    
-    
-    
-    
+  
+        
     /** Gaussian label noise for model regularization */
     double noise;
 
@@ -88,17 +92,15 @@ class GPHIKRegression : public NICE::Persistent, public NICE::OnlineLearnable
     /////////////////////////
     /////////////////////////
     
-    /** 
-    * @brief Setup internal variables and objects used
-    * @author Alexander Freytag
-    * @param conf Config file to specify variable settings
-    * @param s_confSection
-    */    
-    void init(const NICE::Config *conf, const std::string & s_confSection);
-       
-
   public:
 
+    /** 
+     * @brief default constructor
+     * @author Alexander Freytag
+     * @brief 06-02-2014 (dd-mm-yyyy)
+     */
+    GPHIKRegression( );    
+    
     /** 
      * @brief standard constructor
      * @author Alexander Freytag
@@ -111,6 +113,14 @@ class GPHIKRegression : public NICE::Persistent, public NICE::OnlineLearnable
      */
     ~GPHIKRegression();
     
+    /** 
+    * @brief Setup internal variables and objects used
+    * @author Alexander Freytag
+    * @param conf Config file to specify variable settings
+    * @param s_confSection
+    */    
+    void initFromConfig(const NICE::Config *conf, const std::string & s_confSection);    
+    
     ///////////////////// ///////////////////// /////////////////////
     //                         GET / SET
     ///////////////////// ///////////////////// /////////////////////      

+ 60 - 1
Quantization.cpp

@@ -1,7 +1,7 @@
 /** 
 * @file Quantization.cpp
 * @brief Quantization of one-dimensional signals with a standard range of [0,1] (Implementation)
-* @author Erik Rodner
+* @author Erik Rodner, Alexander Freytag
 * @date 01/09/2012
 
 */
@@ -11,6 +11,10 @@
 
 using namespace NICE;
 
+Quantization::Quantization( )
+{
+  this->numBins = 1;
+}
 
 Quantization::Quantization( uint numBins )
 {
@@ -37,3 +41,58 @@ uint Quantization::quantize (double value) const
   else if ( value >= 1.0 ) return numBins-1;
   else return (uint)( value * (numBins-1) + 0.5 );
 }
+
+// ---------------------- STORE AND RESTORE FUNCTIONS ----------------------
+
+void Quantization::restore ( std::istream & is, int format )
+{
+  if (is.good())
+  {    
+    std::string tmp;    
+
+    bool b_endOfBlock ( false ) ;
+    
+    while ( !b_endOfBlock )
+    {
+      is >> tmp; // start of block 
+      
+      if ( this->isEndTag( tmp, "Quantization" ) )
+      {
+        b_endOfBlock = true;
+        continue;
+      }                  
+      
+      tmp = this->removeStartTag ( tmp );
+      
+      if ( tmp.compare("numBins") == 0 )
+      {
+          is >> numBins;
+      }
+      else
+      {
+        std::cerr << "WARNING -- unexpected Quantization object -- " << tmp << " -- for restoration... aborting" << std::endl;
+        throw;  
+      }
+      
+      is >> tmp; // end of block 
+      tmp = this->removeEndTag ( tmp );      
+    }
+   }
+  else
+  {
+    std::cerr << "Quantization::restore -- InStream not initialized - restoring not possible!" << std::endl;
+  }
+}
+
+void Quantization::store ( std::ostream & os, int format ) const
+{
+  // show starting point
+  os << this->createStartTag( "Quantization" ) << std::endl;
+  
+  os << this->createStartTag( "numBins" ) << std::endl;
+  os << numBins << std::endl;
+  os << this->createEndTag( "numBins" ) << std::endl;
+    
+  // done
+  os << this->createEndTag( "Quantization" ) << std::endl;
+}

+ 26 - 5
Quantization.h

@@ -1,23 +1,25 @@
 /** 
 * @file Quantization.h
 * @brief Quantization of one-dimensional signals with a standard range of [0,1] (Interface)
-* @author Erik Rodner
+* @author Erik Rodner, Alexander Freytag
 * @date 01/09/2012
 */
 #ifndef _NICE_QUANTIZATIONINCLUDE
 #define _NICE_QUANTIZATIONINCLUDE
 
+// NICE-core includes
 #include <core/basics/types.h>
+#include <core/basics/Persistent.h>
 
 namespace NICE {
   
  /** 
  * @class Quantization
  * @brief Quantization of one-dimensional signals with a standard range of [0,1]
- * @author Erik Rodner
+ * @author Erik Rodner, Alexander Freytag
  */
  
-class Quantization
+class Quantization  : public NICE::Persistent
 {
 
   /** TODO
@@ -28,11 +30,23 @@ class Quantization
 
   protected:
 
-  uint numBins;
+    uint numBins;
 
   public:
 
-  /** simple constructor */
+  /** 
+   * @brief default constructor
+   * @author Alexander Freytag
+   * @date 06-02-2014
+   */
+  
+  Quantization( );
+  
+  /**
+   * @brief simple constructor
+   * @author Erik Rodner
+   * @date 
+   */
   Quantization( uint numBins );
     
   /** simple destructor */
@@ -61,6 +75,13 @@ class Quantization
   * @return index of the bin entry corresponding to the given signal value
   */
   virtual uint quantize (double value) const;
+  
+  ///////////////////// INTERFACE PERSISTENT /////////////////////
+  // interface specific methods for store and restore
+  ///////////////////// INTERFACE PERSISTENT /////////////////////
+  virtual void restore ( std::istream & is, int format = 0 );
+  virtual void store ( std::ostream & os, int format = 0 ) const; 
+  virtual void clear () {};  
      
 };
 

+ 2 - 0
tests/TestGPHIKRegression.cpp

@@ -117,6 +117,8 @@ void TestGPHIKRegression::testRegressionHoldInData()
   NICE::GPHIKRegression * regressionMethod;
   regressionMethod = new NICE::GPHIKRegression ( &conf );
   regressionMethod->train ( examplesTrain , yValues );
+  std::cerr << " yValues used for training regression object" << std::endl;
+  std::cerr << yValues << std::endl;
   
   double holdInLoss ( 0.0 );
   

部分文件因为文件数量过多而无法显示