Parcourir la source

towards new persistent layout without configs, not completed yet

Alexander Freytag il y a 11 ans
Parent
commit
04a46dcd66

+ 65 - 31
FMKGPHyperparameterOptimization.cpp

@@ -212,7 +212,7 @@ void FMKGPHyperparameterOptimization::updateAfterIncrement (
 /////////////////////////////////////////////////////
 /////////////////////////////////////////////////////
 
-FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization( const bool & b_performRegression )
+FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization( )
 {
   // initialize pointer variables
   pf = NULL;
@@ -231,31 +231,29 @@ FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization( const bool & b
   //stupid unneeded default values
   binaryLabelPositive = -1;
   binaryLabelNegative = -2;
+  knownClasses.clear();  
   
   this->b_usePreviousAlphas = false;
+  this->b_performRegression = false;
+}
+
+FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization( const bool & b_performRegression ) :FMKGPHyperparameterOptimization()
+{
   this->b_performRegression = b_performRegression;
 }
 
+FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization ( const Config *_conf, const string & _confSection )
+    : FMKGPHyperparameterOptimization()
+{
+  this->initFromConfig ( _conf, _confSection );
+}
+
 FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization ( const Config *_conf, ParameterizedFunction *_pf, FastMinKernel *_fmk, const string & _confSection )
+    : FMKGPHyperparameterOptimization()
 {
-   // initialize pointer variables
-  pf = NULL;
-  eig = NULL;
-  linsolver = NULL;
-  fmk = NULL;
-  q = NULL;
-  precomputedTForVarEst = NULL;
-  ikmsum = NULL;
-  
-  //stupid unneeded default values
-  binaryLabelPositive = -1;
-  binaryLabelNegative = -2;  
-  knownClasses.clear();
-  
-  if ( _fmk == NULL )
-    this->initialize ( _conf, _pf ); //then the confSection is also the default value
-  else
-    this->initialize ( _conf, _pf, _fmk, _confSection );
+  this->initFromConfig ( _conf, _confSection );
+  this->setParameterizedFunction( _pf );
+  this->setFastMinKernel( _fmk );
 }
 
 FMKGPHyperparameterOptimization::~FMKGPHyperparameterOptimization()
@@ -280,20 +278,20 @@ FMKGPHyperparameterOptimization::~FMKGPHyperparameterOptimization()
     delete ikmsum;
 }
 
-void FMKGPHyperparameterOptimization::initialize ( const Config *_conf, ParameterizedFunction *_pf, FastMinKernel *_fmk, const std::string & _confSection )
+void FMKGPHyperparameterOptimization::initFromConfig ( const Config *_conf, const std::string & _confSection )
 {
 
-  if ( _fmk != NULL )
-  {
-    if ( this->fmk != NULL )
-    {
-      delete this->fmk;
-      fmk = NULL;
-    }    
-    this->fmk = _fmk;
-  }
-  
-  this->pf = _pf;
+//   if ( _fmk != NULL )
+//   {
+//     if ( this->fmk != NULL )
+//     {
+//       delete this->fmk;
+//       fmk = NULL;
+//     }    
+//     this->fmk = _fmk;
+//   }
+//   
+//   this->pf = _pf;
  
   
   this->verbose = _conf->gB ( _confSection, "verbose", false );
@@ -434,6 +432,42 @@ std::set<int> FMKGPHyperparameterOptimization::getKnownClassNumbers ( ) const
   return this->knownClasses;
 }
 
+void FMKGPHyperparameterOptimization::setPerformRegression ( const bool & b_performRegression )
+{
+  //TODO check previously whether we already trained
+  if ( false )
+    throw NICE::Exception ( "FMPGKHyperparameterOptimization already initialized - switching between classification and regression not allowed!" );
+  else
+    this->b_performRegression = b_performRegression;
+}
+
+
+void FMKGPHyperparameterOptimization::setParameterizedFunction ( ParameterizedFunction *pf )
+{
+  //TODO check previously whether we already trained  
+  this->pf = pf;
+}
+
+
+void FMKGPHyperparameterOptimization::setFastMinKernel ( FastMinKernel * _fmk )
+{
+  //TODO check previously whether we already trained  
+  if ( _fmk != NULL )
+  {
+    if ( this->fmk != NULL )
+    {
+      delete this->fmk;
+      this->fmk = NULL;
+    }    
+    this->fmk = _fmk;
+  }  
+}
+
+void FMKGPHyperparameterOptimization::setNrOfEigenvaluesToConsiderForVarApprox ( const int & i_nrOfEigenvaluesToConsiderForVarApprox )
+{
+  //TODO check previously whether we already trained
+  this->nrOfEigenvaluesToConsiderForVarApprox = i_nrOfEigenvaluesToConsiderForVarApprox;  
+}
 
 
 ///////////////////// ///////////////////// /////////////////////

+ 55 - 9
FMKGPHyperparameterOptimization.h

@@ -246,21 +246,37 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
 
     
   public:  
+
+    /**
+    * @brief default constructor
+    * @author Alexander Freytag
+    */
+    FMKGPHyperparameterOptimization( );
     
     /**
     * @brief simple constructor
     * @author Alexander Freytag
     */
-    FMKGPHyperparameterOptimization( const bool & b_performRegression = false);
-        
+    FMKGPHyperparameterOptimization( const bool & b_performRegression );
+
+    /**
+    * @brief recommended constructor, only calls this->initFromConfig with the same input arguments
+    *
+    * @param conf configuration object specifying variable settings
+    * @param confSection section within the config file where the settings are stored
+    */
+    FMKGPHyperparameterOptimization( const Config *conf, const std::string & confSection = "GPHIKClassifier" );
+    
+    
     /**
-    * @brief standard constructor
+    * @brief recommended constructor, calls this->initFromConfig and the setters for pf and fmk
     *
     * @param pf pointer to a parameterized function used within the minimum kernel min(f(x_i), f(x_j)) (will not be deleted)
     * @param noise GP label noise
     * @param fmk pointer to a pre-initialized structure (will be deleted)
     */
-    FMKGPHyperparameterOptimization( const Config *conf, ParameterizedFunction *pf, FastMinKernel *fmk = NULL, const std::string & confSection = "GPHIKClassifier" );
+    FMKGPHyperparameterOptimization( const Config *conf, ParameterizedFunction *_pf, FastMinKernel *_fmk, const std::string & confSection = "GPHIKClassifier" );
       
     /**
     * @brief standard destructor
@@ -268,6 +284,13 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
     */
     virtual ~FMKGPHyperparameterOptimization();
     
+    /**
+    * @brief Set variables and parameters to default or config-specified values
+    * @author Alexander Freytag
+    */       
+    void initFromConfig( const Config *conf, const std::string & confSection = "GPHIKClassifier" );
+    
+    
     ///////////////////// ///////////////////// /////////////////////
     //                         GET / SET
     ///////////////////// ///////////////////// /////////////////////
@@ -289,15 +312,38 @@ class FMKGPHyperparameterOptimization : public NICE::Persistent, public NICE::On
     */    
     std::set<int> getKnownClassNumbers ( ) const;
     
+    /**
+     * @brief Change between classification and regression, only allowed if not trained. Otherwise, exceptions will be thrown...
+     * @author Alexander Freytag
+     * @date 05-02-2014 (dd-mm-yyyy)
+     */
+    void setPerformRegression ( const bool & b_performRegression );
+
+      /**
+     * @brief Set the ParameterizedFunction object. Only allowed if not trained. Otherwise, exceptions will be thrown...
+     * @author Alexander Freytag
+     * @date 05-02-2014 (dd-mm-yyyy)
+     */
+    void setParameterizedFunction ( ParameterizedFunction *pf );
+    
+    /**
+     * @brief Set the FastMinKernel object. Only allowed if not trained. Otherwise, exceptions will be thrown...
+     * @author Alexander Freytag
+     * @date 05-02-2014 (dd-mm-yyyy)
+     */    
+    void setFastMinKernel ( FastMinKernel *fmk );  
+
+    /**
+     * @brief Set the number of EV we consider for variance approximation. Only allowed if not trained. Otherwise, exceptions will be thrown...
+     * @author Alexander Freytag
+     * @date 06-02-2014 (dd-mm-yyyy)
+     */        
+    void setNrOfEigenvaluesToConsiderForVarApprox ( const int & i_nrOfEigenvaluesToConsiderForVarApprox );
+    
     ///////////////////// ///////////////////// /////////////////////
     //                      CLASSIFIER STUFF
     ///////////////////// ///////////////////// /////////////////////  
     
-    /**
-    * @brief Set variables and parameters to default or config-specified values
-    * @author Alexander Freytag
-    */       
-    void initialize( const Config *conf, ParameterizedFunction *pf, FastMinKernel *fmk = NULL, const std::string & confSection = "GPHIKClassifier" );
        
 #ifdef NICE_USELIB_MATIO
     /**

+ 25 - 2
FastMinKernel.cpp

@@ -21,7 +21,11 @@ using namespace NICE;
 /* protected methods*/
 
 
-/* public methods*/
+/////////////////////////////////////////////////////
+/////////////////////////////////////////////////////
+//                 PUBLIC METHODS
+/////////////////////////////////////////////////////
+/////////////////////////////////////////////////////
 
 
 FastMinKernel::FastMinKernel()
@@ -75,7 +79,24 @@ FastMinKernel::~FastMinKernel()
 
 ///////////////////// ///////////////////// /////////////////////
 //                         GET / SET
-///////////////////// ///////////////////// ///////////////////// 
+//                   INCLUDING ACCESS OPERATORS
+///////////////////// ///////////////////// //////////////////// 
+
+int FastMinKernel::get_n() const
+{
+  return n;
+}
+
+
+int FastMinKernel::get_d() const 
+{
+  return d;
+}
+
+double FastMinKernel::getSparsityRatio()  const
+{
+  return X_sorted.computeSparsityRatio();
+}
 
 void FastMinKernel::setVerbose( const bool & _verbose)
 {
@@ -98,6 +119,8 @@ bool FastMinKernel::getDebug( )   const
   return debug;
 }
 
+
+      
 ///////////////////// ///////////////////// /////////////////////
 //                      CLASSIFIER STUFF
 ///////////////////// ///////////////////// /////////////////////

+ 11 - 10
FastMinKernel.h

@@ -93,14 +93,14 @@ namespace NICE {
       //------------------------------------------------------
       
       /** 
-      * @brief dummy constructor
+      * @brief default constructor
       * @author Alexander Freytag
       * @date 20-04-2012 (dd-mm-yyyy)
       */
       FastMinKernel();      
       
       /** 
-      * @brief initialize with some data
+      * @brief recommended constructor, initialize with some data
       * @author Alexander Freytag
       * @date 06-12-2011 (dd-mm-yyyy)
       */
@@ -108,7 +108,7 @@ namespace NICE {
 
       
       /**
-      * @brief Just another sparse data structure
+      * @brief recommended constructor, just another sparse data structure
       *
       * @param X vector of sparse vector pointers
       * @param noise GP noise
@@ -117,7 +117,7 @@ namespace NICE {
 
 #ifdef NICE_USELIB_MATIO
       /**
-      * @brief intialize with some data given in a matlab-sparse struct and restricted with an example index
+      * @brief recommended constructor, initialize with some data given in a matlab-sparse struct and restricted with an example index
       *
       * @param X matlab-struct containing the feature vectors
       * @param noise additional noise variance of the labels
@@ -133,9 +133,10 @@ namespace NICE {
       */
       ~FastMinKernel();
 
-      //------------------------------------------------------
-      // several get and set methods including access operators
-      //------------------------------------------------------
+    ///////////////////// ///////////////////// /////////////////////
+    //                         GET / SET
+    //                   INCLUDING ACCESS OPERATORS
+    ///////////////////// ///////////////////// /////////////////////       
       
       
       void setApproximationScheme(const ApproximationScheme & _approxScheme = MEDIAN) {approxScheme = _approxScheme;};
@@ -147,21 +148,21 @@ namespace NICE {
       * @author Alexander Freytag
       * @date 07-12-2011 (dd-mm-yyyy)
       */
-      int get_n() const {return n;};
+      int get_n() const;
       
       /** 
       * @brief Get number of dimensions
       * @author Alexander Freytag
       * @date 07-12-2011 (dd-mm-yyyy)
       */
-      int get_d() const {return d;};
+      int get_d() const;
 
       /** 
       * @brief Computes the ratio of sparsity across the matrix
       * @author Alexander Freytag
       * @date 11-01-2012 (dd-mm-yyyy)
       */
-      double getSparsityRatio(){return X_sorted.computeSparsityRatio();};
+      double getSparsityRatio() const;
       
       /** set verbose flag used for restore-functionality*/
       void setVerbose( const bool & _verbose);

+ 2 - 2
FeatureMatrixT.h

@@ -38,7 +38,7 @@ namespace NICE {
  * @author Alexander Freytag
  */  
   
-template<class T> class FeatureMatrixT : NICE::Persistent
+template<class T> class FeatureMatrixT : public NICE::Persistent
 {
 
   protected:
@@ -273,7 +273,7 @@ template<class T> class FeatureMatrixT : NICE::Persistent
     * @author Alexander Freytag
     * @date 11-01-2012 (dd-mm-yyyy)
     */
-    double computeSparsityRatio();
+    double computeSparsityRatio() const;
 
     /** 
     * @brief add a new feature and insert its elements in the already ordered structure

+ 1 - 1
FeatureMatrixT.tcc

@@ -485,7 +485,7 @@ namespace NICE {
     
     //Computes the ratio of sparsity across the matrix
     template <typename T>
-    double FeatureMatrixT<T>:: computeSparsityRatio()
+    double FeatureMatrixT<T>:: computeSparsityRatio() const
     {
       double ratio(0.0);
       for (typename std::vector<NICE::SortedVectorSparse<T> >::const_iterator it = features.begin(); it != features.end(); it++)

+ 106 - 106
GPHIKClassifier.cpp

@@ -28,27 +28,61 @@ using namespace NICE;
 /////////////////////////////////////////////////////
 /////////////////////////////////////////////////////
 
-void GPHIKClassifier::init(const Config *conf, const string & s_confSection)
+
+
+/////////////////////////////////////////////////////
+/////////////////////////////////////////////////////
+//                 PUBLIC METHODS
+/////////////////////////////////////////////////////
+/////////////////////////////////////////////////////
+GPHIKClassifier::GPHIKClassifier( ) 
 {
-  //copy the given config to have it accessible lateron
-  if ( this->confCopy != conf )
+  //default settings, may be overwritten later on
+  gphyper = NULL;
+  pf = NULL;
+  //just a default value
+  uncertaintyPredictionForClassification = false;
+  
+  this->confSection = "";
+  
+}
+
+GPHIKClassifier::GPHIKClassifier( const Config *conf, const string & s_confSection ) 
+{
+  //default settings, may be overwritten later on
+  gphyper = NULL;
+  pf = NULL;
+  //just a default value
+  uncertaintyPredictionForClassification = false;
+  
+  this->confSection = s_confSection;
+  
+  // if no config file was given, we either restore the classifier from an external file, or run ::init with 
+  // an empty config (using default values thereby) when calling the train-method
+  if ( conf != NULL )
   {
-    if ( this->confCopy != NULL )
-      delete this->confCopy;
-    
-    this->confCopy = new Config ( *conf );
-    //we do not want to read until end of file for restoring    
-    this->confCopy->setIoUntilEndOfFile(false);        
+    this->initFromConfig(conf, confSection);
   }
-  
+}
 
+GPHIKClassifier::~GPHIKClassifier()
+{
+  if ( gphyper != NULL )
+    delete gphyper;
   
-  double parameterUpperBound = confCopy->gD(confSection, "parameter_upper_bound", 5.0 );
-  double parameterLowerBound = confCopy->gD(confSection, "parameter_lower_bound", 1.0 );  
+  if (pf != NULL)
+    delete pf;
 
-  this->noise = confCopy->gD(confSection, "noise", 0.01);
+}
+
+void GPHIKClassifier::initFromConfig(const Config *conf, const string & s_confSection)
+{ 
+  double parameterUpperBound = conf->gD(confSection, "parameter_upper_bound", 5.0 );
+  double parameterLowerBound = conf->gD(confSection, "parameter_lower_bound", 1.0 );  
 
-  string transform = confCopy->gS(confSection, "transform", "absexp" );
+  this->noise = conf->gD(confSection, "noise", 0.01);
+
+  string transform = conf->gS(confSection, "transform", "absexp" );
   
   if (pf == NULL)
   {
@@ -70,81 +104,52 @@ void GPHIKClassifier::init(const Config *conf, const string & s_confSection)
     //we already know the pf from the restore-function
   }
   this->confSection = confSection;
-  this->verbose = confCopy->gB(confSection, "verbose", false);
-  this->debug = confCopy->gB(confSection, "debug", false);
-  this->uncertaintyPredictionForClassification = confCopy->gB( confSection, "uncertaintyPredictionForClassification", false );
+  this->verbose = conf->gB(confSection, "verbose", false);
+  this->debug = conf->gB(confSection, "debug", false);
+  this->uncertaintyPredictionForClassification = conf->gB( confSection, "uncertaintyPredictionForClassification", false );
   
 
    
   //how do we approximate the predictive variance for classification uncertainty?
-  string s_varianceApproximation = confCopy->gS(confSection, "varianceApproximation", "approximate_fine"); //default: fine approximative uncertainty prediction
+  string s_varianceApproximation = conf->gS(confSection, "varianceApproximation", "approximate_fine"); //default: fine approximative uncertainty prediction
   if ( (s_varianceApproximation.compare("approximate_rough") == 0) || ((s_varianceApproximation.compare("1") == 0)) )
   {
     this->varianceApproximation = APPROXIMATE_ROUGH;
     
     //no additional eigenvalue is needed here at all.
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0 );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
+    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0 );
   }
   else if ( (s_varianceApproximation.compare("approximate_fine") == 0) || ((s_varianceApproximation.compare("2") == 0)) )
   {
     this->varianceApproximation = APPROXIMATE_FINE;
     
     //security check - compute at least one eigenvalue for this approximation strategy
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", std::max( confCopy->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( std::max( conf->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
+    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", std::max( conf->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
   }
   else if ( (s_varianceApproximation.compare("exact") == 0)  || ((s_varianceApproximation.compare("3") == 0)) )
   {
     this->varianceApproximation = EXACT;
     
     //no additional eigenvalue is needed here at all.
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );    
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 1 );
+    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );    
+    //TODO check why here 1, and 0 above    
   }
   else
   {
     this->varianceApproximation = NONE;
     
     //no additional eigenvalue is needed here at all.
-    this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 1 );
+    //this->conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );
   } 
   
   if ( this->verbose )
     std::cerr << "varianceApproximationStrategy: " << s_varianceApproximation  << std::endl;
-}
-
-/////////////////////////////////////////////////////
-/////////////////////////////////////////////////////
-//                 PUBLIC METHODS
-/////////////////////////////////////////////////////
-/////////////////////////////////////////////////////
-GPHIKClassifier::GPHIKClassifier( const Config *conf, const string & s_confSection ) 
-{
-  //default settings, may be overwritten lateron
-  gphyper = NULL;
-  pf = NULL;
-  confCopy = NULL;
-  //just a default value
-  uncertaintyPredictionForClassification = false;
   
-  this->confSection = s_confSection;
-  
-  // if no config file was given, we either restore the classifier from an external file, or run ::init with 
-  // an emtpy config (using default values thereby) when calling the train-method
-  if ( conf != NULL )
-  {
-    this->init(conf, confSection);
-  }
-}
-
-GPHIKClassifier::~GPHIKClassifier()
-{
-  if ( gphyper != NULL )
-    delete gphyper;
-  
-  if (pf != NULL)
-    delete pf;
-
-  if ( confCopy != NULL )
-    delete confCopy;
+  this->gphyper->initFromConfig ( conf, confSection /*possibly delete the handing of confSection*/);
 }
 
 ///////////////////// ///////////////////// /////////////////////
@@ -254,11 +259,12 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
     std::cerr << "GPHIKClassifier::train" << std::endl;
   }
   
-  if ( this->confCopy == NULL )
+//   if ( this->conf == NULL )
+  if ( this->pf == NULL ) // pf will only be set in ::init or in ::restore
   {
     std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
     NICE::Config tmpConfEmpty ;
-    this->init ( &tmpConfEmpty, this->confSection );
+    this->initFromConfig ( &tmpConfEmpty, this->confSection );
   }
 
   Timer t;
@@ -269,14 +275,28 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
   if (verbose)
     std::cerr << "Time used for setting up the fmk object: " << t.getLast() << std::endl;  
   
-  if (gphyper != NULL)
+ /* if (gphyper != NULL)
      delete gphyper;
-  
+ */ 
+  if ( gphyper == NULL )
+  {
+    // security check, which should always be skipped since gphyper is created in this->init
+    NICE::Config tmpConfEmpty ;
+    gphyper = new FMKGPHyperparameterOptimization ( &tmpConfEmpty, confSection ); 
+  }
   
   if ( ( varianceApproximation != APPROXIMATE_FINE) )
-    confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0);
-  
-  gphyper = new FMKGPHyperparameterOptimization ( confCopy, pf, fmk, confSection ); 
+  {
+    //TODO check whether this is easier than the version above
+    this->gphyper->setNrOfEigenvaluesToConsiderForVarApprox ( 0 );
+    //conf->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0);
+  }
+   
+  //those two methods replace the previous call of the "full" constructor
+  gphyper->setParameterizedFunction ( pf );
+  gphyper->setFastMinKernel ( fmk );  
+//   gphyper->init (pf, fmk ); 
+//   gphyper = new FMKGPHyperparameterOptimization ( conf, pf, fmk, confSection ); 
 
   if (verbose)
     cerr << "Learning ..." << endl;
@@ -336,11 +356,12 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
   if (verbose)
     std::cerr << "GPHIKClassifier::train" << std::endl;
   
-  if ( this->confCopy == NULL )
+//   if ( this->conf == NULL )
+  if ( this->pf == NULL ) // pf will only be set in ::init or in ::restore
   {
     std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
     NICE::Config tmpConfEmpty ;
-    this->init ( &tmpConfEmpty, this->confSection );
+    this->initFromConfig ( &tmpConfEmpty, this->confSection );
   }  
 
   Timer t;
@@ -350,9 +371,18 @@ void GPHIKClassifier::train ( const std::vector< const NICE::SparseVector *> & e
   if (verbose)
     std::cerr << "Time used for setting up the fmk object: " << t.getLast() << std::endl;  
   
-  if (gphyper != NULL)
-     delete gphyper;
-  gphyper = new FMKGPHyperparameterOptimization ( confCopy, pf, fmk, confSection ); 
+//   if (gphyper != NULL)
+//      delete gphyper;
+  if ( gphyper == NULL )
+  {
+    // security check, which should always be skipped since gphyper is created in this->init
+    NICE::Config tmpConfEmpty ;
+    gphyper = new FMKGPHyperparameterOptimization ( &tmpConfEmpty, confSection ); 
+  }
+  //those two methods replace the previous call of the "full" constructor
+  gphyper->setParameterizedFunction ( pf );
+  gphyper->setFastMinKernel ( fmk );
+
 
   if (verbose)
     cerr << "Learning ..." << endl;
@@ -496,11 +526,7 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
       delete pf;
       pf = NULL;
     }
-    if ( confCopy != NULL )
-    {
-      delete confCopy;
-      confCopy = NULL;
-    }
+    //
     if (gphyper != NULL)
     {
       delete gphyper;
@@ -559,22 +585,6 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
         is >> tmp; // end of block 
         tmp = this->removeEndTag ( tmp );
       } 
-      else if ( tmp.compare("ConfigCopy") == 0 )
-      {
-        // possibly obsolete safety checks
-        if ( confCopy == NULL )
-          confCopy = new Config;
-        confCopy->clear();
-        
-        
-        //we do not want to read until the end of the file
-        confCopy->setIoUntilEndOfFile( false );
-        //load every options we determined explicitely
-        confCopy->restore(is, format);
-        
-        is >> tmp; // end of block 
-        tmp = this->removeEndTag ( tmp );
-      }
       else if ( tmp.compare("gphyper") == 0 )
       {
         if ( gphyper == NULL )
@@ -595,10 +605,11 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
     }
 
     //load every settings as well as default options
-    std::cerr << "run this->init" << std::endl;
-    this->init(confCopy, confSection);    
-    std::cerr << "run gphyper->initialize" << std::endl;
-    gphyper->initialize ( confCopy, pf, NULL, confSection );
+    //TODO check that this is not needed anymore!!!
+//     std::cerr << "run this->init" << std::endl;
+//     this->init(confCopy, confSection);    
+//     std::cerr << "run gphyper->initialize" << std::endl;
+//     gphyper->initialize ( confCopy, pf, NULL, confSection );
   }
   else
   {
@@ -626,13 +637,7 @@ void GPHIKClassifier::store ( std::ostream & os, int format ) const
     os << this->createStartTag( "pf" ) << std::endl;
     pf->store(os, format);
     os << this->createEndTag( "pf" ) << std::endl; 
-
-    os << this->createStartTag( "ConfigCopy" ) << std::endl;
-    //we do not want to read until end of file for restoring    
-    confCopy->setIoUntilEndOfFile(false);
-    confCopy->store(os,format);
-    os << this->createEndTag( "ConfigCopy" ) << std::endl; 
-    
+   
     os << this->createStartTag( "gphyper" ) << std::endl;
     //store the underlying data
     //will be done in gphyper->store(of,format)
@@ -664,11 +669,6 @@ void GPHIKClassifier::clear ()
     pf = NULL;
   }
 
-  if ( confCopy != NULL )
-  {
-    delete confCopy; 
-    confCopy = NULL;
-  } 
 }
 
 ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////

+ 20 - 12
GPHIKClassifier.h

@@ -52,8 +52,8 @@ class GPHIKClassifier : public NICE::Persistent, public NICE::OnlineLearnable
     
     /** Header in configfile where variable settings are stored */
     std::string confSection;
-    /** Configuration file specifying variable settings */
-    NICE::Config *confCopy; 
+//     /** Configuration file specifying variable settings */
+//     NICE::Config *confCopy; 
     
     // internal objects 
     
@@ -87,23 +87,23 @@ class GPHIKClassifier : public NICE::Persistent, public NICE::OnlineLearnable
     //  PROTECTED METHODS  //
     /////////////////////////
     /////////////////////////
-    
-    /** 
-    * @brief Setup internal variables and objects used
-    * @author Alexander Freytag
-    * @param conf Config file to specify variable settings
-    * @param s_confSection
-    */    
-    void init(const NICE::Config *conf, const std::string & s_confSection);
-       
+          
 
   public:
 
+    /** 
+     * @brief default constructor
+     * @author Alexander Freytag
+     * @date 05-02-2014 ( dd-mm-yyyy)
+     */
+    GPHIKClassifier( );
+     
+    
     /** 
      * @brief standard constructor
      * @author Alexander Freytag
      */
-    GPHIKClassifier( const NICE::Config *conf = NULL, const std::string & s_confSection = "GPHIKClassifier" );
+    GPHIKClassifier( const NICE::Config *conf , const std::string & s_confSection = "GPHIKClassifier" );
       
     /**
      * @brief simple destructor
@@ -111,6 +111,14 @@ class GPHIKClassifier : public NICE::Persistent, public NICE::OnlineLearnable
      */
     ~GPHIKClassifier();
     
+    /** 
+    * @brief Setup internal variables and objects used
+    * @author Alexander Freytag
+    * @param conf Config file to specify variable settings
+    * @param s_confSection
+    */    
+    void initFromConfig(const NICE::Config *conf, const std::string & s_confSection);    
+    
     ///////////////////// ///////////////////// /////////////////////
     //                         GET / SET
     ///////////////////// ///////////////////// /////////////////////      

+ 2 - 1
GPHIKRegression.cpp

@@ -522,7 +522,8 @@ void GPHIKRegression::restore ( std::istream & is, int format )
     std::cerr << "run this->init" << std::endl;
     this->init(confCopy, confSection);    
     std::cerr << "run gphyper->initialize" << std::endl;
-    gphyper->initialize ( confCopy, pf, NULL, confSection );
+    //TODO!
+    gphyper->initFromConfig ( confCopy, confSection ); //pf, NULL, confSection );
   }
   else
   {

+ 35 - 3
tests/TestGPHIKPersistent.cpp

@@ -24,6 +24,7 @@ using namespace std; //C basics
 using namespace NICE;  // nice-core
 
 const bool verboseStartEnd = true;
+const bool verbose = true;
 
 
 CPPUNIT_TEST_SUITE_REGISTRATION( TestGPHIKPersistent );
@@ -77,8 +78,15 @@ void TestGPHIKPersistent::testPersistentMethods()
   // TRAIN CLASSIFIER FROM SCRATCH
   
   classifier = new GPHIKClassifier ( &conf );  
-    
-  classifier->train ( examplesTrain , yMultiTrain );
+  
+  yBinTrain *= 2;
+  yBinTrain -= 1;
+  yBinTrain *= -1;
+  
+  std::cerr << yBinTrain << std::endl;
+  
+  std::cerr << "train classifier with artifially disturbed labels" << std::endl;
+  classifier->train ( examplesTrain , yBinTrain);//yMultiTrain );
   
   
   // TEST STORING ABILITIES
@@ -96,7 +104,7 @@ void TestGPHIKPersistent::testPersistentMethods()
   
   // TEST RESTORING ABILITIES
     
-  NICE::GPHIKClassifier * classifierRestored = new GPHIKClassifier;  
+  NICE::GPHIKClassifier * classifierRestored = new GPHIKClassifier();  
       
   std::string s_destination_load ( "myClassifier.txt" );
   
@@ -170,6 +178,21 @@ void TestGPHIKPersistent::testPersistentMethods()
       mapClNoToIdxTest.insert ( std::pair<int,int> ( *clTestIt, i )  ); 
           
   
+  if ( verbose )
+  {
+    std::cout << "Train data mapping: " << std::endl;
+    for ( std::map<int,int>::const_iterator clTrainIt = mapClNoToIdxTrain.begin(); clTrainIt != mapClNoToIdxTrain.end(); clTrainIt++ )
+    {
+      std::cout << " " << clTrainIt->first << " " << clTrainIt->second << std::endl;
+    }
+
+    std::cout << "Test data mapping: " << std::endl;
+    for ( std::map<int,int>::const_iterator clTestIt = mapClNoToIdxTest.begin(); clTestIt != mapClNoToIdxTest.end(); clTestIt++ )
+    {
+      std::cout << " " << clTestIt->first << " " << clTestIt->second << std::endl;
+    }    
+  }
+  
   NICE::Matrix confusionMatrix         ( noClassesKnownTraining, noClassesKnownTest, 0.0);
   NICE::Matrix confusionMatrixRestored ( noClassesKnownTraining, noClassesKnownTest, 0.0);
   
@@ -202,6 +225,15 @@ void TestGPHIKPersistent::testPersistentMethods()
 
   confusionMatrixRestored.normalizeColumnsL1();
   double arrRestored ( confusionMatrixRestored.trace()/confusionMatrixRestored.cols() );
+  
+  if ( verbose )
+  {
+    std::cout << "confusionMatrix: " << confusionMatrix << std::endl;
+    std::cout << "confusionMatrixRestored: " << confusionMatrixRestored << std::endl;
+    std::cout << "arr: " << arr << std::endl;
+    std::cout << "arrRestored: " << arrRestored << std::endl;   
+    
+  }
 
   
   CPPUNIT_ASSERT_DOUBLES_EQUAL( arr, arrRestored, 1e-8);