Преглед изворни кода

Matlab interface for GPHIKRegression

Alexander Freytag пре 11 година
родитељ
комит
c349ab6991

+ 11 - 11
matlab/GPHIKClassifier.m

@@ -1,4 +1,4 @@
-% brief:    MATLAB class wrapper for the underlying Matlab-C++ Interface (GPHIK.cpp)
+% brief:    MATLAB class wrapper for the underlying Matlab-C++ Interface (GPHIKClassifierMex.cpp)
 % author:   Alexander Freytag
 % author:   Alexander Freytag
 % date:     07-01-2014 (dd-mm-yyyy)
 % date:     07-01-2014 (dd-mm-yyyy)
 classdef GPHIKClassifier < handle
 classdef GPHIKClassifier < handle
@@ -15,12 +15,12 @@ classdef GPHIKClassifier < handle
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%        
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%        
         %% constructor - create object
         %% constructor - create object
         function this = GPHIKClassifier(varargin)
         function this = GPHIKClassifier(varargin)
-            this.objectHandle = GPHIK('new', varargin{:});
+            this.objectHandle = GPHIKClassifierMex('new', varargin{:});
         end
         end
         
         
         %% destructor - delete object
         %% destructor - delete object
         function delete(this)
         function delete(this)
-            GPHIK('delete', this.objectHandle);
+            GPHIKClassifierMex('delete', this.objectHandle);
         end
         end
 
 
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -28,22 +28,22 @@ classdef GPHIKClassifier < handle
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%         
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%         
         %% train - standard train - assumes initialized object
         %% train - standard train - assumes initialized object
         function varargout = train(this, varargin)
         function varargout = train(this, varargin)
-            [varargout{1:nargout}] = GPHIK('train', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('train', this.objectHandle, varargin{:});
         end
         end
         
         
         %% classify
         %% classify
         function varargout = classify(this, varargin)
         function varargout = classify(this, varargin)
-            [varargout{1:nargout}] = GPHIK('classify', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('classify', this.objectHandle, varargin{:});
         end 
         end 
         
         
         %% uncertainty - Uncertainty prediction
         %% uncertainty - Uncertainty prediction
         function varargout = uncertainty(this, varargin)
         function varargout = uncertainty(this, varargin)
-            [varargout{1:nargout}] = GPHIK('uncertainty', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('uncertainty', this.objectHandle, varargin{:});
         end        
         end        
 
 
         %% test - evaluate classifier on whole test set
         %% test - evaluate classifier on whole test set
         function varargout = test(this, varargin)
         function varargout = test(this, varargin)
-            [varargout{1:nargout}] = GPHIK('test', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('test', this.objectHandle, varargin{:});
         end
         end
         
         
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -51,11 +51,11 @@ classdef GPHIKClassifier < handle
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %% addExample
         %% addExample
         function varargout = addExample(this, varargin)
         function varargout = addExample(this, varargin)
-            [varargout{1:nargout}] = GPHIK('addExample', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('addExample', this.objectHandle, varargin{:});
         end 
         end 
         %% addMultipleExamples
         %% addMultipleExamples
         function varargout = addMultipleExamples(this, varargin)
         function varargout = addMultipleExamples(this, varargin)
-            [varargout{1:nargout}] = GPHIK('addMultipleExamples', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('addMultipleExamples', this.objectHandle, varargin{:});
         end
         end
         
         
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -63,11 +63,11 @@ classdef GPHIKClassifier < handle
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
         %% store - store the classifier to an external file
         %% store - store the classifier to an external file
         function varargout = store(this, varargin)
         function varargout = store(this, varargin)
-            [varargout{1:nargout}] = GPHIK('store', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('store', this.objectHandle, varargin{:});
         end
         end
         %% restore -  load classifier from external file 
         %% restore -  load classifier from external file 
         function varargout = restore(this, varargin)
         function varargout = restore(this, varargin)
-            [varargout{1:nargout}] = GPHIK('restore', this.objectHandle, varargin{:});
+            [varargout{1:nargout}] = GPHIKClassifierMex('restore', this.objectHandle, varargin{:});
         end
         end
     end
     end
 end
 end

+ 3 - 3
matlab/GPHIK.cpp → matlab/GPHIKClassifierMex.cpp

@@ -1,5 +1,5 @@
 /** 
 /** 
-* @file GPHIK.cpp
+* @file GPHIKClassifierMex.cpp
 * @author Alexander Freytag
 * @author Alexander Freytag
 * @date 07-01-2014 (dd-mm-yyyy)
 * @date 07-01-2014 (dd-mm-yyyy)
 * @brief Matlab-Interface of our GPHIKClassifier, allowing for training, classification, optimization, variance prediction, incremental learning, and  storing/re-storing.
 * @brief Matlab-Interface of our GPHIKClassifier, allowing for training, classification, optimization, variance prediction, incremental learning, and  storing/re-storing.
@@ -33,7 +33,7 @@ using namespace std; //C basics
 using namespace NICE;  // nice-core
 using namespace NICE;  // nice-core
 
 
 
 
-NICE::Config parseParameters(const mxArray *prhs[], int nrhs)
+NICE::Config parseParametersGPHIKClassifier(const mxArray *prhs[], int nrhs)
 {
 {
   NICE::Config conf;
   NICE::Config conf;
   
   
@@ -235,7 +235,7 @@ void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
             mexErrMsgTxt("New: One output expected.");
             mexErrMsgTxt("New: One output expected.");
         
         
         // read config settings
         // read config settings
-        NICE::Config conf = parseParameters(prhs+1,nrhs-1);
+        NICE::Config conf = parseParametersGPHIKClassifier(prhs+1,nrhs-1);
         
         
         // create class instance
         // create class instance
         NICE::GPHIKClassifier * classifier = new NICE::GPHIKClassifier ( &conf, "GPHIKClassifier" /*sectionName in config*/ );
         NICE::GPHIKClassifier * classifier = new NICE::GPHIKClassifier ( &conf, "GPHIKClassifier" /*sectionName in config*/ );

+ 73 - 0
matlab/GPHIKRegression.m

@@ -0,0 +1,73 @@
+% brief:    MATLAB class wrapper for the underlying Matlab-C++ Interface (GPHIKRegressionMex.cpp)
+% author:   Alexander Freytag
+% date:     17-01-2014 (dd-mm-yyyy)
+classdef GPHIKRegression < handle
+    
+    properties (SetAccess = private, Hidden = true)
+        % Handle to the underlying C++ class instance
+        objectHandle; 
+    end
+    
+    methods
+        
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+        %%      Constructor / Destructor    %%
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%        
+        %% constructor - create object
+        function this = GPHIKRegression(varargin)
+            this.objectHandle = GPHIKRegressionMex('new', varargin{:});
+        end
+        
+        %% destructor - delete object
+        function delete(this)
+            GPHIKRegressionMex('delete', this.objectHandle);
+        end
+
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+        %%          Regression stuff        %%
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%         
+        %% train - standard train - assumes initialized object
+        function varargout = train(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('train', this.objectHandle, varargin{:});
+        end
+        
+        %% perform regression
+        function varargout = estimate(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('estimate', this.objectHandle, varargin{:});
+        end 
+        
+        %% uncertainty - Uncertainty prediction
+        function varargout = uncertainty(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('uncertainty', this.objectHandle, varargin{:});
+        end        
+
+        %% test - evaluate regression on whole test set using L2 loss
+        function varargout = testL2loss(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('testL2loss', this.objectHandle, varargin{:});
+        end
+        
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+        %%       Online Learnable methods   %%
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+        %% addExample
+        function varargout = addExample(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('addExample', this.objectHandle, varargin{:});
+        end 
+        %% addMultipleExamples
+        function varargout = addMultipleExamples(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('addMultipleExamples', this.objectHandle, varargin{:});
+        end
+        
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+        %%       Persistent methods         %%
+        %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+        %% store - store the classifier to an external file
+        function varargout = store(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('store', this.objectHandle, varargin{:});
+        end
+        %% restore -  load classifier from external file 
+        function varargout = restore(this, varargin)
+            [varargout{1:nargout}] = GPHIKRegressionMex('restore', this.objectHandle, varargin{:});
+        end
+    end
+end

+ 679 - 0
matlab/GPHIKRegressionMex.cpp

@@ -0,0 +1,679 @@
+/** 
+* @file GPHIKRegressionMex.cpp
+* @author Alexander Freytag
+* @date 17-01-2014 (dd-mm-yyyy)
+* @brief Matlab-Interface of our GPHIKRegression, allowing for training, regression, optimization, variance prediction, incremental learning, and  storing/re-storing.
+*/
+
+// STL includes
+#include <math.h>
+#include <matrix.h>
+#include <mex.h>
+
+// NICE-core includes
+#include <core/basics/Config.h>
+#include <core/basics/Timer.h>
+#include <core/vector/MatrixT.h>
+#include <core/vector/VectorT.h>
+
+// gp-hik-core includes
+#include "gp-hik-core/GPHIKRegression.h"
+
+
+// Interface for conversion between Matlab and C objects
+#include "gp-hik-core/matlab/classHandleMtoC.h"
+#include "gp-hik-core/matlab/ConverterMatlabToNICE.h"
+#include "gp-hik-core/matlab/ConverterNICEToMatlab.h"
+
+const NICE::ConverterMatlabToNICE converterMtoNICE;
+const NICE::ConverterNICEToMatlab converterNICEtoM;
+
+
+using namespace std; //C basics
+using namespace NICE;  // nice-core
+
+
+NICE::Config parseParametersGPHIKRegression(const mxArray *prhs[], int nrhs)
+{
+  NICE::Config conf;
+  
+  // if first argument is the filename of an existing config file,
+  // read the config accordingly
+  
+  int i_start ( 0 );
+  std::string variable = converterMtoNICE.convertMatlabToString(prhs[i_start]);
+  if(variable == "conf")
+  {
+      conf = NICE::Config ( converterMtoNICE.convertMatlabToString( prhs[i_start+1] )  );
+      i_start = i_start+2;
+  }
+  
+  // now run over all given parameter specifications
+  // and add them to the config
+  for( int i=i_start; i < nrhs; i+=2 )
+  {
+    std::string variable = converterMtoNICE.convertMatlabToString(prhs[i]);
+    
+    /////////////////////////////////////////
+    // READ STANDARD BOOLEAN VARIABLES
+    /////////////////////////////////////////
+    if( (variable == "verboseTime") || (variable == "verbose") ||
+        (variable == "optimize_noise") || (variable == "uncertaintyPredictionForClassification") ||
+        (variable == "use_quantization") || (variable == "ils_verbose")
+      )
+    {
+      if ( mxIsChar( prhs[i+1] ) )
+      {
+        string value = converterMtoNICE.convertMatlabToString( prhs[i+1] );
+        if ( (value != "true") && (value != "false") )
+        {
+          std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. In string modus, \'true\' or \'false\' expected.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
+        }
+        
+        if( value == "true" )
+          conf.sB("GPHIKRegression", variable, true);
+        else
+          conf.sB("GPHIKRegression", variable, false);
+      }
+      else if ( mxIsLogical( prhs[i+1] ) )
+      {
+        bool value = converterMtoNICE.convertMatlabToBool( prhs[i+1] );
+        conf.sB("GPHIKRegression", variable, value);
+      }
+      else
+      {
+          std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. \'true\', \'false\', or logical expected.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );        
+      }
+    }
+    
+    /////////////////////////////////////////
+    // READ STANDARD INT VARIABLES
+    /////////////////////////////////////////
+    if ( (variable == "nrOfEigenvaluesToConsiderForVarApprox")
+       )
+    {
+      if ( mxIsDouble( prhs[i+1] ) )
+      {
+        double value = converterMtoNICE.convertMatlabToDouble(prhs[i+1]);
+        conf.sI("GPHIKRegression", variable, (int) value);        
+      }
+      else if ( mxIsInt32( prhs[i+1] ) )
+      {
+        int value = converterMtoNICE.convertMatlabToInt32(prhs[i+1]);
+        conf.sI("GPHIKRegression", variable, value);          
+      }
+      else
+      {
+          std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Int32 or Double expected.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );         
+      }     
+    }
+    
+    /////////////////////////////////////////
+    // READ STRICT POSITIVE INT VARIABLES
+    /////////////////////////////////////////
+    if ( (variable == "num_bins") || (variable == "ils_max_iterations")
+       )
+    {
+      if ( mxIsDouble( prhs[i+1] ) )
+      {
+        double value = converterMtoNICE.convertMatlabToDouble(prhs[i+1]);
+        if( value < 1 )
+        {
+          std::string errorMsg = "Expected parameter value larger than 0 for \'" +  variable + "\'.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );     
+        }
+        conf.sI("GPHIKRegression", variable, (int) value);        
+      }
+      else if ( mxIsInt32( prhs[i+1] ) )
+      {
+        int value = converterMtoNICE.convertMatlabToInt32(prhs[i+1]);
+        if( value < 1 )
+        {
+          std::string errorMsg = "Expected parameter value larger than 0 for \'" +  variable + "\'.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );     
+        }        
+        conf.sI("GPHIKRegression", variable, value);          
+      }
+      else
+      {
+          std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Int32 or Double expected.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );         
+      }     
+    }
+    
+    /////////////////////////////////////////
+    // READ POSITIVE DOUBLE VARIABLES
+    /////////////////////////////////////////
+    if ( (variable == "ils_min_delta") || (variable == "ils_min_residual") ||
+         (variable == "noise")
+       )
+    {
+      if ( mxIsDouble( prhs[i+1] ) )
+      {
+        double value = converterMtoNICE.convertMatlabToDouble(prhs[i+1]);
+        if( value < 0.0 )
+        {
+          std::string errorMsg = "Expected parameter value larger than 0 for \'" +  variable + "\'.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );     
+        }
+        conf.sD("GPHIKRegression", variable, value);        
+      }
+      else
+      {
+          std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Double expected.";
+          mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );         
+      }     
+    }    
+    
+    /////////////////////////////////////////
+    // READ REMAINING SPECIFIC VARIABLES
+    /////////////////////////////////////////  
+
+    if(variable == "ils_method")
+    {
+      string value = converterMtoNICE.convertMatlabToString(prhs[i+1]);
+      if(value != "CG" && value != "CGL" && value != "SYMMLQ" && value != "MINRES")
+        mexErrMsgIdAndTxt("mexnice:error","Unexpected parameter value for \'ils_method\'. \'CG\', \'CGL\', \'SYMMLQ\' or \'MINRES\' expected.");
+        conf.sS("GPHIKRegression", variable, value);
+    }
+
+
+    if(variable == "optimization_method")
+    {
+      string value = converterMtoNICE.convertMatlabToString(prhs[i+1]);
+      if(value != "greedy" && value != "downhillsimplex" && value != "none")
+        mexErrMsgIdAndTxt("mexnice:error","Unexpected parameter value for \'optimization_method\'. \'greedy\', \'downhillsimplex\' or \'none\' expected.");
+        conf.sS("GPHIKRegression", variable, value);
+    }
+
+    if(variable == "transform")
+    {
+      string value = converterMtoNICE.convertMatlabToString( prhs[i+1] );
+      if(value != "absexp" && value != "exp" && value != "MKL" && value != "WeightedDim")
+        mexErrMsgIdAndTxt("mexnice:error","Unexpected parameter value for \'transform\'. \'absexp\', \'exp\' , \'MKL\' or \'WeightedDim\' expected.");
+        conf.sS("GPHIKRegression", variable, value);
+    }
+
+  
+    if(variable == "varianceApproximation")
+    {
+      string value = converterMtoNICE.convertMatlabToString(prhs[i+1]);
+      if(value != "approximate_fine" && value != "approximate_rough" && value != "exact" && value != "none")
+        mexErrMsgIdAndTxt("mexnice:error","Unexpected parameter value for \'varianceApproximation\'. \'approximate_fine\', \'approximate_rough\', \'none\' or \'exact\' expected.");
+        conf.sS("GPHIKRegression", variable, value);
+    }
+    
+
+    
+  }
+
+
+  return conf;
+}
+
+// MAIN MATLAB FUNCTION
+void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
+{    
+    // get the command string specifying what to do
+    if (nrhs < 1)
+        mexErrMsgTxt("No commands and options passed... Aborting!");        
+    
+    if( !mxIsChar( prhs[0] ) )
+        mexErrMsgTxt("First argument needs to be the command, ie.e, the class method to call... Aborting!");        
+    
+    std::string cmd = converterMtoNICE.convertMatlabToString( prhs[0] );
+      
+        
+    // create object
+    if ( !strcmp("new", cmd.c_str() ) )
+    {
+        // check output variable
+        if (nlhs != 1)
+            mexErrMsgTxt("New: One output expected.");
+        
+        // read config settings
+        NICE::Config conf = parseParametersGPHIKRegression(prhs+1,nrhs-1);
+        
+        // create class instance
+        NICE::GPHIKRegression * regressor = new NICE::GPHIKRegression ( &conf, "GPHIKRegression" /*sectionName in config*/ );
+        
+         
+        // handle to the C++ instance
+        plhs[0] = convertPtr2Mat<NICE::GPHIKRegression>( regressor );
+        return;
+    }
+    
+    // in all other cases, there should be a second input,
+    // which the be the class instance handle
+    if (nrhs < 2)
+      mexErrMsgTxt("Second input should be a class instance handle.");
+    
+    // delete object
+    if ( !strcmp("delete", cmd.c_str() ) )
+    {
+        // Destroy the C++ object
+        destroyObject<NICE::GPHIKRegression>(prhs[1]);
+        return;
+    }
+    
+    // get the class instance pointer from the second input
+    // every following function needs the regressor object
+    NICE::GPHIKRegression * regressor = convertMat2Ptr<NICE::GPHIKRegression>(prhs[1]);
+    
+    
+    ////////////////////////////////////////
+    //  Check which class method to call  //
+    ////////////////////////////////////////
+    
+    
+    // standard train - assumes initialized object
+    if (!strcmp("train", cmd.c_str() ))
+    {
+        // Check parameters
+        if (nlhs < 0 || nrhs < 4)
+        {
+            mexErrMsgTxt("Train: Unexpected arguments.");
+        }
+        
+        //------------- read the data --------------
+          
+        std::vector< const NICE::SparseVector *> examplesTrain;
+        NICE::Vector yValuesTrain;  
+
+        if ( mxIsSparse( prhs[2] ) )
+        {
+            examplesTrain = converterMtoNICE.convertSparseMatrixToNice( prhs[2] );
+        }
+        else
+        {
+            NICE::Matrix dataTrain;
+            dataTrain = converterMtoNICE.convertDoubleMatrixToNice(prhs[2]);
+            
+            //----------------- convert data to sparse data structures ---------
+            examplesTrain.resize( dataTrain.rows() );
+
+                    
+            std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin();
+            for (int i = 0; i < (int)dataTrain.rows(); i++, exTrainIt++)
+            {
+                *exTrainIt =  new NICE::SparseVector( dataTrain.getRow(i) );
+            }            
+        }
+          
+        yValuesTrain = converterMtoNICE.convertDoubleVectorToNice(prhs[3]);
+
+        //----------------- train our regressor -------------
+        regressor->train ( examplesTrain , yValuesTrain );
+
+        //----------------- clean up -------------
+        for(int i=0;i<examplesTrain.size();i++)
+            delete examplesTrain[i];
+        
+        return;
+    }
+    
+    
+    // perform regression    
+    if ( !strcmp("estimate", cmd.c_str() ) )
+    {
+        // Check parameters
+        if ( (nlhs < 0) || (nrhs < 2) )
+        {
+            mexErrMsgTxt("Test: Unexpected arguments.");
+        }
+        
+        //------------- read the data --------------
+
+        double result;
+        double uncertainty;        
+
+        if ( mxIsSparse( prhs[2] ) )
+        {
+            NICE::SparseVector * example;
+            example = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
+            regressor->estimate ( example,  result, uncertainty );
+            
+            //----------------- clean up -------------
+            delete example;
+        }
+        else
+        {
+            NICE::Vector * example;
+            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) ); 
+            regressor->estimate ( example,  result, uncertainty );
+            
+            //----------------- clean up -------------
+            delete example;            
+        }
+          
+          
+
+          // output
+          plhs[0] = mxCreateDoubleScalar( result ); 
+          
+          
+          if(nlhs >= 2)
+          {
+            plhs[1] = mxCreateDoubleScalar( uncertainty );          
+          }
+          return;
+    }
+    
+    // Uncertainty prediction    
+    if ( !strcmp("uncertainty", cmd.c_str() ) )
+    {
+        // Check parameters
+        if ( (nlhs < 0) || (nrhs < 2) )
+        {
+            mexErrMsgTxt("Test: Unexpected arguments.");
+        }
+        
+        double uncertainty;        
+        
+        //------------- read the data --------------
+
+        if ( mxIsSparse( prhs[2] ) )
+        {
+            NICE::SparseVector * example;
+            example = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
+            regressor->predictUncertainty( example, uncertainty );
+            
+            //----------------- clean up -------------
+            delete example;            
+        }
+        else
+        {
+            NICE::Vector * example;
+            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) ); 
+            regressor->predictUncertainty( example, uncertainty );
+            
+            //----------------- clean up -------------
+            delete example;            
+        }
+        
+       
+
+          // output
+          plhs[0] = mxCreateDoubleScalar( uncertainty );                    
+          return;
+    }    
+    
+    
+    // Test - evaluate regressor on whole test set  
+    if ( !strcmp("testL2loss", cmd.c_str() ) )
+    {        
+        // Check parameters
+        if (nlhs < 0 || nrhs < 3)
+            mexErrMsgTxt("Test: Unexpected arguments.");
+        //------------- read the data --------------
+        
+        
+        bool dataIsSparse ( mxIsSparse( prhs[2] ) );
+        
+        std::vector< const NICE::SparseVector *> dataTest_sparse;
+        NICE::Matrix dataTest_dense;
+
+        if ( dataIsSparse )
+        {
+            dataTest_sparse = converterMtoNICE.convertSparseMatrixToNice( prhs[2] );
+        }
+        else
+        {    
+            dataTest_dense = converterMtoNICE.convertDoubleMatrixToNice(prhs[2]);          
+        }        
+
+        NICE::Vector yValuesTest;
+        yValuesTest = converterMtoNICE.convertDoubleVectorToNice(prhs[3]);
+	
+        int i_numTestSamples ( yValuesTest.size() );
+        
+	double l2loss ( 0.0 );
+	
+	NICE::Vector scores;
+	NICE::Vector::iterator itScores;
+	if ( nlhs >= 2 )
+	{
+	  scores.resize( i_numTestSamples );
+	  itScores = scores.begin();
+	}
+          
+          
+
+        // ------------------------------------------
+        // ------------- REGRESSION --------------
+        // ------------------------------------------          
+        
+        NICE::Timer t;
+        double testTime (0.0);
+        
+
+
+        for (int i = 0; i < i_numTestSamples; i++)
+        {
+            //----------------- convert data to sparse data structures ---------
+          
+
+            double result;
+
+            if ( dataIsSparse )
+            {                
+              // and perform regression
+              t.start();
+              regressor->estimate( dataTest_sparse[ i ], result);
+              t.stop();
+              testTime += t.getLast();
+            }
+            else
+            {
+                NICE::Vector example ( dataTest_dense.getRow(i) );
+              // and perform regression
+              t.start();
+              regressor->estimate( &example, result );
+              t.stop();
+              testTime += t.getLast();                
+            }
+
+            l2loss += pow ( yValuesTest[i] - result, 2); 
+	    
+	    if ( nlhs >= 2 )
+	    {
+	      *itScores = result;
+	      itScores++;
+	    }	    
+        }
+        
+        std::cerr << "Time for testing: " << testTime << std::endl;          
+        
+        // clean up
+        if ( dataIsSparse )
+        {
+            for ( std::vector<const NICE::SparseVector *>::iterator it = dataTest_sparse.begin(); it != dataTest_sparse.end(); it++) 
+                delete *it;
+        }
+        
+
+
+        plhs[0] = mxCreateDoubleScalar( l2loss );
+
+        if(nlhs >= 2)
+          plhs[1] = converterNICEtoM.convertVectorFromNice(scores);          
+          
+          
+        return;
+    }
+    
+    ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////
+    // interface specific methods for incremental extensions
+    ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////      
+    
+    // addExample    
+    if ( !strcmp("addExample", cmd.c_str() ) )
+    {
+        // Check parameters
+        if ( (nlhs < 0) || (nrhs < 4) )
+        {
+            mexErrMsgTxt("Test: Unexpected arguments.");
+        }
+        
+        //------------- read the data --------------
+
+        NICE::SparseVector * newExample;
+        double newLabel;        
+
+        if ( mxIsSparse( prhs[2] ) )
+        {
+            newExample = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
+        }
+        else
+        {
+            NICE::Vector * example;
+            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) ); 
+            newExample = new NICE::SparseVector ( *example );
+            //----------------- clean up -------------
+            delete example;            
+        }
+        
+        newLabel = converterMtoNICE.convertMatlabToDouble( prhs[3] );
+        
+        // setting performOptimizationAfterIncrement is optional
+        if ( nrhs > 4 )
+        {
+          bool performOptimizationAfterIncrement;          
+          performOptimizationAfterIncrement = converterMtoNICE.convertMatlabToBool( prhs[4] );
+          
+          regressor->addExample ( newExample,  newLabel, performOptimizationAfterIncrement );
+        }
+        else
+        {
+          regressor->addExample ( newExample,  newLabel );
+        }
+          
+        
+        //----------------- clean up -------------
+        delete newExample;        
+
+        return;
+    }
+    
+    // addMultipleExamples    
+    if ( !strcmp("addMultipleExamples", cmd.c_str() ) )
+    {
+        // Check parameters
+        if ( (nlhs < 0) || (nrhs < 4) )
+        {
+            mexErrMsgTxt("Test: Unexpected arguments.");
+        }
+        
+        //------------- read the data --------------
+
+        std::vector< const NICE::SparseVector *> newExamples;
+        NICE::Vector newLabels;
+
+        if ( mxIsSparse( prhs[2] ) )
+        {
+            newExamples = converterMtoNICE.convertSparseMatrixToNice( prhs[2] );
+        }
+        else
+        {
+            NICE::Matrix newData;
+            newData = converterMtoNICE.convertDoubleMatrixToNice(prhs[2]);
+            
+            //----------------- convert data to sparse data structures ---------
+            newExamples.resize( newData.rows() );
+
+                    
+            std::vector< const NICE::SparseVector *>::iterator exTrainIt = newExamples.begin();
+            for (int i = 0; i < (int)newData.rows(); i++, exTrainIt++)
+            {
+                *exTrainIt =  new NICE::SparseVector( newData.getRow(i) );
+            }            
+        }
+          
+        newLabels = converterMtoNICE.convertDoubleVectorToNice(prhs[3]);
+        
+        // setting performOptimizationAfterIncrement is optional
+        if ( nrhs > 4 )
+        {
+          bool performOptimizationAfterIncrement;          
+          performOptimizationAfterIncrement = converterMtoNICE.convertMatlabToBool( prhs[4] );
+          
+          regressor->addMultipleExamples ( newExamples,  newLabels, performOptimizationAfterIncrement );
+        }
+        else
+        {
+          regressor->addMultipleExamples ( newExamples,  newLabels );
+        }
+          
+        
+        //----------------- clean up -------------
+        for ( std::vector< const NICE::SparseVector *>::iterator exIt = newExamples.begin();
+              exIt != newExamples.end(); exIt++
+            ) 
+        {
+          delete *exIt;
+        }
+
+        return;
+    }    
+    
+
+    
+    ///////////////////// INTERFACE PERSISTENT /////////////////////
+    // interface specific methods for store and restore
+    ///////////////////// INTERFACE PERSISTENT /////////////////////    
+    
+  
+    
+    // store the regressor  to an external file
+    if ( !strcmp("store", cmd.c_str() ) || !strcmp("save", cmd.c_str() ) )
+    {
+        // Check parameters
+        if ( nrhs < 3 )
+            mexErrMsgTxt("store: no destination given.");        
+               
+        std::string s_destination = converterMtoNICE.convertMatlabToString( prhs[2] );
+          
+        std::filebuf fb;
+        fb.open ( s_destination.c_str(), ios::out );
+        std::ostream os(&fb);
+        //
+        regressor->store( os );
+        //   
+        fb.close();        
+            
+        return;
+    }
+    
+    // load regressor from external file    
+    if ( !strcmp("restore", cmd.c_str() ) || !strcmp("load", cmd.c_str() ) )
+    {
+        // Check parameters
+        if ( nrhs < 3 )
+            mexErrMsgTxt("restore: no destination given.");        
+               
+        std::string s_destination = converterMtoNICE.convertMatlabToString( prhs[2] );
+        
+        std::cerr << " aim at restoring the regressor from " << s_destination << std::endl;
+          
+        std::filebuf fbIn;
+        fbIn.open ( s_destination.c_str(), ios::in );
+        std::istream is (&fbIn);
+        //
+        regressor->restore( is );
+        //   
+        fbIn.close();        
+            
+        return;
+    }    
+    
+    
+    // Got here, so command not recognized
+    
+    std::string errorMsg (cmd.c_str() );
+    errorMsg += " -- command not recognized.";
+    mexErrMsgTxt( errorMsg.c_str() );
+
+}

+ 8 - 1
matlab/Makefile

@@ -2,4 +2,11 @@ NICEFLAGS1=$(shell pkg-config libgp-hik-core --cflags --libs)
 NICEFLAGS=$(subst -fopenmp,,$(NICEFLAGS1))
 NICEFLAGS=$(subst -fopenmp,,$(NICEFLAGS1))
 
 
 default:
 default:
-	/home/alex/bin/matlab/7.14/bin/mex ${NICEFLAGS} -largeArrayDims GPHIK.cpp ConverterMatlabToNICE.cpp ConverterNICEToMatlab.cpp
+	/home/alex/bin/matlab/7.14/bin/mex ${NICEFLAGS} -largeArrayDims GPHIKClassifierMex.cpp ConverterMatlabToNICE.cpp ConverterNICEToMatlab.cpp
+	/home/alex/bin/matlab/7.14/bin/mex ${NICEFLAGS} -largeArrayDims GPHIKRegressionMex.cpp ConverterMatlabToNICE.cpp ConverterNICEToMatlab.cpp
+
+classification:
+	/home/alex/bin/matlab/7.14/bin/mex ${NICEFLAGS} -largeArrayDims GPHIKClassifierMex.cpp ConverterMatlabToNICE.cpp ConverterNICEToMatlab.cpp
+
+regression:        
+	/home/alex/bin/matlab/7.14/bin/mex ${NICEFLAGS} -largeArrayDims GPHIKRegressionMex.cpp ConverterMatlabToNICE.cpp ConverterNICEToMatlab.cpp

+ 0 - 0
matlab/plot1dExample.m → matlab/plot1dExampleClassification.m


+ 56 - 0
matlab/plot1dExampleRegression.m

@@ -0,0 +1,56 @@
% Minimal 1D regression example for the GPHIKRegression Matlab wrapper:
% train on two scalar inputs (encoded as l1-normalized 2-bin histograms),
% predict densely over [0,1], and plot the regression estimate together
% with its predictive-uncertainty band.

% two scalar training inputs
myData = [ 0.2; 0.8];
% encode every scalar x as the l1-normalized 'histogram' [x, 1-x]
myData = cat(2,myData , 1-myData)';
% regression targets for the two training examples
myValues = [1,2];


% init new GPHIKRegression object
% NOTE(review): this is a regression example, so the uncertainty flag must be
% 'uncertaintyPredictionForRegression' -- the previously used
% 'uncertaintyPredictionForClassification' is only read by GPHIKClassifier
% and would be silently ignored here, leaving all uncertainties at zero.
myGPHIKRegression = GPHIKRegression ( 'verbose', 'false', ...
    'optimization_method', 'none', 'varianceApproximation', 'approximate_fine',...
    'nrOfEigenvaluesToConsiderForVarApprox',2,...
    'uncertaintyPredictionForRegression', true ...
    );

% run train method
myGPHIKRegression.train( myData, myValues );

% dense test grid over [0,1], same histogram encoding as the training data
myDataTest = 0:0.01:1;
myDataTest = cat(1, myDataTest, 1-myDataTest)';


% estimate each test example separately to obtain score and uncertainty
scores        = zeros(size(myDataTest,1),1);
uncertainties = zeros(size(myDataTest,1),1);
for i=1:size(myDataTest,1)
    example = myDataTest(i,:);
    [ scores(i), uncertainties(i)] = myGPHIKRegression.estimate( example );
end



% plot the predictive mean with a +/- sigma band around it
figure;
hold on;

% x positions of the test grid (matches the first histogram bin)
x=0:0.01:1;

% lower and upper boundary of the uncertainty band
uncLower=scores-uncertainties;
uncUpper=scores+uncertainties;


% polygon-like x values so the band can be drawn as a single filled patch
X=[x,fliplr(x)];
% concatenate y-values accordingly (lower curve forward, upper curve back)
Y=[uncLower',fliplr(uncUpper')];
% plot filled area
fill(X,Y,'y');

% predictive mean on top of the band
plot ( x,scores,'rx');


% clean up and delete object
myGPHIKRegression.delete();

clear ( 'myGPHIKRegression' );

+ 11 - 11
matlab/testGPHIK.m → matlab/testGPHIKClassifierMex.m

@@ -13,47 +13,47 @@ myLabels = [1,1,2,2,3,3];
 
 
 
 
 % init new GPHIKClassifier object
 % init new GPHIKClassifier object
-myGPHIKClassifier = GPHIK ( 'new', 'verbose', 'false', ...
+myGPHIKClassifier = GPHIKClassifierMex ( 'new', 'verbose', 'false', ...
     'optimization_method', 'none', 'varianceApproximation', 'approximate_rough',...
     'optimization_method', 'none', 'varianceApproximation', 'approximate_rough',...
     'nrOfEigenvaluesToConsiderForVarApprox',4,...
     'nrOfEigenvaluesToConsiderForVarApprox',4,...
     'uncertaintyPredictionForClassification', false ...
     'uncertaintyPredictionForClassification', false ...
     );
     );
 
 
 % run train method
 % run train method
-GPHIK ( 'train', myGPHIKClassifier, myData, myLabels);
+GPHIKClassifierMex ( 'train', myGPHIKClassifier, myData, myLabels);
 
 
 myDataTest = [ 0.3 0.4 0.3
 myDataTest = [ 0.3 0.4 0.3
              ];
              ];
 myLabelsTest = [1];
 myLabelsTest = [1];
 
 
 % run single classification call
 % run single classification call
-[ classNoEst, score, uncertainty] = GPHIK ( 'classify', myGPHIKClassifier, myDataTest )
+[ classNoEst, score, uncertainty] = GPHIKClassifierMex ( 'classify', myGPHIKClassifier, myDataTest )
 % compute predictive variance
 % compute predictive variance
-uncertainty = GPHIK ( 'uncertainty', myGPHIKClassifier, myDataTest )
+uncertainty = GPHIKClassifierMex ( 'uncertainty', myGPHIKClassifier, myDataTest )
 % run test method evaluating arr potentially using multiple examples
 % run test method evaluating arr potentially using multiple examples
-[ arr, confMat, scores] = GPHIK ( 'test', myGPHIKClassifier, myDataTest, myLabelsTest )
+[ arr, confMat, scores] = GPHIKClassifierMex ( 'test', myGPHIKClassifier, myDataTest, myLabelsTest )
 
 
 % add a single new example
 % add a single new example
 newExample = [ 0.5 0.5 0.0
 newExample = [ 0.5 0.5 0.0
              ];
              ];
 newLabel = [4];
 newLabel = [4];
-GPHIK ( 'addExample', myGPHIKClassifier, newExample, newLabel);
+GPHIKClassifierMex ( 'addExample', myGPHIKClassifier, newExample, newLabel);
 
 
 % add multiple new examples
 % add multiple new examples
 newExamples = [ 0.3 0.3 0.4;
 newExamples = [ 0.3 0.3 0.4;
                 0.1, 0.2, 0.7
                 0.1, 0.2, 0.7
              ];
              ];
 newLabels = [1,3];
 newLabels = [1,3];
-GPHIK ( 'addMultipleExamples', myGPHIKClassifier, newExamples, newLabels );
+GPHIKClassifierMex ( 'addMultipleExamples', myGPHIKClassifier, newExamples, newLabels );
 
 
 % perform evaluation again
 % perform evaluation again
 
 
 % run single classification call
 % run single classification call
-[ classNoEst, score, uncertainty] = GPHIK ( 'classify', myGPHIKClassifier, myDataTest )
+[ classNoEst, score, uncertainty] = GPHIKClassifierMex ( 'classify', myGPHIKClassifier, myDataTest )
 % compute predictive variance
 % compute predictive variance
-uncertainty = GPHIK ( 'uncertainty', myGPHIKClassifier, myDataTest )
+uncertainty = GPHIKClassifierMex ( 'uncertainty', myGPHIKClassifier, myDataTest )
 % run test method evaluating arr potentially using multiple examples
 % run test method evaluating arr potentially using multiple examples
-[ arr, confMat, scores] = GPHIK ( 'test', myGPHIKClassifier, myDataTest, myLabelsTest )
+[ arr, confMat, scores] = GPHIKClassifierMex ( 'test', myGPHIKClassifier, myDataTest, myLabelsTest )
 
 
 % clean up and delete object
 % clean up and delete object
-GPHIK ( 'delete',myGPHIKClassifier);
+GPHIKClassifierMex ( 'delete',myGPHIKClassifier);

+ 132 - 3
tests/TestGPHIKRegression.cpp

@@ -24,7 +24,7 @@ using namespace std; //C basics
 using namespace NICE;  // nice-core
 using namespace NICE;  // nice-core
 
 
 const bool verboseStartEnd = true;
 const bool verboseStartEnd = true;
-const bool verbose = false;
+const bool verbose = true;
 
 
 
 
 CPPUNIT_TEST_SUITE_REGISTRATION( TestGPHIKRegression );
 CPPUNIT_TEST_SUITE_REGISTRATION( TestGPHIKRegression );
@@ -113,7 +113,7 @@ void TestGPHIKRegression::testRegressionHoldInData()
     *exTrainIt =  new NICE::SparseVector( dataTrain.getRow(i) );
     *exTrainIt =  new NICE::SparseVector( dataTrain.getRow(i) );
   }
   }
     
     
-  //create classifier object
+  //create regressionMethod object
   NICE::GPHIKRegression * regressionMethod;
   NICE::GPHIKRegression * regressionMethod;
   regressionMethod = new NICE::GPHIKRegression ( &conf );
   regressionMethod = new NICE::GPHIKRegression ( &conf );
   regressionMethod->train ( examplesTrain , yValues );
   regressionMethod->train ( examplesTrain , yValues );
@@ -180,7 +180,7 @@ void TestGPHIKRegression::testRegressionHoldOutData()
     *exTrainIt =  new NICE::SparseVector( dataTrain.getRow(i) );
     *exTrainIt =  new NICE::SparseVector( dataTrain.getRow(i) );
   }
   }
     
     
-  //create classifier object
+  //create regressionMethod object
   NICE::GPHIKRegression * regressionMethod;
   NICE::GPHIKRegression * regressionMethod;
   regressionMethod = new NICE::GPHIKRegression ( &conf );
   regressionMethod = new NICE::GPHIKRegression ( &conf );
   regressionMethod->train ( examplesTrain , yValues );
   regressionMethod->train ( examplesTrain , yValues );
@@ -230,6 +230,135 @@ void TestGPHIKRegression::testRegressionOnlineLearning()
 {
 {
   if (verboseStartEnd)
   if (verboseStartEnd)
     std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearning ===================== " << std::endl;  
     std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearning ===================== " << std::endl;  
+
+  NICE::Config conf;
+  
+  conf.sB ( "GPHIKRegressionMethod", "eig_verbose", false);
+  conf.sS ( "GPHIKRegressionMethod", "optimization_method", "downhillsimplex");//downhillsimplex greedy
+  
+  std::string s_trainData = conf.gS( "main", "trainData", "toyExampleSmallScaleTrain.data" );
+  
+  //------------- read the training data --------------
+  
+  NICE::Matrix dataTrain;
+  NICE::Vector yValuesTrain; 
+  
+  readData ( s_trainData, dataTrain, yValuesTrain );
+
+  //----------------- convert data to sparse data structures ---------
+  std::vector< const NICE::SparseVector *> examplesTrain;
+  examplesTrain.resize( dataTrain.rows()-1 );
+  
+  std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin();
+  for (int i = 0; i < (int)dataTrain.rows()-1; i++, exTrainIt++)
+  {
+    *exTrainIt =  new NICE::SparseVector( dataTrain.getRow(i) );
+  }  
+  
+  // TRAIN INITIAL CLASSIFIER FROM SCRATCH
+  NICE::GPHIKRegression * regressionMethod;
+  regressionMethod = new NICE::GPHIKRegression ( &conf );
+
+  //use all but the first example for training and add the first one lateron
+  NICE::Vector yValuesRelevantTrain  ( yValuesTrain.getRangeRef( 0, yValuesTrain.size()-2  ) );
+  
+  regressionMethod->train ( examplesTrain , yValuesRelevantTrain );
+  
+  std::cerr << " initial training done " << std::endl;
+  
+  // RUN INCREMENTAL LEARNING
+  
+  bool performOptimizationAfterIncrement ( true );
+  
+  NICE::SparseVector * exampleToAdd = new NICE::SparseVector ( dataTrain.getRow( (int)dataTrain.rows()-1 ) );
+  
+  exampleToAdd->store  ( std::cerr );
+  std::cerr << "corresponding label: " << yValuesTrain[ (int)dataTrain.rows()-2 ] << std::endl;
+  
+  // TODO seg fault happens here!
+  regressionMethod->addExample ( exampleToAdd, yValuesTrain[ (int)dataTrain.rows()-2 ], performOptimizationAfterIncrement );
+  
+  if ( verbose )
+    std::cerr << "label of example to add: " << yValuesTrain[ (int)dataTrain.rows()-1 ] << std::endl;
+  
+  // TRAIN SECOND CLASSIFIER FROM SCRATCH USING THE SAME OVERALL AMOUNT OF EXAMPLES
+  examplesTrain.push_back(  exampleToAdd );
+
+  NICE::GPHIKRegression * regressionMethodScratch = new NICE::GPHIKRegression ( &conf );
+  regressionMethodScratch->train ( examplesTrain, yValuesTrain );
+  
+  if ( verbose )
+    std::cerr << "trained both regressionMethods - now start evaluating them" << std::endl;
+  
+  
+  // TEST that both regressionMethods produce equal store-files
+   std::string s_destination_save_IL ( "myRegressionMethodIL.txt" );
+  
+  std::filebuf fbOut;
+  fbOut.open ( s_destination_save_IL.c_str(), ios::out );
+  std::ostream os (&fbOut);
+  //
+  regressionMethod->store( os );
+  //   
+  fbOut.close(); 
+  
+  std::string s_destination_save_scratch ( "myRegressionMethodScratch.txt" );
+  
+  std::filebuf fbOutScratch;
+  fbOutScratch.open ( s_destination_save_scratch.c_str(), ios::out );
+  std::ostream osScratch (&fbOutScratch);
+  //
+  regressionMethodScratch->store( osScratch );
+  //   
+  fbOutScratch.close(); 
+  
+  
+  // TEST both regressionMethods to produce equal results
+  
+  //------------- read the test data --------------
+  
+  
+  NICE::Matrix dataTest;
+  NICE::Vector yValuesTest; 
+  
+  std::string s_testData = conf.gS( "main", "testData", "toyExampleTest.data" );  
+  
+  readData ( s_testData, dataTest, yValuesTest );
+
+  
+  // ------------------------------------------
+  // ------------- REGRESSION --------------
+  // ------------------------------------------  
+
+
+  double holdOutLossIL ( 0.0 );
+  double holdOutLossScratch ( 0.0 );
+  
+  evaluateRegressionMethod ( holdOutLossIL, regressionMethod, dataTest, yValuesTest ); 
+  
+  evaluateRegressionMethod ( holdOutLossScratch, regressionMethodScratch, dataTest, yValuesTest );  
+  
+    
+  if ( verbose ) 
+  {
+    std::cerr << "holdOutLossIL: " << holdOutLossIL  << std::endl;
+  
+    std::cerr << "holdOutLossScratch: " << holdOutLossScratch << std::endl;
+  }
+  
+  
+  CPPUNIT_ASSERT_DOUBLES_EQUAL( holdOutLossIL, holdOutLossScratch, 1e-8);
+  
+  // don't waste memory
+  
+  delete regressionMethod;
+  delete regressionMethodScratch;
+  
+  for (std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin(); exTrainIt != examplesTrain.end(); exTrainIt++)
+  {
+    delete *exTrainIt;
+  } 
+
   
   
   if (verboseStartEnd)
   if (verboseStartEnd)
     std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearning done ===================== " << std::endl;   
     std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearning done ===================== " << std::endl;