Browse Source

fixed mex function for CodebookRandomForest, incl. MATLAB unit test / demo code

Johannes Ruehle 11 years ago
parent
commit
4c6463c2a8

+ 255 - 270
features/simplefeatures/matlab/CodebookRandomForestMex.cpp

@@ -26,30 +26,24 @@
 #include "gp-hik-core/matlab/ConverterMatlabToNICE.h"
 #include "gp-hik-core/matlab/ConverterNICEToMatlab.h"
 
-#include "HelperDataConversionMex.h"
-
 using namespace std; //C basics
 using namespace NICE;  // nice-core
 
+#define DEBUG_VERBOSE
 
 NICE::Config parseParametersERC(const mxArray *prhs[], int nrhs)
 {
   NICE::Config conf;
-  
-  // if first argument is the filename of an existing config file,
-  // read the config accordingly
-  
-  int i_start ( 0 );
-  std::string variable = MatlabConversion::convertMatlabToString(prhs[i_start]);
-  if(variable == "conf")
+
+  // Check parameters
+  if ( nrhs % 2 == 1 )
   {
-      conf = NICE::Config ( MatlabConversion::convertMatlabToString( prhs[i_start+1] )  );
-      i_start = i_start+2;
+      mexErrMsgTxt("parseParametersERC: uneven number of config arguments.");
   }
-  
+
   // now run over all given parameter specifications
   // and add them to the config
-  for( int i=i_start; i < nrhs; i+=2 )
+  for( int i=0; i < nrhs; i+=2 )
   {
     std::string variable = MatlabConversion::convertMatlabToString(prhs[i]);
     
@@ -65,13 +59,45 @@ NICE::Config parseParametersERC(const mxArray *prhs[], int nrhs)
 
     //std::string builder_section = conf->gS(section, "builder_section", "DTBRandom");
 
+#ifdef DEBUG_VERBOSE
+    std::cerr << "config variable: "<< variable << std::endl;
+#endif
+    if(variable == "conf")
+    {
+        // if first argument is the filename of an existing config file,
+        // read the config accordingly
 
-    if( variable == "number_of_trees")
+        conf = NICE::Config ( MatlabConversion::convertMatlabToString( prhs[i+1] )  );
+#ifdef DEBUG_VERBOSE
+        std::cerr << "conf " << MatlabConversion::convertMatlabToString( prhs[i+1] ) << std::endl;
+#endif
+    }
+    else if( variable == "number_of_trees")
+    {
+        if ( mxIsInt32( prhs[i+1] ) )
+        {
+            int value = MatlabConversion::convertMatlabToInt32(prhs[i+1]);
+            conf.sI("RandomForest", variable, value);
+#ifdef DEBUG_VERBOSE
+            std::cerr << "number_of_trees " << value << std::endl;
+#endif
+        }
+        else
+        {
+            std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Int32 expected.";
+            mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
+        }
+
+    }
+    else if( variable == "maxDepthTree")
     {
         if ( mxIsInt32( prhs[i+1] ) )
         {
             int value = MatlabConversion::convertMatlabToInt32(prhs[i+1]);
-            conf.sI("FPCRandomForests", variable, value);
+            conf.sI("CodebookRandomForest", variable, value);
+#ifdef DEBUG_VERBOSE
+            std::cerr << "maxDepthTree " << value << std::endl;
+#endif
         }
         else
         {
@@ -80,6 +106,23 @@ NICE::Config parseParametersERC(const mxArray *prhs[], int nrhs)
         }
 
     }
+    else if( variable == "verbose")
+    {
+        if ( mxIsLogical( prhs[i+1] ) )
+        {
+            bool bVerbose = MatlabConversion::convertMatlabToBool(prhs[i+1]);
+            conf.sB("CodebookRandomForest", variable, bVerbose);
+#ifdef DEBUG_VERBOSE
+            std::cerr << "verbose " << bVerbose << std::endl;
+#endif
+        }
+        else
+        {
+            std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Boolean expected.";
+            mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
+        }
+
+    }
 
   }
 
@@ -89,6 +132,10 @@ NICE::Config parseParametersERC(const mxArray *prhs[], int nrhs)
 // MAIN MATLAB FUNCTION
 void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
 {    
+#ifdef DEBUG_VERBOSE
+    std::cerr << "Verbose Debug Output on (compiled with debug definition)." << std::endl;
+#endif
+
     // get the command string specifying what to do
     if (nrhs < 1)
         mexErrMsgTxt("No commands and options passed... Aborting!");        
@@ -99,25 +146,6 @@ void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
     std::string cmd = MatlabConversion::convertMatlabToString( prhs[0] );
       
         
-    // create object
-    if ( !strcmp("new", cmd.c_str() ) )
-    {
-        // check output variable
-        if (nlhs != 1)
-            mexErrMsgTxt("New: One output expected.");
-        
-        // read config settings
-        //NICE::Config conf = parseParametersGPHIKRegression(prhs+1,nrhs-1);
-
-        int nMaxDepth = 10;
-        // create class instance
-        OBJREC::CodebookRandomForest *pRandomForest = new OBJREC::CodebookRandomForest(nMaxDepth);
-         
-        // handle to the C++ instance
-        plhs[0] = MatlabConversion::convertPtr2Mat<OBJREC::CodebookRandomForest>( pRandomForest );
-        return;
-    }
-    
     // in all other cases, there should be a second input,
     // which the be the class instance handle
     if (nrhs < 2)
@@ -131,61 +159,77 @@ void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
         return;
     }
     
-    // get the class instance pointer from the second input
-    // every following function needs the object
-    OBJREC::CodebookRandomForest *pCodebookClusterer = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
-    
-    
     ////////////////////////////////////////
     //  Check which class method to call  //
     ////////////////////////////////////////
     
     
     // standard train - assumes initialized object
-    if (!strcmp("train", cmd.c_str() ))
+    if (!strcmp("createAndTrain", cmd.c_str() ))
     {
         // Check parameters
-        if (nlhs < 0 || nrhs < 4)
+        if (nlhs < 0 || nrhs < 4 )
         {
             mexErrMsgTxt("Train: Unexpected arguments.");
         }
         
         //------------- read the data --------------
-        if (nrhs != 4)
+        if (nrhs < 4)
         {
-            mexErrMsgTxt("needs 2 matrix inputs, first the training features, second the sample labels");
+            mexErrMsgTxt("needs at least 2 matrix inputs, first the training features, second the sample labels");
             return;
         }
 
-        const mxArray *t_pArrTrainData   = prhs[2];
-        const mxArray *t_pArrTrainLabels = prhs[3];
+        const mxArray *t_pArrTrainData   = prhs[1];
+        const mxArray *t_pArrTrainLabels = prhs[2];
 
-        //std::vector< const NICE::SparseVector *> examplesTrain;
+
+        //----------------- parse config options  -------------
+        NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
 
         int iNumFeatureDimension = mxGetM( t_pArrTrainData ); // feature dimensions
+#ifdef DEBUG_VERBOSE
+        std::cerr << "iNumFeatureDimension " << iNumFeatureDimension << std::endl;
+#endif
+        //----------------- create examples object -------------
+        NICE::Vector t_vecLabelsTrain = MatlabConversion::convertDoubleVectorToNice( t_pArrTrainLabels );
+        NICE::Matrix t_matDataTrain   = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
 
         OBJREC::Examples examplesTrain;
 
-        bool bRet = MatlabConversion::convertDoubleRawPointersToExamples( t_pArrTrainData, t_pArrTrainLabels, examplesTrain);
-        if( ~bRet )
+        bool bRet = OBJREC::Examples::wrapExamplesAroundFeatureMatrix( t_matDataTrain, t_vecLabelsTrain, examplesTrain );
+        if( !bRet )
         {
-            mexErrMsgTxt("Train: Error creating Examples from raw feature matrix and labels.");
+            mexErrMsgTxt("createAndTrain: Error creating Examples from raw feature matrix and labels.");
         }
 
-        //----------------- train our random Forest -------------
-        // read config settings
+        //----------------- create raw feature mapping -------------
         OBJREC::FeaturePool fp;
         OBJREC::VectorFeature *pVecFeature = new OBJREC::VectorFeature(iNumFeatureDimension);
         pVecFeature->explode(fp);
 
-        NICE::Config conf = parseParametersERC(prhs+1,nrhs-1);
-
-        OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&conf,"FPCRandomForests");
-
+#ifdef DEBUG_VERBOSE
+        //----------------- debug features -------------
+        OBJREC::Example t_Exp = examplesTrain[0].second;
+        NICE::Vector t_FeatVector;
+        fp.calcFeatureVector(t_Exp, t_FeatVector);
+        std::cerr << "first full Feature Vec: " <<t_FeatVector << std::endl;
+#endif
+        //----------------- train our random Forest -------------
+        OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&conf,"RandomForest");
         pRandForest->train(fp, examplesTrain);
 
-        pCodebookClusterer->setClusterForest( pRandForest );
+        //----------------- create codebook ERC clusterer -------------
+        int nMaxDepth        = conf.gI("CodebookRandomForest", "maxDepthTree",10);
+        int nMaxCodebookSize = conf.gI("CodebookRandomForest", "maxCodebookSize",100);
+#ifdef DEBUG_VERBOSE
+        std::cerr << "maxDepthTree " << nMaxDepth << std::endl;
+        std::cerr << "nMaxCodebookSize " << nMaxCodebookSize << std::endl;
+#endif
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(pRandForest, nMaxDepth,nMaxCodebookSize);
 
+        // handle to the C++ instance
+        plhs[0] = MatlabConversion::convertPtr2Mat<OBJREC::CodebookRandomForest>( pCodebookRandomForest );
 
         //----------------- clean up -------------
 
@@ -194,265 +238,206 @@ void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
         // delete all "exploded" features, they are internally cloned in the random trees anyway
         fp.destroy();
         //
+        examplesTrain.clean();
 
-        for(int i=0;i<examplesTrain.size(); i++)
-        {
-            if ( examplesTrain[i].second.vec != NULL )
-            {
-                delete examplesTrain[i].second.vec;
-                examplesTrain[i].second.vec = NULL;
-            }
-        }
-        
         return;
     }
-/*
-    
-    // perform regression    
-    if ( !strcmp("estimate", cmd.c_str() ) )
+    ///// generate Histogram over trees
+    else if (!strcmp("generateHistogram", cmd.c_str() ))
     {
-        // Check parameters
-        if ( (nlhs < 0) || (nrhs < 2) )
+        //------------- read the data --------------
+        if (nrhs < 3)
         {
-            mexErrMsgTxt("Test: Unexpected arguments.");
+            mexErrMsgTxt("needs at least 1 matrix input, first the training features");
+            return;
+        }
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
+        if( pCodebookRandomForest == NULL )
+        {
+            mexErrMsgTxt("classify: No valid trained classifier given");
         }
-        
-        //------------- read the data --------------
 
-        double result;
-        double uncertainty;        
+        //----------------- convert matlab data into NICE data -------------
+        const mxArray *t_pArrTrainData   = prhs[2];
 
-        if ( mxIsSparse( prhs[2] ) )
+        NICE::Matrix matDataTrain = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
+        size_t numTrainSamples      = matDataTrain.cols();
+        size_t iNumFeatureDimension = matDataTrain.rows();
+        size_t iNumCodewords        = pCodebookRandomForest->getCodebookSize();
+#ifdef DEBUG_VERBOSE
+        std::cerr << "numTrainSamples "      << numTrainSamples         << std::endl;
+        std::cerr << "iNumFeatureDimension " << iNumFeatureDimension    << std::endl;
+        std::cerr << "iNumCodewords "        << iNumCodewords           << std::endl;
+#endif
+
+        //----------------- parse config options  -------------
+        bool bVerboseOutput = false;
+        if( nrhs > 3)
         {
-            NICE::SparseVector * example;
-            example = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
-            regressor->estimate ( example,  result, uncertainty );
-            
-            //----------------- clean up -------------
-            delete example;
+            NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
+            bVerboseOutput = conf.gB("CodebookRandomForest", "verbose", false);
         }
-        else
+
+        //----------------- quantize samples into histogram -------------
+        NICE::Vector histogram(iNumCodewords, 0.0f);
+
+        const double *pDataPtr = matDataTrain.getDataPointer();
+        int t_iCodebookEntry; double t_fWeight; double t_fDistance;
+        for (size_t i = 0; i < numTrainSamples; i++, pDataPtr+= iNumFeatureDimension )
         {
-            NICE::Vector * example;
-            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) ); 
-            regressor->estimate ( example,  result, uncertainty );
-            
-            //----------------- clean up -------------
-            delete example;            
+            const NICE::Vector t_VecTrainData( pDataPtr , iNumFeatureDimension);
+            pCodebookRandomForest->voteVQ(t_VecTrainData, histogram, t_iCodebookEntry, t_fWeight, t_fDistance );
+            if(bVerboseOutput)
+                std::cerr << i << ": " << "CBEntry " << t_iCodebookEntry << " Weight: " << t_fWeight << " Distance: " << t_fDistance << std::endl;
         }
-          
-          
-
-          // output
-          plhs[0] = mxCreateDoubleScalar( result ); 
-          
-          
-          if(nlhs >= 2)
-          {
-            plhs[1] = mxCreateDoubleScalar( uncertainty );          
-          }
-          return;
+
+        //----------------- convert NICE histogram into MATLAB data -------------
+        plhs[0] = MatlabConversion::convertVectorFromNice(histogram);
+
+        return;
     }
-    
-    // Uncertainty prediction    
-    if ( !strcmp("uncertainty", cmd.c_str() ) )
+    ///// get distribution of classes per sample
+    else if (!strcmp("calcClassDistributionPerSample", cmd.c_str() ))
     {
-        // Check parameters
-        if ( (nlhs < 0) || (nrhs < 2) )
-        {
-            mexErrMsgTxt("Test: Unexpected arguments.");
-        }
-        
-        double uncertainty;        
-        
         //------------- read the data --------------
-
-        if ( mxIsSparse( prhs[2] ) )
+        if (nrhs < 3)
         {
-            NICE::SparseVector * example;
-            example = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
-            regressor->predictUncertainty( example, uncertainty );
-            
-            //----------------- clean up -------------
-            delete example;            
+            mexErrMsgTxt("needs at least 1 matrix input, first the training features");
+            return;
         }
-        else
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
+        if( pCodebookRandomForest == NULL )
         {
-            NICE::Vector * example;
-            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) ); 
-            regressor->predictUncertainty( example, uncertainty );
-            
-            //----------------- clean up -------------
-            delete example;            
+            mexErrMsgTxt("classify: No valid trained classifier given");
         }
-        
-       
 
-          // output
-          plhs[0] = mxCreateDoubleScalar( uncertainty );                    
-          return;
-    }    
-    
-    
-    // Test - evaluate regressor on whole test set  
-    if ( !strcmp("testL2loss", cmd.c_str() ) )
-    {        
-        // Check parameters
-        if (nlhs < 0 || nrhs < 3)
-            mexErrMsgTxt("Test: Unexpected arguments.");
-        //------------- read the data --------------
-        
-        
-        bool dataIsSparse ( mxIsSparse( prhs[2] ) );
-        
-        std::vector< const NICE::SparseVector *> dataTest_sparse;
-        NICE::Matrix dataTest_dense;
+        //----------------- convert matlab data into NICE data -------------
+        const mxArray *t_pArrTrainData   = prhs[2];
 
-        if ( dataIsSparse )
+        NICE::Matrix matData = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
+        size_t numTrainSamples      = matData.cols();
+        size_t iNumFeatureDimension = matData.rows();
+#ifdef DEBUG_VERBOSE
+        std::cerr << "numTrainSamples "      << numTrainSamples         << std::endl;
+        std::cerr << "iNumFeatureDimension " << iNumFeatureDimension    << std::endl;
+#endif
+
+        //----------------- parse config options  -------------
+        bool bVerboseOutput = false;
+        if( nrhs > 3)
         {
-            dataTest_sparse = converterMtoNICE.convertSparseMatrixToNice( prhs[2] );
+            NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
+            bVerboseOutput = conf.gB("CodebookRandomForest", "verbose", false);
         }
-        else
-        {    
-            dataTest_dense = converterMtoNICE.convertDoubleMatrixToNice(prhs[2]);          
-        }        
-
-        NICE::Vector yValuesTest;
-        yValuesTest = converterMtoNICE.convertDoubleVectorToNice(prhs[3]);
-	
-        int i_numTestSamples ( yValuesTest.size() );
-        
-	double l2loss ( 0.0 );
-	
-	NICE::Vector scores;
-	NICE::Vector::iterator itScores;
-	if ( nlhs >= 2 )
-	{
-	  scores.resize( i_numTestSamples );
-	  itScores = scores.begin();
-	}
-          
-          
-
-        // ------------------------------------------
-        // ------------- REGRESSION --------------
-        // ------------------------------------------          
-        
-        NICE::Timer t;
-        double testTime (0.0);
-        
-
 
-        for (int i = 0; i < i_numTestSamples; i++)
+        //----------------- quantize samples into histogram -------------
+        const double *pDataPtr = matData.getDataPointer();
+        for (size_t i = 0; i < numTrainSamples; i++, pDataPtr+= iNumFeatureDimension )
         {
-            //----------------- convert data to sparse data structures ---------
-          
-
-            double result;
-
-            if ( dataIsSparse )
-            {                
-              // and perform regression
-              t.start();
-              regressor->estimate( dataTest_sparse[ i ], result);
-              t.stop();
-              testTime += t.getLast();
-            }
-            else
+            NICE::SparseVector votes;
+            NICE::Vector distribution;
+            const NICE::Vector t_VecTrainData( pDataPtr , iNumFeatureDimension);
+            pCodebookRandomForest->voteAndClassify(t_VecTrainData, votes, distribution);
+            if(bVerboseOutput)
             {
-                NICE::Vector example ( dataTest_dense.getRow(i) );
-              // and perform regression
-              t.start();
-              regressor->estimate( &example, result );
-              t.stop();
-              testTime += t.getLast();                
+                NICE::Vector t_fullVector;
+                votes.convertToVectorT( t_fullVector );
+                std::cerr << i << ": " << "votes " << t_fullVector << " distribution: " << distribution << std::endl;
             }
+        }
 
-            l2loss += pow ( yValuesTest[i] - result, 2); 
-	    
-	    if ( nlhs >= 2 )
-	    {
-	      *itScores = result;
-	      itScores++;
-	    }	    
+        //----------------- convert NICE histogram into MATLAB data -------------
+        //plhs[0] = MatlabConversion::convertVectorFromNice(histogram);
+        plhs[0] =  mxCreateLogicalScalar( true );
+
+        return;
+    }
+    // store codebook random forest to file
+    else if ( strcmp("storeToFile", cmd.c_str()) == 0 )
+    {
+        //------------- read the data --------------
+        if (nrhs != 3)
+        {
+            mexErrMsgTxt("needs a string for filename to save to");
+            return;
         }
-        
-        std::cerr << "Time for testing: " << testTime << std::endl;          
-        
-        // clean up
-        if ( dataIsSparse )
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
+        if( pCodebookRandomForest == NULL )
         {
-            for ( std::vector<const NICE::SparseVector *>::iterator it = dataTest_sparse.begin(); it != dataTest_sparse.end(); it++) 
-                delete *it;
+            mexErrMsgTxt("classify: No valid trained classifier given");
         }
-        
 
+        bool bSuccess = false;
 
-        plhs[0] = mxCreateDoubleScalar( l2loss );
+        try
+        {
+            std::string sStoreFilename = MatlabConversion::convertMatlabToString( prhs[2] );
+            std::ofstream ofs;
+            ofs.open (sStoreFilename.c_str(), std::ofstream::out);
+            pCodebookRandomForest->store( ofs );
+            ofs.close();
+            bSuccess = true;
+        }
+        catch( std::exception &e)
+        {
+            std::cerr << "exception occurred: " << e.what() << std::endl;
+            mexErrMsgTxt("storing failed");
+        }
+
+        plhs[0] =  mxCreateLogicalScalar( bSuccess );
 
-        if(nlhs >= 2)
-          plhs[1] = converterNICEtoM.convertVectorFromNice(scores);          
-          
-          
         return;
     }
-    
+    // restore codebook random forest from file
+    else if (!strcmp("restoreFromFile", cmd.c_str() ))
+    {
+        //------------- read the data --------------
+        if (nrhs != 2)
+        {
+            mexErrMsgTxt("needs a string for filename to load from");
+            return;
+        }
 
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pRestoredCRF = new OBJREC::CodebookRandomForest(-1, -1);
+
+        bool bSuccess = false;
+
+        try
+        {
+            std::string sStoreFilename = MatlabConversion::convertMatlabToString( prhs[1] );
+            std::ifstream ifs;
+            ifs.open( sStoreFilename.c_str() );
+            pRestoredCRF->restore( ifs );
+            ifs.close();
+            bSuccess = true;
+        }
+        catch( std::exception &e)
+        {
+            std::cerr << "exception occurred: " << e.what() << std::endl;
+            mexErrMsgTxt("restoring failed");
+        }
+
+        // handle to the C++ instance
+        if(bSuccess)
+            plhs[0] = MatlabConversion::convertPtr2Mat<OBJREC::CodebookRandomForest>( pRestoredCRF );
+        else
+            plhs[0] = mxCreateLogicalScalar(false);
 
-    ///////////////////// INTERFACE PERSISTENT /////////////////////
-    // interface specific methods for store and restore
-    ///////////////////// INTERFACE PERSISTENT /////////////////////    
-    
-  
-    
-    // store the regressor  to an external file
-    if ( !strcmp("store", cmd.c_str() ) || !strcmp("save", cmd.c_str() ) )
-    {
-        // Check parameters
-        if ( nrhs < 3 )
-            mexErrMsgTxt("store: no destination given.");        
-               
-        std::string s_destination = converterMtoNICE.convertMatlabToString( prhs[2] );
-          
-        std::filebuf fb;
-        fb.open ( s_destination.c_str(), ios::out );
-        std::ostream os(&fb);
-        //
-        regressor->store( os );
-        //   
-        fb.close();        
-            
         return;
     }
-    
-    // load regressor from external file    
-    if ( !strcmp("restore", cmd.c_str() ) || !strcmp("load", cmd.c_str() ) )
-    {
-        // Check parameters
-        if ( nrhs < 3 )
-            mexErrMsgTxt("restore: no destination given.");        
-               
-        std::string s_destination = converterMtoNICE.convertMatlabToString( prhs[2] );
-        
-        std::cerr << " aim at restoring the regressor from " << s_destination << std::endl;
-          
-        std::filebuf fbIn;
-        fbIn.open ( s_destination.c_str(), ios::in );
-        std::istream is (&fbIn);
-        //
-        regressor->restore( is );
-        //   
-        fbIn.close();        
-            
-        return;
-    }    
 
-*/
     
     // Got here, so command not recognized
     
     std::string errorMsg (cmd.c_str() );
-    errorMsg += " -- command not recognized.";
+    errorMsg += " -- command not recognized.";
     mexErrMsgTxt( errorMsg.c_str() );
 
 }

+ 34 - 2
features/simplefeatures/matlab/unittestCodebookRandomForestMex.m

@@ -4,10 +4,42 @@
 
 %% test Creation of OBJREC::Examples class from sample matrix and label data
 
-%try
-    hClassifier = CodebookRandomForestMex('new');
+    numSamples  = 10000;
+    numFeatures = 5;
+    maxClass    = 3;
 
+    matFeatures = rand(numSamples, numFeatures, 'double')';
+    matLabels = randi(maxClass, numSamples,1,'double');
+%%  create and train
+    hClassifier = CodebookRandomForestMex('createAndTrain',... 
+                            matFeatures, matLabels,...
+                            'conf', 'config.conf');
+%%  calcClassDistributionPerSample
+    bSuccess = CodebookRandomForestMex('calcClassDistributionPerSample',... 
+                                        hClassifier,...
+                                        matFeatures,'verbose',true );
+    assert(bSuccess);
+%%  generateHistogram
+    matHistogram = CodebookRandomForestMex('generateHistogram',... 
+                                        hClassifier,...
+                                        matFeatures, 'verbose',false);
+    
+%%  store
+    bSuccess = CodebookRandomForestMex('storeToFile',... 
+                                        hClassifier,...
+                                        'codebookrf.stored.txt');    
+%%  restore
+    hRestoredClassifier = CodebookRandomForestMex('restoreFromFile',... 
+                                            'codebookrf.stored.txt');
+    assert( ~(hRestoredClassifier == false) );
+    matHistogramNew = CodebookRandomForestMex('generateHistogram',... 
+                                        hRestoredClassifier,...
+                                        matFeatures, 'verbose',false);
+    d = matHistogramNew-matHistogram;
+    assert( sum(d(:)) == 0 ); % histogram are alike
+%%    
     CodebookRandomForestMex('delete', hClassifier);
+    CodebookRandomForestMex('delete', hRestoredClassifier);
 %catch ecpn
 %    disp( ecpn );
 %end