Просмотр исходного кода

restore options - bug fixes - more comfortable system is in progress

Alexander Freytag 11 лет назад
Родитель
Commit
c32b32acdf
5 измененных файлов с 252 добавлено и 62 удалено
  1. 150 17
      FMKGPHyperparameterOptimization.cpp
  2. 4 0
      FastMinKernel.cpp
  3. 22 2
      GPHIKClassifier.cpp
  4. 20 14
      matlab/GPHIK.cpp
  5. 56 29
      progs/toyExample.cpp

+ 150 - 17
FMKGPHyperparameterOptimization.cpp

@@ -41,12 +41,16 @@ using namespace std;
 
 FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization()
 {
+  // initialize pointer variables
   pf = NULL;
   eig = NULL;
   linsolver = NULL;
   fmk = NULL;
   q = NULL;
   precomputedTForVarEst = NULL;
+  ikmsum  = NULL;
+  
+  // initialize boolean flags
   verbose = false;
   verboseTime = false;
   debug = false;
@@ -58,13 +62,14 @@ FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization()
 
 FMKGPHyperparameterOptimization::FMKGPHyperparameterOptimization ( const Config *_conf, ParameterizedFunction *_pf, FastMinKernel *_fmk, const string & _confSection )
 {
-  //default settings, may become overwritten lateron
+   // initialize pointer variables
   pf = NULL;
   eig = NULL;
   linsolver = NULL;
   fmk = NULL;
   q = NULL;
   precomputedTForVarEst = NULL;
+  ikmsum = NULL;
   
   //stupid unneeded default values
   binaryLabelPositive = -1;
@@ -106,10 +111,18 @@ FMKGPHyperparameterOptimization::~FMKGPHyperparameterOptimization()
 void FMKGPHyperparameterOptimization::initialize ( const Config *_conf, ParameterizedFunction *_pf, FastMinKernel *_fmk, const std::string & _confSection )
 {
   if ( this->fmk != NULL )
+  {
+    std::cerr << "fmk deleted" << std::endl;
     delete this->fmk;
+  }
   if ( _fmk != NULL )
+  {
+    std::cerr << "fmk copied" << std::endl;    
     this->fmk = _fmk;
+  }
+  
   this->pf = _pf;
+  std::cerr << "pf copied" << std::endl;
   
   
   std::cerr << "------------" << std::endl;
@@ -1151,23 +1164,27 @@ t.start();*/
 
 void FMKGPHyperparameterOptimization::restore ( std::istream & is, int format )
 {
+  bool b_restoreVerbose ( false );
   if ( is.good() )
   {
+    if ( b_restoreVerbose ) 
+      std::cerr << " in FMKGP restore" << std::endl;
     //load the underlying data
     if (fmk != NULL)
       delete fmk;
+    if ( b_restoreVerbose ) 
+      std::cerr << " create FMK" << std::endl;
     fmk = new FastMinKernel;
-    fmk->restore(is,format);    
-    
-    //now set up the GHIK-things in ikmsums
-    ikmsum->addModel ( new GMHIKernel ( fmk, this->pf, this->q ) );
+    if ( b_restoreVerbose ) 
+      std::cerr << " restore FMK" << std::endl;
+    fmk->restore(is,format); 
+    if ( b_restoreVerbose ) 
+      std::cerr << "fmk->restore done " << std::endl;
     
     is.precision ( numeric_limits<double>::digits10 + 1 );
 
     string tmp;
     is >> tmp; //class name
-
-    is >> tmp;
     
     is >> tmp; //precomputedA:
     is >> tmp; //size:
@@ -1176,6 +1193,8 @@ void FMKGPHyperparameterOptimization::restore ( std::istream & is, int format )
     is >> preCompSize;
     precomputedA.clear();
 
+    if ( b_restoreVerbose ) 
+      std::cerr << "restore precomputedA with size: " << preCompSize << std::endl;
     for ( int i = 0; i < preCompSize; i++ )
     {
       int nr;
@@ -1192,6 +1211,8 @@ void FMKGPHyperparameterOptimization::restore ( std::istream & is, int format )
     is >> preCompSize;
     precomputedB.clear();
 
+    if ( b_restoreVerbose ) 
+      std::cerr << "restore precomputedB with size: " << preCompSize << std::endl;
     for ( int i = 0; i < preCompSize; i++ )
     {
       int nr;
@@ -1202,14 +1223,21 @@ void FMKGPHyperparameterOptimization::restore ( std::istream & is, int format )
       precomputedB.insert ( std::pair<int, PrecomputedType> ( nr, pct ) );
     }    
     
-    is >> tmp;
-    int precomputedTSize;
+    is >> tmp; //precomputedT: std::cerr << " content of tmp: " << tmp << std::endl; 
+    is >> tmp; //size: std::cerr << " content of tmp: " << tmp << std::endl;
+    
+    int precomputedTSize ( 0 );
     is >> precomputedTSize;
 
     precomputedT.clear();
+    
+    if ( b_restoreVerbose ) 
+      std::cerr << "restore precomputedT with size: " << precomputedTSize << std::endl;
 
     if ( precomputedTSize > 0 )
     {
+      if ( b_restoreVerbose ) 
+        std::cerr << " restore precomputedT" << std::endl;
       is >> tmp;
       int sizeOfLUT;
       is >> sizeOfLUT;    
@@ -1226,24 +1254,43 @@ void FMKGPHyperparameterOptimization::restore ( std::istream & is, int format )
         }
         precomputedT.insert ( std::pair<int, double*> ( index, array ) );
       }
-    }    
+    } 
+    else
+    {
+      if ( b_restoreVerbose ) 
+        std::cerr << " skip restoring precomputedT" << std::endl;
+    }
 
     //now restore the things we need for the variance computation
     is >> tmp;
+    if ( b_restoreVerbose ) 
+      std::cerr << " content of tmp: " << tmp << std::endl;
     int sizeOfAForVarEst;
     is >> sizeOfAForVarEst;
-    if ( sizeOfAForVarEst > 0 )
     
-    if (precomputedAForVarEst.size() > 0)
+    if ( b_restoreVerbose ) 
+      std::cerr << "restore precomputedAForVarEst with size: " << sizeOfAForVarEst << std::endl;
+    
+    if (sizeOfAForVarEst > 0)
     {
+      precomputedAForVarEst.clear();
+      
       precomputedAForVarEst.setIoUntilEndOfFile ( false );
+      std::cerr << "restore precomputedAForVarEst" << std::endl;
       precomputedAForVarEst.restore ( is, format );
     }    
 
     is >> tmp; //precomputedTForVarEst
+      if ( b_restoreVerbose ) 
+    std::cerr << "content of tmp: " << tmp << std::endl;
     is >> tmp; // NOTNULL or NULL
+    if ( b_restoreVerbose ) 
+      std::cerr << "content of tmp: " << tmp << std::endl;    
     if (tmp.compare("NOTNULL") == 0)
     {
+      if ( b_restoreVerbose ) 
+        std::cerr << "restore precomputedTForVarEst" << std::endl;
+      
       int sizeOfLUT;
       is >> sizeOfLUT;      
       precomputedTForVarEst = new double [ sizeOfLUT ];
@@ -1254,37 +1301,118 @@ void FMKGPHyperparameterOptimization::restore ( std::istream & is, int format )
     }
     else
     {
+      if ( b_restoreVerbose ) 
+        std::cerr << "skip restoring of precomputedTForVarEst" << std::endl;
       if (precomputedTForVarEst != NULL)
         delete precomputedTForVarEst;
     }
     
+    if ( b_restoreVerbose ) 
+      std::cerr << "restore eigenMax and eigenMaxVectors " << std::endl;
+    
     //restore eigenvalues and eigenvectors
+    is >> tmp; //"eigenmax:"
+    if ( b_restoreVerbose ) 
+      std::cerr << "content of tmp: " << tmp << std::endl;
     is >> eigenMax;
+    if ( b_restoreVerbose ) 
+      std::cerr << "loaded the following eigenMax: " << eigenMax << std::endl;
+    is >> tmp; //"eigenMaxVectors:"
+    if ( b_restoreVerbose ) 
+      std::cerr << "content of tmp: " << tmp << std::endl;
     is >> eigenMaxVectors;
+    if ( b_restoreVerbose ) 
+      std::cerr << "loaded the following eigenMaxVectors: " << eigenMaxVectors << std::endl;
+    
+    if ( b_restoreVerbose ) 
+      std::cerr << " create ikmsum object" << std::endl;
+    
+    if ( ikmsum == NULL )
+    {
+        ikmsum = new IKMLinearCombination (); 
+        if ( b_restoreVerbose ) 
+          std::cerr << "ikmsum object created" << std::endl;
+    }
+    else
+    {
+      if ( b_restoreVerbose ) 
+        std::cerr << "ikmsum object already existing" << std::endl;
+    }
+      
 
-    IKMLinearCombination *ikmsum = new IKMLinearCombination ();
-
+    is >> tmp; //"numberOfModels:"
+    if ( b_restoreVerbose ) 
+      std::cerr << "content of tmp: " << tmp << std::endl;
     int nrOfModels ( 0 );
     is >> nrOfModels;
+    if ( b_restoreVerbose ) 
+      std::cerr << "number of models to add in total: " << nrOfModels << std::endl;
+    
+    if ( b_restoreVerbose ) 
+      std::cerr << " restore IKMNoise " << std::endl;
     
     //the first one is always our noise-model
     IKMNoise * ikmnoise = new IKMNoise ();
     ikmnoise->restore ( is, format );
 
+    
+    if ( b_restoreVerbose ) 
+      std::cerr << " add ikmnoise to ikmsum object " << std::endl;
     ikmsum->addModel ( ikmnoise );
 
     //NOTE are there any more models you added? then add them here respectively in the correct order
     //.....  
 
 
-    //the last one is the GHIK - which we do not have to restore, but simply reset it lateron
+    //the last one is the GHIK - which we do not have to restore, but simply reset it
+    if ( b_restoreVerbose ) 
+      std::cerr << " add GMHIKernel" << std::endl;
+    ikmsum->addModel ( new GMHIKernel ( fmk, this->pf, this->q ) );    
+    
+    if ( b_restoreVerbose ) 
+      std::cerr << " restore positive and negative label" << std::endl;
 
       
     //restore the class numbers for binary settings (if mc-settings, these values will be negative by default)
     is >> tmp; // "binaryLabelPositive: " 
+    if ( b_restoreVerbose ) 
+      std::cerr << " content of tmp: " << tmp << std::endl; 
     is >> binaryLabelPositive;
+    if ( b_restoreVerbose ) 
+      std::cerr << " content of binaryLabelPositive: " << binaryLabelPositive << std::endl; 
     is >> tmp; // " binaryLabelNegative: "
-    is >> binaryLabelNegative;          
+    if ( b_restoreVerbose ) 
+      std::cerr << " content of tmp: " << tmp << std::endl; 
+    is >> binaryLabelNegative;
+    if ( b_restoreVerbose ) 
+      std::cerr << " content of binaryLabelNegative: " << binaryLabelNegative << std::endl; 
+    
+    is >> tmp; // "labels: "
+    if ( b_restoreVerbose ) 
+      std::cerr << " content of tmp: " << tmp << std::endl;
+    is >> this->labels;    
+    if ( b_restoreVerbose ) 
+      std::cerr << " restored labels: " << labels << std::endl;
+
+    knownClasses.clear();
+    
+    if ( b_restoreVerbose ) 
+      std::cerr << " fill known classes object " << std::endl;
+    
+    if ( precomputedA.size() == 1)
+    {
+      knownClasses.insert( binaryLabelPositive );
+      knownClasses.insert( binaryLabelNegative );
+      if ( b_restoreVerbose ) 
+        std::cerr << " binary setting - added corresp. two class numbers" << std::endl;
+    }
+    else
+    {
+      for ( std::map<int, PrecomputedType>::const_iterator itA = precomputedA.begin(); itA != precomputedA.end(); itA++)
+          knownClasses.insert ( itA->first );
+      if ( b_restoreVerbose ) 
+        std::cerr << " multi class setting - added corresp. multiple class numbers" << std::endl;
+    }
   }
   else
   {
@@ -1313,6 +1441,7 @@ void FMKGPHyperparameterOptimization::store ( std::ostream & os, int format ) co
       ( preCompIt->second ).store ( os, format );
       preCompIt++;
     }
+    
     os << "precomputedB: size: " << precomputedB.size() << std::endl;
     preCompIt = precomputedB.begin();
     for ( uint i = 0; i < precomputedB.size(); i++ )
@@ -1323,7 +1452,7 @@ void FMKGPHyperparameterOptimization::store ( std::ostream & os, int format ) co
     }    
     
     
-    os << "precomputedT.size(): " << precomputedT.size() << std::endl;
+    os << "precomputedT: size: " << precomputedT.size() << std::endl;
     if ( precomputedT.size() > 0 )
     {
       int sizeOfLUT ( 0 );
@@ -1371,7 +1500,9 @@ void FMKGPHyperparameterOptimization::store ( std::ostream & os, int format ) co
     }
     
     //store the eigenvalues and eigenvectors
+    os << "eigenMax" << std::endl;
     os << eigenMax << std::endl;
+    os << "eigenMaxVectors" << std::endl;
     os << eigenMaxVectors << std::endl;
 
     //store the ikmsum object
@@ -1385,6 +1516,8 @@ void FMKGPHyperparameterOptimization::store ( std::ostream & os, int format ) co
     
     //store the class numbers for binary settings (if mc-settings, these values will be negative by default)
     os << "binaryLabelPositive: " << binaryLabelPositive << " binaryLabelNegative: " << binaryLabelNegative << std::endl;
+    
+    os << "labels: " << this->labels << std::endl;    
   }
   else
   {

+ 4 - 0
FastMinKernel.cpp

@@ -1377,6 +1377,7 @@ void FastMinKernel::restore ( std::istream & is, int format )
 {
   if (is.good())
   {
+    std::cerr << "FastMinKernel::restore  " << std::endl;
     is.precision (numeric_limits<double>::digits10 + 1);  
     
     string tmp;
@@ -1396,12 +1397,15 @@ void FastMinKernel::restore ( std::istream & is, int format )
     is >> approxSchemeInt;
     setApproximationScheme(approxSchemeInt);
    
+    std::cerr << "start restoring X_sorted  " << std::endl;
     X_sorted.restore(is,format);
+    std::cerr << " done :) " << std::endl;
    }
   else
   {
     std::cerr << "FastMinKernel::restore -- InStream not initialized - restoring not possible!" << std::endl;
   }  
+  std::cerr << " FMK restore ended " << std::endl;
 }
 void FastMinKernel::store ( std::ostream & os, int format ) const
 {

+ 22 - 2
GPHIKClassifier.cpp

@@ -50,6 +50,7 @@ GPHIKClassifier::~GPHIKClassifier()
 
 void GPHIKClassifier::init(const Config *conf, const string & confSection)
 {
+  std::cerr << " init  method " << std::endl;
   double parameterLowerBound = conf->gD(confSection, "parameter_lower_bound", 1.0 );
   double parameterUpperBound = conf->gD(confSection, "parameter_upper_bound", 5.0 );
 
@@ -59,6 +60,7 @@ void GPHIKClassifier::init(const Config *conf, const string & confSection)
   
   if (pf == NULL)
   {
+    std::cerr << " pf is currently NULL  " << std::endl;
     if ( transform == "absexp" )
     {
       this->pf = new PFAbsExp( 1.0, parameterLowerBound, parameterUpperBound );
@@ -74,6 +76,7 @@ void GPHIKClassifier::init(const Config *conf, const string & confSection)
   }
   else{
     //we already know the pf from the restore-function
+    std::cerr << " pf is already loaded" << std::endl;
   }
   this->confSection = confSection;
   this->verbose = conf->gB(confSection, "verbose", false);
@@ -82,6 +85,7 @@ void GPHIKClassifier::init(const Config *conf, const string & confSection)
   
   if (confCopy != conf)
   {  
+    std::cerr << " copy config" << std::endl;
     this->confCopy = new Config ( *conf );
     //we do not want to read until end of file for restoring    
     confCopy->setIoUntilEndOfFile(false);    
@@ -192,7 +196,9 @@ void GPHIKClassifier::classify ( const NICE::Vector * example,  int & result, Sp
 void GPHIKClassifier::train ( const std::vector< NICE::SparseVector *> & examples, const NICE::Vector & labels )
 {
   if (verbose)
+  {
     std::cerr << "GPHIKClassifier::train" << std::endl;
+  }
 
   Timer t;
   t.start();
@@ -392,6 +398,7 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
 {
   if (is.good())
   {
+    std::cerr << "restore GPHIKClassifier" << std::endl;
     is.precision (numeric_limits<double>::digits10 + 1);
     
     string tmp;
@@ -413,22 +420,35 @@ void GPHIKClassifier::restore ( std::istream & is, int format )
       fthrow(Exception, "Transformation type is unknown " << transform);
     }    
     pf->restore(is, format);
+    
+    std::cerr << "pf restored" << std::endl;
             
     //load every options we determined explicitely
     confCopy->clear();
     //we do not want to read until the end of the file
     confCopy->setIoUntilEndOfFile( false );
     confCopy->restore(is, format);
+    
+    std::cerr << "conf restored" << std::endl;
 
     //load every settings as well as default options
     this->init(confCopy, confSection); 
+    
+    std::cerr << "GPHIK initialized" << std::endl;
   
     //first read things from the config
-    gphyper->initialize ( confCopy, pf );
+    if ( gphyper == NULL )
+      gphyper = new NICE::FMKGPHyperparameterOptimization();
+    
+    gphyper->initialize ( confCopy, pf, NULL, confSection );
+    
+    std::cerr << "gphyper initialized" << std::endl;
     
     //then, load everything that we stored explicitely,
     // including precomputed matrices, LUTs, eigenvalues, ... and all that stuff
-    gphyper->restore(is, format);      
+    gphyper->restore(is, format);    
+    
+    std::cerr << "gphyper restored" << std::endl;
   }
   else
   {

+ 20 - 14
matlab/GPHIK.cpp

@@ -283,12 +283,26 @@ double convertMatlabToDouble(const mxArray *matlabDouble)
   return ptr[0];
 }
 
-Config parseParameters(const mxArray *prhs[], int nrhs)
+NICE::Config parseParameters(const mxArray *prhs[], int nrhs)
 {
-  Config conf;
-  for(int i=0;i<nrhs;i+=2)
+  NICE::Config conf;
+  
+  // if first argument is the filename of an existing config file,
+  // read the config accordingly
+  
+  int i_start ( 0 );
+  std::string variable = convertMatlabToString(prhs[i_start]);
+  if(variable == "conf")
   {
-    string variable = convertMatlabToString(prhs[i]);
+      conf = NICE::Config ( convertMatlabToString( prhs[i_start+1] )  );
+      i_start = i_start+2;
+  }
+  
+  // now run over all given parameter specifications
+  // and add them to the config
+  for( int i=i_start; i < nrhs; i+=2 )
+  {
+    std::string variable = convertMatlabToString(prhs[i]);
     if(variable == "ils_verbose")
     {
       string value = convertMatlabToString(prhs[i+1]);
@@ -398,16 +412,6 @@ Config parseParameters(const mxArray *prhs[], int nrhs)
       conf.sD("GPHIKClassifier", variable, value);
     }
 
-    if(variable == "learn_balanced")
-    {
-      string value = convertMatlabToString(prhs[i+1]);
-      if(value != "true" && value != "false")
-        mexErrMsgIdAndTxt("mexnice:error","Unexpected parameter value for \'learn_balanced\'. \'true\' or \'false\' expected.");
-      if(value == "true")
-        conf.sB("GPHIKClassifier", variable, true);
-      else
-        conf.sB("GPHIKClassifier", variable, false);
-    }
 
     if(variable == "optimize_noise")
     {
@@ -826,6 +830,8 @@ void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
             mexErrMsgTxt("restore: no destination given.");        
                
         std::string s_destination = convertMatlabToString( prhs[2] );
+        
+        std::cerr << " aim at restoring the classifier from " << s_destination << std::endl;
           
         std::filebuf fbIn;
         fbIn.open ( s_destination.c_str(), ios::in );

+ 56 - 29
progs/toyExample.cpp

@@ -46,7 +46,7 @@ int main (int argc, char* argv[])
     yMultiTrain.resize(6);
     yMultiTrain[0] = 1; yMultiTrain[1] = 1;
     yMultiTrain[2] = 2; yMultiTrain[3] = 2;
-    yMultiTrain[2] = 3; yMultiTrain[3] = 3;
+    yMultiTrain[4] = 3; yMultiTrain[5] = 3;
   }
   else 
   {
@@ -79,8 +79,9 @@ int main (int argc, char* argv[])
   std::cerr << "Number of training examples: " << examplesTrain.size() << std::endl;
   
   //----------------- train our classifier -------------
-  conf.sB("GPHIKClassifier", "verbose", false);
+//   conf.sB("GPHIKClassifier", "verbose", false);
   GPHIKClassifier * classifier  = new GPHIKClassifier ( &conf );  
+    
   classifier->train ( examplesTrain , yMultiTrain );
   
   // ------------------------------------------
@@ -101,8 +102,8 @@ int main (int argc, char* argv[])
     dataTest.set(0);
     dataTest(0,0) = 0.3; dataTest(0,1) = 0.4; dataTest(0,2) = 0.3;
     
-    yMultiTrain.resize(1);
-    yMultiTrain[0] = 1;
+    yMultiTest.resize(1);
+    yMultiTest[0] = 1;
   }
   else 
   {  
@@ -122,8 +123,44 @@ int main (int argc, char* argv[])
     }
   }
   
-  //TODO adapt this to the actual number of classes
-  NICE::Matrix confusionMatrix(3, 3, 0.0);
+  // ------------------------------------------
+  // ------------- PREPARATION --------------
+  // ------------------------------------------   
+  
+  // determine classes known during training and corresponding mapping
+  // thereby allow for non-continous class labels
+  std::set<int> classesKnownTraining = classifier->getKnownClassNumbers();
+  
+  int noClassesKnownTraining ( classesKnownTraining.size() );
+  std::map<int,int> mapClNoToIdxTrain;
+  std::set<int>::const_iterator clTrIt = classesKnownTraining.begin();
+  for ( int i=0; i < noClassesKnownTraining; i++, clTrIt++ )
+      mapClNoToIdxTrain.insert ( std::pair<int,int> ( *clTrIt, i )  );
+  
+  // determine classes known during testing and corresponding mapping
+  // thereby allow for non-continous class labels
+  std::set<int> classesKnownTest;
+  classesKnownTest.clear();
+  
+
+  // determine which classes we have in our label vector
+  // -> MATLAB: myClasses = unique(y);
+  for ( NICE::Vector::const_iterator it = yMultiTest.begin(); it != yMultiTest.end(); it++ )
+  {
+    if ( classesKnownTest.find ( *it ) == classesKnownTest.end() )
+    {
+      classesKnownTest.insert ( *it );
+    }
+  }          
+  
+  int noClassesKnownTest ( classesKnownTest.size() );  
+  std::map<int,int> mapClNoToIdxTest;
+  std::set<int>::const_iterator clTestIt = classesKnownTest.begin();
+  for ( int i=0; i < noClassesKnownTest; i++, clTestIt++ )
+      mapClNoToIdxTest.insert ( std::pair<int,int> ( *clTestIt, i )  ); 
+          
+  
+  NICE::Matrix confusionMatrix( noClassesKnownTraining, noClassesKnownTest, 0.0);
   
   NICE::Timer t;
   double testTime (0.0);
@@ -132,22 +169,16 @@ int main (int argc, char* argv[])
   
   int i_loopEnd  ( (int)dataTest.rows() );
   
-  if ( b_debug )
-  {
-    i_loopEnd = 1;
-  }
   
   for (int i = 0; i < i_loopEnd ; i++)
   {
-    //----------------- convert data to sparse data structures ---------
-    NICE::SparseVector * example =  new NICE::SparseVector( dataTest.getRow(i) );
-       
-    int result;
+    NICE::Vector example ( dataTest.getRow(i) );
     NICE::SparseVector scores;
-   
+    int result;
+    
     // and classify
     t.start();
-    classifier->classify( example, result, scores );
+    classifier->classify( &example, result, scores );
     t.stop();
     testTime += t.getLast();
     
@@ -156,24 +187,20 @@ int main (int argc, char* argv[])
     
     if ( b_debug )
     {    
-      classifier->predictUncertainty( example, uncertainty );
+      classifier->predictUncertainty( &example, uncertainty );
       std::cerr << " uncertainty: " << uncertainty << std::endl;
     }
-    else
-    {
-      confusionMatrix(result, yMultiTest[i]) += 1.0;
-    }
+    
+    confusionMatrix( mapClNoToIdxTrain.find(result)->second, mapClNoToIdxTest.find(yMultiTest[i])->second ) += 1.0;
   }
   
-  if ( !b_debug )
-  {
-    std::cerr << "Time for testing: " << testTime << std::endl;
-    
-    confusionMatrix.normalizeColumnsL1();
-    std::cerr << confusionMatrix << std::endl;
 
-    std::cerr << "average recognition rate: " << confusionMatrix.trace()/confusionMatrix.rows() << std::endl;
-  }
+  std::cerr << "Time for testing: " << testTime << std::endl;
+  
+  confusionMatrix.normalizeColumnsL1();
+  std::cerr << confusionMatrix << std::endl;
+
+  std::cerr << "average recognition rate: " << confusionMatrix.trace()/confusionMatrix.cols() << std::endl;
   
   
   return 0;