Prechádzať zdrojové kódy

fixed some memory leaks in regrdf

Sven Sickert 12 rokov pred
rodič
commit
5e4f4b7f53

+ 1 - 0
regression/gpregression/RegGaussianProcess.cpp

@@ -68,6 +68,7 @@ RegGaussianProcess::RegGaussianProcess ( const RegGaussianProcess & src ) :
 {
 	kInvY = src.kInvY;
 	verbose = src.verbose;
+  useLooParameters = src.useLooParameters;
   maxIterations = src.maxIterations;
 	optimizeParameters = src.optimizeParameters;
 	optimizationMethod = src.optimizationMethod;

+ 10 - 2
regression/progs/testRegressionRDFGP.cpp

@@ -160,8 +160,9 @@ void testFrame ( Config confRDF,
   
   /*------------Initialize Variables-----------*/
   ofstream storeEvalData;
+  double trainRatio = confRDF.gD( "debug", "training_ratio", .9 );
   
-  int trainingSize = (int)(.2*xdata.size());
+  int trainingSize = (int)(trainRatio*xdata.size());
   int testingSize = xdata.size() - trainingSize;
   
   vector<int> indices;
@@ -216,9 +217,9 @@ void testFrame ( Config confRDF,
     
     cerr << "Initializing leaf regression method " << leafReg << "...";
     RegressionAlgorithm *leafRegression = NULL;
+    Kernel *kernel_function = NULL;
     if ( leafReg == "GaussProcess" )
     {
-      Kernel *kernel_function = NULL;
       kernel_function = new KernelExp ( *(kernel_template) );
       leafRegression = new RegGaussianProcess( &confRDF, kernel_function, "GPRegression" );
     }
@@ -266,6 +267,7 @@ void testFrame ( Config confRDF,
     
     /*---------------Evaluation----------------*/
     NICE::Vector diff = testVals - predictionValues;
+    
     double mod_var = diff.StdDev()*diff.StdDev();
     double tar_var = testVals.StdDev()*testVals.StdDev();
     mef_v.set( k, (1-mod_var/tar_var) );
@@ -282,6 +284,9 @@ void testFrame ( Config confRDF,
     diff *= diff;
     diff_v.set( k, diff.Mean());
     resub_v.set( k, (diff.Mean() / tar_var) );
+    
+    if (kernel_function != NULL)
+      delete kernel_function;
   }
   
   /*------------------Output-------------------*/
@@ -290,6 +295,9 @@ void testFrame ( Config confRDF,
   cout << "  Correlation: " << corr_v.Mean() << endl;
   cout << "  Mean Square Error: " << diff_v.Mean() << endl;
   cout << "  Standardized MSE: " << resub_v.Mean() << endl;
+  
+  /*-----------------Cleaning------------------*/
+  delete kernel_template;
 }
 
 

+ 7 - 2
regression/regcombination/RegPreRandomForests.cpp

@@ -31,13 +31,18 @@ RegPreRandomForests::~RegPreRandomForests()
   if ( randomforest != NULL )
     delete randomforest;
   
-  // delte all regression methods in the leafs
+  // delete all regression methods in the leafs
   for ( map<RegressionNode *, RegressionAlgorithm * >::const_iterator it = leafRegressions.begin();
         it != leafRegressions.end(); it++ )
   {
     RegressionAlgorithm * lr = it->second;
-    delete lr;
+    if ( lr != NULL )
+      delete lr;
   }
+  
+  // delete regression prototype
+  if ( leafRegressionPrototype != NULL )
+    delete leafRegressionPrototype;
 }
 
 void RegPreRandomForests::teach ( const VVector & X, const Vector & y )

+ 2 - 0
regression/regressionbase/RegressionAlgorithmKernel.cpp

@@ -50,6 +50,8 @@ void RegressionAlgorithmKernel::teach ( const VVector & X, const NICE::Vector &
 	kernelData->updateCholeskyFactorization();
 
 	teach ( kernelData, this->y );
+  
+  delete kernelData;
 }
 
 double RegressionAlgorithmKernel::predict ( const NICE::Vector & x )