Przeglądaj źródła

DTBOblique: removed a memory leak & minor changes

Sven Sickert 10 lat temu
rodzic
commit
cc5127d84c

+ 28 - 33
classifier/fpclassifier/randomforest/DTBOblique.cpp

@@ -96,44 +96,44 @@ bool DTBOblique::entropyLeftRight (
 bool DTBOblique::adaptDataAndLabelForMultiClass (
         const int posClass,
         const int negClass,
-        NICE::Matrix & matX,
-        NICE::Vector & vecY )
+        NICE::Matrix & X,
+        NICE::Vector & y )
 {
     bool posHasExamples = false;
     bool negHasExamples = false;
 
     // One-vs-one: Transforming into {-1,0,+1} problem
     if ( useOneVsOne )
-        for ( int i = 0; i < vecY.size(); i++ )
+        for ( int i = 0; i < y.size(); i++ )
         {
-            if ( vecY[i] == posClass )
+            if ( y[i] == posClass )
             {
-                vecY[i] = 1.0;
+                y[i] = 1.0;
                 posHasExamples = true;
             }
-            else if ( vecY[i] == negClass )
+            else if ( y[i] == negClass )
             {
-                vecY[i] = -1.0;
+                y[i] = -1.0;
                 negHasExamples = true;
             }
             else
             {
-                vecY[i] = 0.0;
-                matX.setRow( i, NICE::Vector( matX.cols(), 0.0 ) );
+                y[i] = 0.0;
+                X.setRow( i, NICE::Vector( X.cols(), 0.0 ) );
             }
         }
     // One-vs-all: Transforming into {-1,+1} problem
     else
-        for ( int i = 0; i < vecY.size(); i++ )
+        for ( int i = 0; i < y.size(); i++ )
         {
-            if ( vecY[i] == posClass )
+            if ( y[i] == posClass )
             {
-                vecY[i] = 1.0;
+                y[i] = 1.0;
                 posHasExamples = true;
             }
             else
             {
-                vecY[i] = -1.0;
+                y[i] = -1.0;
                 negHasExamples = true;
             }
         }
@@ -149,17 +149,17 @@ void DTBOblique::getDataAndLabel(
         const FeaturePool &fp,
         const Examples &examples,
         const std::vector<int> &examples_selection,
-        NICE::Matrix & matX,
-        NICE::Vector & vecY,
-        NICE::Vector & weights )
+        NICE::Matrix & X,
+        NICE::Vector & y,
+        NICE::Vector & w )
 {
     ConvolutionFeature *f = (ConvolutionFeature*)fp.begin()->second;
     int amountParams = f->getParameterLength();
     int amountExamples = examples_selection.size();
 
-    NICE::Matrix X(amountExamples, amountParams, 0.0 );
-    NICE::Vector y(amountExamples, 0.0);
-    NICE::Vector w(amountExamples, 1.0);
+    X = NICE::Matrix(amountExamples, amountParams, 0.0 );
+    y = NICE::Vector(amountExamples, 0.0);
+    w = NICE::Vector(amountExamples, 1.0);
 
     int matIndex = 0;
     for ( vector<int>::const_iterator si = examples_selection.begin();
@@ -167,23 +167,20 @@ void DTBOblique::getDataAndLabel(
           si++ )
     {
         const pair<int, Example> & p = examples[*si];
-        const Example & ce = p.second;
+        const Example & ex = p.second;
 
-        NICE::Vector pixelRepr = f->getFeatureVector( &ce );
+        NICE::Vector pixelRepr = f->getFeatureVector( &ex );
 
         double label = p.first;
-        pixelRepr *= ce.weight;
+        pixelRepr *= ex.weight;
 
-        w.set    ( matIndex, ce.weight );
+        w.set    ( matIndex, ex.weight );
         y.set    ( matIndex, label );
         X.setRow ( matIndex, pixelRepr );
 
         matIndex++;
     }
 
-    matX = X;
-    vecY = y;
-    weights = w;
 }
 
 void DTBOblique::regularizeDataMatrix(
@@ -239,7 +236,7 @@ void DTBOblique::regularizeDataMatrix(
         case 3:
         {
             NICE::Vector q ( dim, (1.0-lambda) );
-            q[0] = 1;
+            q[0] = 1.0;
             NICE::Matrix Q;
             Q.tensorProduct(q,q);
             R.multiply(XTXreg,Q);
@@ -456,8 +453,7 @@ DecisionNode *DTBOblique::buildRecursive(
     for ( FeatureValuesUnsorted::const_iterator i = values.begin();
           i != values.end(); i++ )
     {
-        double value = i->first;
-        if ( value < bestSplitInfo.threshold )
+        if ( i->first < bestSplitInfo.threshold )
             examples_left.push_back ( i->third );
         else
             examples_right.push_back ( i->third );
@@ -465,7 +461,7 @@ DecisionNode *DTBOblique::buildRecursive(
 
 #ifdef DEBUGTREE
 //    node->f->store( std::cerr );
-    std::cerr << std::endl;
+//    std::cerr << std::endl;
     std::cerr << "DTBOblique: Information Gain: " << bestSplitInfo.informationGain
               << ", Left Entropy: " <<  bestSplitInfo.entropyLeft << ", Right Entropy: "
               << bestSplitInfo.entropyRight << std::endl;
@@ -487,9 +483,8 @@ DecisionNode *DTBOblique::buildRecursive(
 //#endif
     }
 
-    //TODO
-    //delete [] best_distribution_left;
-    //delete [] best_distribution_right;
+    delete [] bestSplitInfo.distLeft;
+    delete [] bestSplitInfo.distRight;
 
     // update lambda by heuristic [Laptev/Buhmann, 2014]
     double lambdaLeft = lambdaCurrent *

+ 3 - 3
classifier/fpclassifier/randomforest/DTBOblique.h

@@ -102,9 +102,9 @@ class DTBOblique : public DecisionTreeBuilder
             const FeaturePool &fp,
             const Examples &examples,
             const std::vector<int> & examples_selection,
-            NICE::Matrix &matX,
-            NICE::Vector &vecY,
-            NICE::Vector &weights );
+            NICE::Matrix &X,
+            NICE::Vector &y,
+            NICE::Vector &w );
 
     /**
      * @brief return a regularization matrix of size (dimParams)x(dimParams)