Эх сурвалжийг харах

Renamed variable `beta` to `params` for clarification.

Sven Sickert 10 жил өмнө
parent
commit
9e79bdfba6

+ 9 - 13
classifier/fpclassifier/randomforest/DTBObliqueLS.cpp

@@ -15,7 +15,7 @@
 
 using namespace OBJREC;
 
-#define DEBUGTREE
+//#define DEBUGTREE
 
 
 using namespace std;
@@ -270,7 +270,7 @@ void DTBObliqueLS::regularizeDataMatrix(
 void DTBObliqueLS::findBestSplitThreshold (
         FeatureValuesUnsorted &values,
         SplitInfo &bestSplitInfo,
-        const NICE::Vector &beta,
+        const NICE::Vector &params,
         const double &e,
         const int &maxClassNo )
 {
@@ -319,7 +319,7 @@ void DTBObliqueLS::findBestSplitThreshold (
         {
             bestSplitInfo.informationGain = ig;
             bestSplitInfo.threshold = threshold;
-            bestSplitInfo.params = beta;
+            bestSplitInfo.params = params;
 
             for ( int k = 0 ; k <= maxClassNo ; k++ )
             {
@@ -348,10 +348,8 @@ DecisionNode *DTBObliqueLS::buildRecursive(
         double lambdaCurrent )
 {
 
-#ifdef DEBUGTREE
     std::cerr << "DTBObliqueLS: Examples: " << (int)examples_selection.size()
               << ", Depth: " << (int)depth << ", Entropy: " << e << std::endl;
-#endif
 
     // initialize new node
     DecisionNode *node = new DecisionNode ();
@@ -385,7 +383,7 @@ DecisionNode *DTBObliqueLS::buildRecursive(
 
     // Creating data matrix X and label vector y
     NICE::Matrix X;
-    NICE::Vector y, beta, weights;
+    NICE::Vector y, params, weights;
     getDataAndLabel( fp, examples, examples_selection, X, y, weights );
 
     // Transforming into multi-class problem
@@ -401,9 +399,7 @@ DecisionNode *DTBObliqueLS::buildRecursive(
         negClass = posClass;
 
         while ( posClass == negClass )
-        {
             negClass = rand() % (maxClassNo+1);
-        }
 
         yCur = y;
         XCur = X;
@@ -422,22 +418,22 @@ DecisionNode *DTBObliqueLS::buildRecursive(
     temp = XTXr * XCur.transpose();
 
     // Solve system of linear equations in a least squares manner
-    beta.multiply(temp,yCur,false);
+    params.multiply(temp,yCur,false);
 
     // Updating parameter vector in convolutional feature
-    f->setParameterVector( beta );
+    f->setParameterVector( params );
 
     // Feature Values
     values.clear();
     f->calcFeatureValues( examples, examples_selection, values);
 
     // complete search for threshold
-    findBestSplitThreshold ( values, bestSplitInfo, beta, e, maxClassNo );
+    findBestSplitThreshold ( values, bestSplitInfo, params, e, maxClassNo );
 
 //    f->setRandomParameterVector();
-//    beta = f->getParameterVector();
+//    params = f->getParameterVector();
 //    f->calcFeatureValues( examples, examples_selection, values);
-//    findBestSplitThreshold ( values, bestSplitInfo, beta, e, maxClassNo );
+//    findBestSplitThreshold ( values, bestSplitInfo, params, e, maxClassNo );
 
     // supress strange behaviour for values near zero (8.88178e-16)
     if (bestSplitInfo.entropyLeft < 1.0e-10 ) bestSplitInfo.entropyLeft = 0.0;

+ 1 - 1
classifier/fpclassifier/randomforest/DTBObliqueLS.h

@@ -132,7 +132,7 @@ class DTBObliqueLS : public DecisionTreeBuilder
     void findBestSplitThreshold (
             FeatureValuesUnsorted & values,
             SplitInfo & bestSplitInfo,
-            const NICE::Vector & beta,
+            const NICE::Vector & params,
             const double & e,
             const int & maxClassNo );