
DTBOblique: fixed wrong var type

Sven Sickert, 10 years ago
Commit db08e82483

+ 24 - 18
classifier/fpclassifier/randomforest/DTBOblique.cpp

@@ -23,15 +23,19 @@ using namespace NICE;
 
 DTBOblique::DTBOblique ( const Config *conf, string section )
 {
-    split_steps = conf->gI(section, "split_steps", 20 );
-    max_depth = conf->gI(section, "max_depth", 10 );
-    minimum_information_gain = conf->gD(section, "minimum_information_gain", 0.0000001 );
-    minimum_entropy = conf->gD(section, "minimum_entropy", 0.00001 );
-    use_shannon_entropy = conf->gB(section, "use_shannon_entropy", false );
-    min_examples = conf->gI(section, "min_examples", 50);
-    save_indices = conf->gB(section, "save_indices", false);
-    lambdaInit = conf->gD(section, "lambda_init", 0.5 );
-    regularizationType = conf->gI(section, "regularization_type", 1 );
+    saveIndices = conf->gB( section, "save_indices", false);
+    useShannonEntropy = conf->gB( section, "use_shannon_entropy", false );
+    useOneVsOne = conf->gB( section, "use_one_vs_one", false );
+
+    splitSteps = conf->gI( section, "split_steps", 20 );
+    maxDepth = conf->gI( section, "max_depth", 10 );
+    minExamples = conf->gI( section, "min_examples", 50);
+    regularizationType = conf->gI( section, "regularization_type", 1 );
+
+    minimumEntropy = conf->gD( section, "minimum_entropy", 10e-5 );
+    minimumInformationGain = conf->gD( section, "minimum_information_gain", 10e-7 );
+    lambdaInit = conf->gD( section, "lambda_init", 0.5 );
+
 }
 
 DTBOblique::~DTBOblique()
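
For context, the values read in this constructor come from a NICE::Config section; below is a minimal config sketch, assuming the usual INI-like "[section] key = value" syntax of NICE and an illustrative section name (the values simply restate the defaults above):

    [DTBOblique]
    split_steps              = 20
    max_depth                = 10
    min_examples             = 50
    regularization_type      = 1
    minimum_entropy          = 0.0001
    minimum_information_gain = 0.000001
    lambda_init              = 0.5
    use_shannon_entropy      = false
    use_one_vs_one           = false
    save_indices             = false
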
@@ -219,10 +223,10 @@ DecisionNode *DTBOblique::buildRecursive(
     DecisionNode *node = new DecisionNode ();
     node->distribution = distribution;
 
-    // stop criteria: max_depth, min_examples, min_entropy
-    if (    ( e <= minimum_entropy )
-         || ( (int)examples_selection.size() < min_examples )
-         || ( depth > max_depth ) )
+    // stop criteria: maxDepth, minExamples, minimumEntropy
+    if (    ( e <= minimumEntropy )
+         || ( (int)examples_selection.size() < minExamples )
+         || ( depth > maxDepth ) )
 
     {
 #ifdef DEBUGTREE
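
The "wrong var type" of the commit title is the one visible in the DTBOblique.h hunk further down: minimumEntropy and minimumInformationGain used to be declared as int, so the double values returned by conf->gD(...) were truncated to 0 and the stop criteria above could only fire in degenerate cases. A minimal standalone sketch of that truncation, using plain C++ and made-up numbers (note also that the new default literal 10e-5 equals 1e-4, not the old 0.00001):

    #include <iostream>

    int main ()
    {
        // Old header: an int member silently truncates the configured double.
        int    minimumEntropyOld = 0.00001; // becomes 0
        double minimumEntropyNew = 10e-5;   // stays 1e-4

        double e = 0.003; // entropy of some impure node

        // The old check degenerates to ( e <= 0 ) and only fires for pure nodes.
        std::cout << ( e <= minimumEntropyOld ) << " "        // 0
                  << ( e <= minimumEntropyNew ) << std::endl; // 0 here too, but the bound is meaningful
        return 0;
    }
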
@@ -285,6 +289,8 @@ DecisionNode *DTBOblique::buildRecursive(
             }
         }
 
+        // TODO: One-vs-one: Transforming into {-1,0,+1} problem
+
         // is there a positive example for current class in current set?
         if (!hasExamples) continue;
 
@@ -304,10 +310,10 @@ DecisionNode *DTBOblique::buildRecursive(
         if ( maxValue - minValue < 1e-7 )
             std::cerr << "DTBOblique: Difference between min and max of feature values too small!" << std::endl;
 
-        // get best thresholds by complete search
-        for ( int i = 0; i < split_steps; i++ )
+        // get best thresholds using complete search
+        for ( int i = 0; i < splitSteps; i++ )
         {
-            double threshold = (i * (maxValue - minValue ) / (double)split_steps)
+            double threshold = (i * (maxValue - minValue ) / (double)splitSteps)
                                 + minValue;
             // preparations
             double el, er;
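
For illustration, the complete search above evaluates splitSteps equally spaced thresholds starting at minValue; since i runs from 0 to splitSteps-1, maxValue itself is never tested. A standalone sketch with made-up values:

    #include <cstdio>

    int main ()
    {
        const int    splitSteps = 4;
        const double minValue = 0.0, maxValue = 2.0;

        // Same threshold grid as in buildRecursive.
        for ( int i = 0; i < splitSteps; i++ )
        {
            double threshold = ( i * ( maxValue - minValue ) / (double)splitSteps ) + minValue;
            printf ( "threshold %d: %g\n", i, threshold ); // 0, 0.5, 1, 1.5
        }
        return 0;
    }
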
@@ -330,7 +336,7 @@ DecisionNode *DTBOblique::buildRecursive(
             double pl = (count_left) / (count_left + count_right);
             double ig = e - pl*el - (1-pl)*er;
 
-            if ( use_shannon_entropy )
+            if ( useShannonEntropy )
             {
                 double esplit = - ( pl*log(pl) + (1-pl)*log(1-pl) );
                 ig = 2*ig / ( e + esplit );
@@ -362,7 +368,7 @@ DecisionNode *DTBOblique::buildRecursive(
     delete [] distribution_right;
 
     // stop criteria: minimum information gain
-    if ( best_ig < minimum_information_gain )
+    if ( best_ig < minimumInformationGain )
     {
 #ifdef DEBUGTREE
         std::cerr << "DTBOblique: Minimum information gain reached!" << std::endl;

+ 17 - 14
classifier/fpclassifier/randomforest/DTBOblique.h

@@ -29,33 +29,36 @@ class DTBOblique : public DecisionTreeBuilder
     /////////////////////////
     /////////////////////////
 
+    /** Whether to use shannon entropy or not */
+    bool useShannonEntropy;
+
+    /** Whether to save indices in leaves or not */
+    bool saveIndices;
+
+    /** Whether to use one-vs-one or one-vs-all for multiclass scenarios */
+    bool useOneVsOne;
+
     /** Amount of steps for complete search for best threshold */
-    int split_steps;
+    int splitSteps;
 
     /** Maximum allowed depth of a tree */
-    int max_depth;
+    int maxDepth;
 
     /** Minimum number of examples in a leaf node */
-    int min_examples;
+    int minExamples;
+
+    /** Regularization type */
+    int regularizationType;
 
     /** Minimum entropy to continue with splitting */
-    int minimum_entropy;
+    double minimumEntropy;
 
     /** Minimum information gain to continue with splitting */
-    int minimum_information_gain;
-
-    /** Whether to use shannon entropy or not */
-    bool use_shannon_entropy;
-
-    /** Whether to save indices in leaves or not */
-    bool save_indices;
+    double minimumInformationGain;
 
     /** Regularization parameter */
     double lambdaInit;
 
-    /** Regularization type */
-    int regularizationType;
-
     /////////////////////////
     /////////////////////////
     //  PROTECTED METHODS  //