@@ -16,7 +16,6 @@
 using namespace OBJREC;
 
 #define DEBUGTREE
 
-#undef DETAILTREE
 
 using namespace std;
@@ -31,7 +30,7 @@ DTBRandomOblique::DTBRandomOblique ( const Config *conf, string section )
   use_shannon_entropy = conf->gB(section, "use_shannon_entropy", false );
   min_examples = conf->gI(section, "min_examples", 50);
   save_indices = conf->gB(section, "save_indices", false);
-  lambda = conf->gD(section, "lambda", 0.5 );
+  lambdaInit = conf->gD(section, "lambdaInit", 0.5 );
 }
 
 DTBRandomOblique::~DTBRandomOblique()
@@ -140,7 +139,8 @@ DecisionNode *DTBRandomOblique::buildRecursive(
     FullVector & distribution,
     double e,
     int maxClassNo,
-    int depth)
+    int depth,
+    double lambdaCurrent )
 {
 
 #ifdef DEBUGTREE
@@ -166,19 +166,17 @@ DecisionNode *DTBRandomOblique::buildRecursive(
   }
 
   // refresh/set X and y
-  NICE::Matrix X;
-  NICE::Vector y;
+  NICE::Matrix X, G;
+  NICE::Vector y, beta;
   getDataAndLabel( fp, examples, examples_selection, X, y );
-  NICE::Matrix XTX = X.transpose()*X;
-  XTX.addDiagonal ( NICE::Vector( XTX.rows(), lambda) );
 
-  NICE::Matrix G;
-  NICE::Vector beta;
+  // least squares solution
+  NICE::Matrix XTX = X.transpose()*X;
+  XTX.addDiagonal ( NICE::Vector( XTX.rows(), lambdaCurrent) );
   choleskyDecomp(XTX, G);
   choleskyInvert(G, XTX);
   NICE::Matrix temp = XTX * X.transpose();
   beta.multiply(temp,y,false);
-//  choleskySolve(G, y, beta );
 
   // variables
   double best_threshold = 0.0;
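Note: the hunk above computes the oblique split parameters as the solution of the ridge-regularized normal equations, beta = (X^T X + lambdaCurrent * I)^{-1} X^T y, via NICE's Cholesky routines. Below is a minimal, self-contained sketch of the same computation in plain C++; the helpers cholesky and solveChol and the toy numbers are illustrative assumptions, not part of the patch or of the NICE API.

// Illustrative sketch only (not the NICE API): solves
//   (X^T X + lambda I) * beta = X^T y
// with a small Cholesky decomposition, mirroring what the patch does with
// choleskyDecomp / choleskyInvert on NICE::Matrix.
#include <cmath>
#include <cstdio>
#include <vector>

typedef std::vector<std::vector<double> > Mat;

// A = L * L^T for a symmetric positive definite A (lower triangle returned)
static Mat cholesky(const Mat &A)
{
  size_t n = A.size();
  Mat L(n, std::vector<double>(n, 0.0));
  for (size_t i = 0; i < n; ++i)
    for (size_t j = 0; j <= i; ++j)
    {
      double s = A[i][j];
      for (size_t k = 0; k < j; ++k) s -= L[i][k] * L[j][k];
      L[i][j] = (i == j) ? std::sqrt(s) : s / L[j][j];
    }
  return L;
}

// solve L L^T x = b by forward, then backward substitution
static std::vector<double> solveChol(const Mat &L, std::vector<double> b)
{
  size_t n = L.size();
  for (size_t i = 0; i < n; ++i)              // forward: L z = b
  {
    for (size_t k = 0; k < i; ++k) b[i] -= L[i][k] * b[k];
    b[i] /= L[i][i];
  }
  for (size_t i = n; i-- > 0; )               // backward: L^T x = z
  {
    for (size_t k = i + 1; k < n; ++k) b[i] -= L[k][i] * b[k];
    b[i] /= L[i][i];
  }
  return b;
}

int main()
{
  // tiny toy data: 3 examples, 2 features
  double Xa[3][2] = { {1.0, 2.0}, {1.0, 3.0}, {1.0, 5.0} };
  double ya[3]    = { 1.0, 2.0, 4.0 };
  double lambda   = 0.5;                      // plays the role of lambdaCurrent

  // XTX = X^T X + lambda I,  XTy = X^T y
  Mat XTX(2, std::vector<double>(2, 0.0));
  std::vector<double> XTy(2, 0.0);
  for (int i = 0; i < 2; ++i)
  {
    for (int j = 0; j < 2; ++j)
      for (int k = 0; k < 3; ++k)
        XTX[i][j] += Xa[k][i] * Xa[k][j];
    XTX[i][i] += lambda;                      // ridge term
    for (int k = 0; k < 3; ++k)
      XTy[i] += Xa[k][i] * ya[k];
  }

  std::vector<double> beta = solveChol(cholesky(XTX), XTy);
  printf("beta = (%f, %f)\n", beta[0], beta[1]);
  return 0;
}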
@@ -205,7 +203,7 @@ DecisionNode *DTBRandomOblique::buildRecursive(
     if ( maxValue - minValue < 1e-7 )
       std::cerr << "DTBRandomOblique: Difference between min and max of feature values is too small!" << std::endl;
 
-    // randomly chosen thresholds
+    // get best thresholds by complete search
     for ( int i = 0; i < random_split_tests; i++ )
     {
       double threshold = (i * (maxValue - minValue ) / (double)random_split_tests)
@@ -237,11 +235,6 @@ DecisionNode *DTBRandomOblique::buildRecursive(
         ig = 2*ig / ( e + esplit );
       }
 
-#ifdef DETAILTREE
-      std::cerr << "Testing split #" << i << ": t=" << threshold
-                << " ig=" << ig << std::endl;
-#endif
-
       if ( ig > best_ig )
       {
         best_ig = ig;
@@ -323,15 +316,27 @@ DecisionNode *DTBRandomOblique::buildRecursive(
   delete [] best_distribution_left;
   delete [] best_distribution_right;
 
+  // update lambda by heuristic [Laptev/Buhmann, 2014]
+  double lambdaLeft = lambdaCurrent *
+    pow(((double)examples_selection.size()/(double)examples_left.size()),(2./f->getParameterLength()));
+  double lambdaRight = lambdaCurrent *
+    pow(((double)examples_selection.size()/(double)examples_right.size()),(2./f->getParameterLength()));
+
+#ifdef DEBUGTREE
+  std::cerr << "regularization parameter lambda: left " << lambdaLeft
+            << " right " << lambdaRight << std::endl;
+
+#endif
+
   /** Recursion */
   // left child
   node->left = buildRecursive ( fp, examples, examples_left,
                                 distribution_left_sparse, best_entropy_left,
-                                maxClassNo, depth+1 );
+                                maxClassNo, depth+1, lambdaLeft );
   // right child
   node->right = buildRecursive ( fp, examples, examples_right,
                                  distribution_right_sparse, best_entropy_right,
-                                 maxClassNo, depth+1 );
+                                 maxClassNo, depth+1, lambdaRight );
 
   return node;
 }
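Note: the lambda update added above follows lambda_child = lambda_parent * (N_parent / N_child)^(2 / D), where D is the number of split parameters (f->getParameterLength()), so the child that keeps fewer examples receives a stronger regularizer; the code comment attributes the heuristic to [Laptev/Buhmann, 2014]. A standalone sketch of the rule follows; childLambda and the example counts are illustrative assumptions, not from the patch.

#include <cmath>
#include <cstdio>

// Illustrative helper: lambda_child = lambda_parent * (N_parent / N_child)^(2 / D),
// where D stands in for f->getParameterLength() in the patch.
double childLambda(double lambdaParent, size_t nParent, size_t nChild, size_t D)
{
  return lambdaParent * std::pow((double)nParent / (double)nChild, 2.0 / (double)D);
}

int main()
{
  // hypothetical node: 1000 examples, lambda = 0.5, 16 split parameters;
  // the smaller child gets the larger ridge penalty.
  printf("left  (400 examples): %f\n", childLambda(0.5, 1000, 400, 16));
  printf("right (600 examples): %f\n", childLambda(0.5, 1000, 600, 16));
  return 0;
}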
@@ -369,5 +374,6 @@ DecisionNode *DTBRandomOblique::build ( const FeaturePool & fp,
   entropy /= sum;
   entropy += log(sum);
 
-  return buildRecursive ( fp, examples, all, distribution, entropy, maxClassNo, 0 );
+  return buildRecursive ( fp, examples, all, distribution,
+                          entropy, maxClassNo, 0, lambdaInit );
 }
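Note: the context lines entropy /= sum; entropy += log(sum); are consistent with computing the class entropy from raw counts via H = log(S) - (1/S) * sum_i c_i * log(c_i) with S = sum_i c_i, assuming the loop above this hunk (not shown) accumulates -sum_i c_i * log(c_i) into entropy and the total count into sum. A small check of that identity with hypothetical counts (illustrative only):

#include <cmath>
#include <cstdio>

int main()
{
  double counts[3] = { 30.0, 50.0, 20.0 };   // hypothetical class counts

  double sum = 0.0, entropy = 0.0;
  for (int i = 0; i < 3; ++i)
  {
    sum += counts[i];
    if (counts[i] > 0.0) entropy -= counts[i] * log(counts[i]);  // -sum c_i log c_i
  }
  entropy /= sum;          // -(1/S) * sum c_i log c_i
  entropy += log(sum);     // + log S  ==  -sum p_i log p_i  with p_i = c_i / S

  // direct computation of the same entropy for comparison
  double direct = 0.0;
  for (int i = 0; i < 3; ++i)
  {
    double p = counts[i] / sum;
    if (p > 0.0) direct -= p * log(p);
  }

  printf("via counts trick: %f\ndirect          : %f\n", entropy, direct);
  return 0;
}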