@@ -93,6 +93,61 @@ bool DTBOblique::entropyLeftRight (
     return true;
 }
 
+void DTBOblique::adaptDataAndLabelForMultiClass (
+    const int posClass,
+    const int negClass,
+    NICE::Matrix & matX,
+    NICE::Vector & vecY,
+    bool & posHasExamples,
+    bool & negHasExamples )
+{
+    int posCount = 0;
+    int negCount = 0;
+    int outCount = 0;
+
+    // One-vs-one: Transforming into {-1,0,+1} problem
+    if ( useOneVsOne )
+        for ( int i = 0; i < vecY.size(); i++ )
+        {
+            if ( vecY[i] == posClass )
+            {
+                vecY[i] = 1.0;
+                posHasExamples = true;
+                posCount++;
+            }
+            else if ( vecY[i] == negClass )
+            {
+                vecY[i] = -1.0;
+                negHasExamples = true;
+                negCount++;
+            }
+            else
+            {
+                vecY[i] = 0.0;
+                matX.setRow( i, NICE::Vector( matX.cols(), 0.0 ) );
+                outCount++;
+            }
+        }
+    // One-vs-all: Transforming into {-1,+1} problem
+    else
+        for ( int i = 0; i < vecY.size(); i++ )
+        {
+            if ( vecY[i] == posClass )
+            {
+                vecY[i] = 1.0;
+                posHasExamples = true;
+                posCount++;
+            }
+            else
+            {
+                vecY[i] = -1.0;
+                negHasExamples = true;
+                negCount++;
+            }
+        }
+
+}
+
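A remark on the one-vs-one branch above: examples belonging to neither posClass nor negClass get label 0 and a zeroed feature row. Assuming the least-squares solve further down only sees the data through the products

    X^T X = \sum_i x_i x_i^T    and    X^T y = \sum_i y_i x_i,

a row with x_i = 0 and y_i = 0 contributes nothing to either sum, so out-of-pair examples are effectively dropped from the regression without changing the matrix dimensions, presumably to keep the row indexing of matX and vecY aligned with examples_selection.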
 /** refresh data matrix X and label vector y */
 void DTBOblique::getDataAndLabel(
     const FeaturePool &fp,
@@ -118,7 +173,7 @@ void DTBOblique::getDataAndLabel(
 
         NICE::Vector pixelRepr = f->getFeatureVector( &ce );
 
-        double label = p.first * ce.weight;
+        double label = p.first;// * ce.weight;
         pixelRepr *= ce.weight;
 
         y.set( matIndex, label );
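With this change the per-example weight scales only the feature vector, while the label written into y stays a plain class index. That exact value is what adaptDataAndLabelForMultiClass later compares against: with the old line, p.first = 3 and ce.weight = 0.5 would have stored label = 1.5, which can never satisfy vecY[i] == posClass for posClass = 3; with label = p.first the comparison matches exactly and the weighting information is still carried by pixelRepr, which is presumably why the weighting was moved.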
@@ -315,98 +370,43 @@ DecisionNode *DTBOblique::buildRecursive(
     bestSplitInfo.entropyLeft = 0.0;
     bestSplitInfo.entropyRight = 0.0;
 
-//    double best_threshold = 0.0;
-//    double best_ig = -1.0;
-//    double *best_distribution_left = new double [maxClassNo+1];
-//    double *best_distribution_right = new double [maxClassNo+1];
-//    double best_entropy_left = 0.0;
-//    double best_entropy_right = 0.0;
-
     ConvolutionFeature *f = (ConvolutionFeature*)fp.begin()->second;
     bestSplitInfo.params = f->getParameterVector();
 
     // Creating data matrix X and label vector y
-    NICE::Matrix X, XTXr, G, temp;
+    NICE::Matrix X;
     NICE::Vector y, beta;
     getDataAndLabel( fp, examples, examples_selection, X, y );
 
-    // Preparing system of linear equations
-    regularizeDataMatrix( X, XTXr, regularizationType, lambdaCurrent );
-    choleskyDecomp(XTXr, G);
-    choleskyInvert(G, XTXr);
-    temp = XTXr * X.transpose();
-
-
-    if ( useOneVsOne )
+    // Transforming into multi-class problem
+    for ( int posClass = 0; posClass <= maxClassNo; posClass++ )
     {
-        // One-vs-one: Transforming into {-1,0,+1} problem
-        for ( int curClass = 0; curClass <= maxClassNo; curClass++ )
-        for ( int opClass = 0; opClass <= maxClassNo; opClass++ )
-        {
-            if ( curClass == opClass ) continue;
-
-            NICE::Vector yCur ( y.size(), 0.0 );
-            int idx = 0;
-            bool curHasExamples = false;
-            bool opHasExamples = false;
-
-            for ( vector<int>::const_iterator si = examples_selection.begin();
-                  si != examples_selection.end();
-                  si++, idx++ )
-            {
-                const pair<int, Example> & p = examples[*si];
-                if ( p.first == curClass )
-                {
-                    yCur.set( idx, 1.0 );
-                    curHasExamples = true;
-                }
-                else if ( p.first == opClass )
-                {
-                    yCur.set( idx, -1.0 );
-                    opHasExamples = true;
-                }
-            }
-
-            // are there positive examples for current and opposition class in current set?
-            if ( !curHasExamples || !opHasExamples ) continue;
-
-            // Solve system of linear equations in a least squares manner
-            beta.multiply(temp,yCur,false);
-
-            // Updating parameter vector in convolutional feature
-            f->setParameterVector( beta );
-
-            // Feature Values
-            values.clear();
-            f->calcFeatureValues( examples, examples_selection, values);
-
-            // complete search for threshold
-            findBestSplitThreshold ( values, bestSplitInfo, beta, e,
-                                     maxClassNo );
-        }
-    }
-    else
-    {
-        // One-vs-all: Transforming into {-1,+1} problem
-        for ( int curClass = 0; curClass <= maxClassNo; curClass++ )
+        bool gotInnerIteration = false;
+        for ( int negClass = 0; negClass <= maxClassNo; negClass++ )
         {
-            NICE::Vector yCur ( y.size(), -1.0 );
-            int idx = 0;
-            bool hasExamples = false;
-            for ( vector<int>::const_iterator si = examples_selection.begin();
-                  si != examples_selection.end();
-                  si++, idx++ )
-            {
-                const pair<int, Example> & p = examples[*si];
-                if ( p.first == curClass )
-                {
-                    yCur.set( idx, 1.0 );
-                    hasExamples = true;
-                }
-            }
+            if ( posClass == negClass ) continue;
+
+            NICE::Vector yCur = y;
+            NICE::Matrix XCur = X;
+
+            bool posHasExamples = false;
+            bool negHasExamples = false;
 
-            // is there a positive example for current class in current set?
-            if (!hasExamples) continue;
+            adaptDataAndLabelForMultiClass(
+                posClass, negClass, XCur, yCur, posHasExamples, negHasExamples );
+
+            // are there examples for positive and negative class?
+            if ( !posHasExamples || !negHasExamples ) continue;
+
+            // one-vs-all setting: only one iteration for inner loop
+            if ( !useOneVsOne && gotInnerIteration ) continue;
+
+            // Preparing system of linear equations
+            NICE::Matrix XTXr, G, temp;
+            regularizeDataMatrix( XCur, XTXr, regularizationType, lambdaCurrent );
+            choleskyDecomp(XTXr, G);
+            choleskyInvert(G, XTXr);
+            temp = XTXr * XCur.transpose();
 
             // Solve system of linear equations in a least squares manner
             beta.multiply(temp,yCur,false);
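For reference, the relocated "Preparing system of linear equations" block is a regularized normal-equations solve, now carried out once per (posClass, negClass) pairing. Assuming regularizeDataMatrix forms the regularized Gram matrix (with the regularizer R chosen by regularizationType) and choleskyInvert writes the inverse back into XTXr, the steps amount to

    XTXr = X_{cur}^T X_{cur} + \lambda R          (regularizeDataMatrix)
    G    = \text{Cholesky factor of } XTXr        (choleskyDecomp)
    XTXr \leftarrow XTXr^{-1}                     (choleskyInvert)
    temp = XTXr^{-1} X_{cur}^T
    \beta = temp \, y_{cur} = (X_{cur}^T X_{cur} + \lambda R)^{-1} X_{cur}^T y_{cur}.

The solve has to live inside the loops now because XCur changes with the pairing (one-vs-one zeroes the rows of out-of-pair examples), whereas the old code could factorize X^T X once up front. The gotInnerIteration flag handles the one-vs-all case, where yCur and XCur are identical for every negClass: only the first valid inner iteration performs the fit and threshold search, the remaining ones are skipped.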
@@ -421,10 +421,10 @@ DecisionNode *DTBOblique::buildRecursive(
             // complete search for threshold
             findBestSplitThreshold ( values, bestSplitInfo, beta, e, maxClassNo );
 
+            gotInnerIteration = true;
         }
     }
 
-
     // supress strange behaviour for values near zero (8.88178e-16)
     if (bestSplitInfo.entropyLeft < 1.0e-10 ) bestSplitInfo.entropyLeft = 0.0;
     if (bestSplitInfo.entropyRight < 1.0e-10 ) bestSplitInfo.entropyRight = 0.0;
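On the closing comment above: 8.88178e-16 is roughly four times the double-precision machine epsilon (4 * 2.220446e-16 ≈ 8.88178e-16), i.e. presumably plain floating-point round-off left over from the entropy computation, so the 1.0e-10 threshold simply snaps such numerically-zero entropies to exactly zero.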