@@ -93,17 +93,14 @@ bool DTBOblique::entropyLeftRight (
     return true;
 }
 
-void DTBOblique::adaptDataAndLabelForMultiClass (
+bool DTBOblique::adaptDataAndLabelForMultiClass (
     const int posClass,
     const int negClass,
     NICE::Matrix & matX,
-    NICE::Vector & vecY,
-    bool & posHasExamples,
-    bool & negHasExamples )
+    NICE::Vector & vecY )
 {
-    int posCount = 0;
-    int negCount = 0;
-    int outCount = 0;
+    bool posHasExamples = false;
+    bool negHasExamples = false;
 
     // One-vs-one: Transforming into {-1,0,+1} problem
     if ( useOneVsOne )
@@ -113,19 +110,16 @@ void DTBOblique::adaptDataAndLabelForMultiClass (
             {
                 vecY[i] = 1.0;
                 posHasExamples = true;
-                posCount++;
             }
             else if ( vecY[i] == negClass )
             {
                 vecY[i] = -1.0;
                 negHasExamples = true;
-                negCount++;
             }
             else
             {
                 vecY[i] = 0.0;
                 matX.setRow( i, NICE::Vector( matX.cols(), 0.0 ) );
-                outCount++;
             }
         }
     // One-vs-all: Transforming into {-1,+1} problem
@@ -136,16 +130,18 @@ void DTBOblique::adaptDataAndLabelForMultiClass (
             {
                 vecY[i] = 1.0;
                 posHasExamples = true;
-                posCount++;
             }
             else
             {
                 vecY[i] = -1.0;
                 negHasExamples = true;
-                negCount++;
             }
         }
 
+    if ( posHasExamples && negHasExamples )
+        return true;
+    else
+        return false;
 }
 
 /** refresh data matrix X and label vector y */
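As a quick reference for this refactoring, the sketch below restates the one-vs-one relabeling and the new boolean contract of adaptDataAndLabelForMultiClass in a self-contained form: posClass maps to +1, negClass to -1, every other example gets label 0 and a zeroed feature row, and the function reports whether both classes actually occur. It is only an illustration; it uses plain std::vector instead of the NICE types, and the helper name relabelOneVsOne is made up for this example.

```cpp
#include <vector>

// Illustrative stand-in (not NICE code): posClass -> +1, negClass -> -1,
// everything else -> 0 with its feature row zeroed, mirroring
// matX.setRow( i, NICE::Vector( matX.cols(), 0.0 ) ) in the patch.
// Returns true only when both classes are present, like the new bool return.
bool relabelOneVsOne ( const int posClass,
                       const int negClass,
                       std::vector< std::vector<double> > & X,
                       std::vector<double> & y )
{
    bool posHasExamples = false;
    bool negHasExamples = false;

    for ( std::size_t i = 0; i < y.size(); i++ )
    {
        if ( y[i] == posClass )
        {
            y[i] = 1.0;
            posHasExamples = true;
        }
        else if ( y[i] == negClass )
        {
            y[i] = -1.0;
            negHasExamples = true;
        }
        else
        {
            y[i] = 0.0;
            X[i].assign( X[i].size(), 0.0 ); // zero the excluded example's row
        }
    }
    return posHasExamples && negHasExamples;
}
```

Returning the "both classes present" flag directly lets the caller drop the two output booleans and simply skip class pairs where one side is empty.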
@@ -154,7 +150,8 @@ void DTBOblique::getDataAndLabel(
     const Examples &examples,
     const std::vector<int> &examples_selection,
     NICE::Matrix & matX,
-    NICE::Vector & vecY )
+    NICE::Vector & vecY,
+    NICE::Vector & weights )
 {
     ConvolutionFeature *f = (ConvolutionFeature*)fp.begin()->second;
     int amountParams = f->getParameterLength();
@@ -162,6 +159,7 @@ void DTBOblique::getDataAndLabel(
 
     NICE::Matrix X(amountExamples, amountParams, 0.0 );
     NICE::Vector y(amountExamples, 0.0);
+    NICE::Vector w(amountExamples, 1.0);
 
     int matIndex = 0;
     for ( vector<int>::const_iterator si = examples_selection.begin();
@@ -173,17 +171,19 @@ void DTBOblique::getDataAndLabel(
 
         NICE::Vector pixelRepr = f->getFeatureVector( &ce );
 
-        double label = p.first;// * ce.weight;
+        double label = p.first;
         pixelRepr *= ce.weight;
 
-        y.set( matIndex, label );
-        X.setRow(matIndex,pixelRepr);
+        w.set ( matIndex, ce.weight );
+        y.set ( matIndex, label );
+        X.setRow ( matIndex, pixelRepr );
 
         matIndex++;
     }
 
     matX = X;
     vecY = y;
+    weights = w;
 }
 
 void DTBOblique::regularizeDataMatrix(
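The weight vector returned by getDataAndLabel is initialized to 1.0 and filled with each example's ce.weight; buildRecursive then scales the label vector with it via yCur *= weights. The sketch below shows the intended element-wise scaling using plain std::vector; it assumes the vector product used in the patch is element-wise, and the helper name applyExampleWeights is made up for this example.

```cpp
#include <cassert>
#include <vector>

// Sketch (not NICE code) of the per-example weighting step applied in
// buildRecursive as "yCur *= weights;": each label is scaled by the
// weight of its example.
void applyExampleWeights ( std::vector<double> & y,
                           const std::vector<double> & weights )
{
    assert( y.size() == weights.size() );
    for ( std::size_t i = 0; i < y.size(); i++ )
        y[i] *= weights[i];
}
```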
@@ -375,8 +375,8 @@ DecisionNode *DTBOblique::buildRecursive(
 
     // Creating data matrix X and label vector y
     NICE::Matrix X;
-    NICE::Vector y, beta;
-    getDataAndLabel( fp, examples, examples_selection, X, y );
+    NICE::Vector y, beta, weights;
+    getDataAndLabel( fp, examples, examples_selection, X, y, weights );
 
     // Transforming into multi-class problem
     for ( int posClass = 0; posClass <= maxClassNo; posClass++ )
@@ -389,14 +389,13 @@ DecisionNode *DTBOblique::buildRecursive(
             NICE::Vector yCur = y;
             NICE::Matrix XCur = X;
 
-            bool posHasExamples = false;
-            bool negHasExamples = false;
+            bool hasExamples = adaptDataAndLabelForMultiClass(
+                posClass, negClass, XCur, yCur );
 
-            adaptDataAndLabelForMultiClass(
-                posClass, negClass, XCur, yCur, posHasExamples, negHasExamples );
+            yCur *= weights;
 
             // are there examples for positive and negative class?
-            if ( !posHasExamples || !negHasExamples ) continue;
+            if ( !hasExamples ) continue;
 
             // one-vs-all setting: only one iteration for inner loop
             if ( !useOneVsOne && gotInnerIteration ) continue;
@@ -465,7 +464,7 @@ DecisionNode *DTBOblique::buildRecursive(
     }
 
 #ifdef DEBUGTREE
-    node->f->store( std::cerr );
+//    node->f->store( std::cerr );
     std::cerr << std::endl;
     std::cerr << "DTBOblique: Information Gain: " << bestSplitInfo.informationGain
               << ", Left Entropy: " << bestSplitInfo.entropyLeft << ", Right Entropy: "
@@ -482,10 +481,10 @@ DecisionNode *DTBOblique::buildRecursive(
             distribution_left_sparse[k] = l;
         if ( r != 0 )
             distribution_right_sparse[k] = r;
-#ifdef DEBUGTREE
-        std::cerr << "DTBOblique: Split of Class " << k << " ("
-                  << l << " <-> " << r << ") " << std::endl;
-#endif
+//#ifdef DEBUGTREE
+//        std::cerr << "DTBOblique: Split of Class " << k << " ("
+//                  << l << " <-> " << r << ") " << std::endl;
+//#endif
     }
 
     //TODO