@@ -7,7 +7,6 @@

#include "vislearning/cbaselib/CachedExample.h"
#include "vislearning/cbaselib/PascalResults.h"
-//#include "vislearning/baselib/ColorSpace.h"
#include "vislearning/baselib/cc.h"
#include "segmentation/RSMeanShift.h"
#include "segmentation/RSGraphBased.h"
@@ -19,12 +18,16 @@

#include "core/basics/Timer.h"
#include "core/basics/vectorio.h"
+#include "core/basics/quadruplet.h"
#include "core/image/FilterT.h"

#include <omp.h>
#include <iostream>

-//#define DEBUG
+#define VERBOSE
+#undef DEBUG
+#undef VISUALIZE
+#undef WRITEREGIONS

using namespace OBJREC;
using namespace std;
@@ -34,7 +37,10 @@ SemSegContextTree::SemSegContextTree ( const Config *conf, const MultiDataset *m
: SemanticSegmentation ( conf, & ( md->getClassNames ( "train" ) ) )
{
this->conf = conf;
+
string section = "SSContextTree";
+ string featsec = "Features";
+
lfcw = new LFColorWeijer ( conf );
firstiteration = true;

@@ -44,12 +50,16 @@ SemSegContextTree::SemSegContextTree ( const Config *conf, const MultiDataset *m

maxDepth = conf->gI ( section, "max_depth", 10 );

- windowSize = conf->gI ( section, "window_size", 16 );
+ windowSize = conf->gI ( section, "window_size", 15 );
+
+ contextMultiplier = conf->gI ( section, "context_multiplier", 3 );

featsPerSplit = conf->gI ( section, "feats_per_split", 200 );

useShannonEntropy = conf->gB ( section, "use_shannon_entropy", true );

+ ftypes = conf->gI ( section, "feat_types", 5 );
+
nbTrees = conf->gI ( section, "amount_trees", 1 );

string segmentationtype = conf->gS ( section, "segmentation_type", "slic" );
@@ -59,20 +69,16 @@ SemSegContextTree::SemSegContextTree ( const Config *conf, const MultiDataset *m
cndir = conf->gS ( "SSContextTree", "cndir", "" );

if ( useCategorization && cndir == "" )
- {
fasthik = new GPHIKClassifier ( conf );
- }
else
- {
fasthik = NULL;
- }

randomTests = conf->gI ( section, "random_tests", 10 );

saveLoadData = conf->gB ( "debug", "save_load_data", false );
fileLocation = conf->gS ( "debug", "datafile", "tmp.txt" );

- useRegionFeature = conf->gB ( section, "use_region_feat", false );
+ useRegionFeature = conf->gB ( featsec, "use_region_feat", false );
pixelWiseLabeling = conf->gB ( section, "pixelWiseLabeling", false );

if ( segmentationtype == "meanshift" )
@@ -90,10 +96,6 @@ SemSegContextTree::SemSegContextTree ( const Config *conf, const MultiDataset *m
else
throw ( "no valid segmenation_type\n please choose between none, meanshift, slic and felzenszwalb\n" );

- ftypes = conf->gI ( section, "feat_types", 5 );
-
- string featsec = "Features";
-
// feature extraction prototypes
vector<Operation*> tops0, tops1, tops2;

@@ -153,13 +155,6 @@ SemSegContextTree::SemSegContextTree ( const Config *conf, const MultiDataset *m

// geometric features of hoiem
useHoiemFeatures = conf->gB ( featsec, "use_hoiem_features", false );
- if ( useHoiemFeatures )
- {
- hoiemDirectory = conf->gS ( featsec, "hoiem_directory" );
- }
-
- opOverview = vector<int> ( NBOPERATIONS, 0 );
- contextOverview = vector<vector<double> > ( maxDepth, vector<double> ( 2, 0.0 ) );

classnames = md->getClassNames ( "train" );

@@ -197,7 +192,7 @@ double SemSegContextTree::getBestSplit (
splitop = NULL;
splitval = -1.0;

- set<vector<int> >selFeats;
+ vector<quadruplet<int,int,int,int> > selFeats;
map<int, int> e;
int featcounter = forest[tree][node].featcounter;

@@ -225,9 +220,7 @@ double SemSegContextTree::getBestSplit (
int zsize = ( int ) nodeIndices[iCounter].depth();

for ( int x = 0; x < xsize; x++ )
- {
for ( int y = 0; y < ysize; y++ )
- {
for ( int z = 0; z < zsize; z++ )
{
if ( nodeIndices[iCounter].get ( x, y, z, tree ) == node )
@@ -240,40 +233,28 @@ double SemSegContextTree::getBestSplit (

if ( randD < fraction[labelmap[cn]] )
{
- vector<int> tmp ( 4, 0 );
- tmp[0] = iCounter;
- tmp[1] = x;
- tmp[2] = y;
- tmp[3] = z;
+ quadruplet<int,int,int,int> quad( iCounter, x, y, z );
featcounter++;
- selFeats.insert ( tmp );
+ selFeats.push_back ( quad );
e[cn]++;
}
}
}
- }
- }
}

- map<int, int>::iterator mapit;
-
// global entropy
double globent = 0.0;
-
- for ( mapit = e.begin() ; mapit != e.end(); mapit++ )
+ for ( map<int, int>::iterator mapit = e.begin() ; mapit != e.end(); mapit++ )
{
double p = ( double ) ( *mapit ).second / ( double ) featcounter;
globent += p * log2 ( p );
}
-
globent = -globent;

if ( globent < 0.5 )
- {
return 0.0;
- }

- // pointers to all selected features
+ // pointers to all randomly chosen features
std::vector<Operation*> featsel;

for ( int i = 0; i < featsPerSplit; i++ )
@@ -294,7 +275,7 @@ double SemSegContextTree::getBestSplit (
if ( ft > 1 )
{
//use larger window size for context features
- tmpws *= 3;
+ tmpws *= contextMultiplier;
}

// use region feature only with reasonable pre-segmentation
@@ -320,7 +301,7 @@ double SemSegContextTree::getBestSplit (
z1 = -abs(z1);
z2 = -abs(z2);
}
-
+
/* random feature maps (channels) */
int f1, f2;
f1 = ( int ) ( rand() % channelsPerType[ft].size() );
@@ -330,16 +311,16 @@ double SemSegContextTree::getBestSplit (
f2 = f1;
f1 = channelsPerType[ft][f1];
f2 = channelsPerType[ft][f2];
-
+
if ( ft == 1 )
{
int classes = ( int ) regionProbs[0][0].size();
f2 = ( int ) ( rand() % classes );
}
-
+
/* random extraction method (operation) */
int o = ( int ) ( rand() % ops[ft].size() );
-
+
Operation *op = ops[ft][o]->clone();
op->set ( x1, y1, z1, x2, y2, z2, f1, f2, ft );

@@ -359,25 +340,28 @@ double SemSegContextTree::getBestSplit (

double maxval = -numeric_limits<double>::max();
double minval = numeric_limits<double>::max();
- for ( set<vector<int> >::const_iterator it = selFeats.begin();
- it != selFeats.end(); it++ )
+ int counter = 0;
+ for ( vector<quadruplet<int,int,int,int> >::const_iterator it = selFeats.begin();
+ it != selFeats.end(); it++, counter++ )
{
Features feat;
- feat.feats = &feats[ ( *it ) [0]];
- feat.nIndices = &nodeIndices[ ( *it ) [0]];
+ feat.feats = &feats[ ( *it ).first ];
+ feat.nIndices = &nodeIndices[ ( *it ).first ];
feat.cTree = tree;
feat.tree = &forest[tree];

assert ( forest.size() > ( uint ) tree );
assert ( forest[tree][0].dist.size() > 0 );

- feat.rProbs = &regionProbs[ ( *it ) [0]];
+ feat.rProbs = &regionProbs[ ( *it ).first ];

- double val = featsel[f]->getVal ( feat, ( *it ) [1], ( *it ) [2], ( *it ) [3] );
+ double val = featsel[f]->getVal ( feat, ( *it ).second, ( *it ).third, ( *it ).fourth );
if ( !isfinite ( val ) )
{
- //cerr << "feat " << feat.feats->width() << " " << feat.feats->height() << " " << feat.feats->depth() << endl;
- //cerr << "non finite value " << val << " for " << featsel[f]->writeInfos() << endl << (*it) [1] << " " << (*it) [2] << " " << (*it) [3] << endl;
+#ifdef DEBUG
+ cerr << "feat " << feat.feats->width() << " " << feat.feats->height() << " " << feat.feats->depth() << endl;
+ cerr << "non finite value " << val << " for " << featsel[f]->writeInfos() << endl << ( *it ).second << " " << ( *it ).third << " " << ( *it ).fourth << endl;
+#endif
val = 0.0;
}
vals.push_back ( val );
@@ -391,19 +375,20 @@ double SemSegContextTree::getBestSplit (
// split values
for ( int run = 0 ; run < randomTests; run++ )
{
+ // choose threshold randomly
double sval = ( (double) rand() / (double) RAND_MAX*(maxval-minval) ) + minval;

map<int, int> eL, eR;
int counterL = 0, counterR = 0;
- int counter2 = 0;
+ counter = 0;

- for ( set<vector<int> >::const_iterator it2 = selFeats.begin();
- it2 != selFeats.end(); it2++, counter2++ )
+ for ( vector<quadruplet<int,int,int,int> >::const_iterator it2 = selFeats.begin();
+ it2 != selFeats.end(); it2++, counter++ )
{
- int cn = labels[ ( *it2 ) [0]].get ( ( *it2 ) [1], ( *it2 ) [2], ( *it2 ) [3] );
+ int cn = labels[ ( *it2 ).first ].get ( ( *it2 ).second, ( *it2 ).third, ( *it2 ).fourth );
//cout << "vals[counter2] " << vals[counter2] << " val: " << val << endl;

- if ( vals[counter2] < sval )
+ if ( vals[counter] < sval )
{
//left entropie:
eL[cn] = eL[cn] + 1;
@@ -418,14 +403,14 @@ double SemSegContextTree::getBestSplit (
}

double leftent = 0.0;
- for ( mapit = eL.begin() ; mapit != eL.end(); mapit++ )
+ for ( map<int, int>::iterator mapit = eL.begin() ; mapit != eL.end(); mapit++ )
{
double p = ( double ) ( *mapit ).second / ( double ) counterL;
leftent -= p * log2 ( p );
}

double rightent = 0.0;
- for ( mapit = eR.begin() ; mapit != eR.end(); mapit++ )
+ for ( map<int, int>::iterator mapit = eR.begin() ; mapit != eR.end(); mapit++ )
{
double p = ( double ) ( *mapit ).second / ( double ) counterR;
rightent -= p * log2 ( p );
@@ -462,7 +447,7 @@ double SemSegContextTree::getBestSplit (
}

#ifdef DEBUG
- //cout << "globent: " << globent << " bestig " << bestig << " splitval: " << splitval << endl;
+ cout << "globent: " << globent << " bestig " << bestig << " splitval: " << splitval << endl;
#endif
return bestig;
}
@@ -617,7 +602,6 @@ void SemSegContextTree::train ( const LabeledSet * trainp )

///////////////////////////// read input data /////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
-
int depthCount = 0;
vector< string > filelist;
NICE::MultiChannelImageT<uchar> pixelLabels;
@@ -832,14 +816,10 @@ void SemSegContextTree::train ( const LabeledSet * trainp )
a[i] /= ( double ) featcounter;
}

-#ifdef DEBUG
+#ifdef VERBOSE
+ cout << "\nDistribution:" << endl;
for ( int i = 0; i < ( int ) a.size(); i++ )
- {
- cout << "a[" << i << "]: " << a[i] << endl;
- }
-
- cout << "a.size: " << a.size() << endl;
-
+ cout << "class " << i << ": " << a[i] << endl;
#endif

depth = 0;
@@ -861,14 +841,13 @@ void SemSegContextTree::train ( const LabeledSet * trainp )
vector<int> startnode ( nbTrees, 0 );

bool noNewSplit = false;
- //int baseFeatSize = allfeats[0].size();

timer.stop();
- cerr << "preprocessing finished in: " << timer.getLastAbsolute() << " seconds" << endl;
- timer.start();
+ cout << "\nTime for Pre-Processing: " << timer.getLastAbsolute() << " seconds\n" << endl;

- //////////////////////////// learn the classifier ///////////////////////////
+ //////////////////////////// train the classifier ///////////////////////////
/////////////////////////////////////////////////////////////////////////////
+ timer.start();
while ( !noNewSplit && depth < maxDepth )
{
depth++;
@@ -894,7 +873,7 @@ void SemSegContextTree::train ( const LabeledSet * trainp )
}
}

-#ifdef DEBUG
+#ifdef VERBOSE
Timer timerDepth;
timerDepth.start();
#endif
@@ -1138,9 +1117,9 @@ void SemSegContextTree::train ( const LabeledSet * trainp )

if ( firstiteration ) firstiteration = false;

-#if DEBUG
+#ifdef VERBOSE
timerDepth.stop();
- cout << "time for depth " << depth << ": " << timerDepth.getLastAbsolute() << endl;
+ cout << "Depth " << depth << ": " << timerDepth.getLastAbsolute() << " seconds" <<endl;
#endif

lastNodeIndices.clear();
@@ -1148,16 +1127,14 @@ void SemSegContextTree::train ( const LabeledSet * trainp )
}

timer.stop();
- cerr << "learning finished in: " << timer.getLastAbsolute() << " seconds" << endl;
- timer.start();
-
- cout << "uniquenumber " << uniquenumber << endl;
+ cout << "Time for Learning: " << timer.getLastAbsolute() << " seconds\n" << endl;

//////////////////////// classification using HIK ///////////////////////////
/////////////////////////////////////////////////////////////////////////////

if ( useCategorization && fasthik != NULL )
{
+ timer.start();
uniquenumber = std::min ( shortsize, uniquenumber );
for ( uint i = 0; i < globalCategorFeats.size(); i++ )
{
@@ -1186,20 +1163,30 @@ void SemSegContextTree::train ( const LabeledSet * trainp )

fasthik->train ( globalCategorFeats, ys );

+ timer.stop();
+ cerr << "Time for Categorization: " << timer.getLastAbsolute() << " seconds\n" << endl;
}

-#ifdef DEBUG
+#ifdef VERBOSE
+ // counter for used operations
+ vector<int> opOverview ( NBOPERATIONS, 0 );
+ // relative use of context vs raw features per tree level
+ vector<vector<double> > contextOverview ( maxDepth, vector<double> ( 2, 0.0 ) );
for ( int tree = 0; tree < nbTrees; tree++ )
{
int t = ( int ) forest[tree].size();

for ( int i = 0; i < t; i++ )
{
+#ifdef DEBUG
printf ( "tree[%i]: left: %i, right: %i", i, forest[tree][i].left, forest[tree][i].right );
+#endif

if ( !forest[tree][i].isleaf && forest[tree][i].left != -1 )
{
+#ifdef DEBUG
cout << ", feat: " << forest[tree][i].feat->writeInfos() << " ";
+#endif
opOverview[forest[tree][i].feat->getOps() ]++;
contextOverview[forest[tree][i].depth][ ( int ) forest[tree][i].feat->getContext() ]++;
}
@@ -1228,21 +1215,19 @@ void SemSegContextTree::train ( const LabeledSet * trainp )
}
}

- cout << "evaluation of featuretypes" << endl;
+ cout << "\nFEATURE USAGE" << endl;
+ cout << "#############\n" << endl;
+ // amount of used features per feature type
+ cout << "Types:" << endl;
for ( map<int, int>::const_iterator it = featTypeCounter.begin(); it != featTypeCounter.end(); it++ )
- {
- cerr << it->first << ": " << it->second << endl;
- }
-
- for ( uint c = 0; c < ops.size(); c++ )
- {
-
+ cout << it->first << ": " << it->second << endl;
+ // amount of used features per operation type
+ cout << "\nOperations:" << endl;
+ for ( uint c = 0; c < 3; c++ )
for ( int t = 0; t < ( int ) ops[c].size(); t++ )
- {
cout << ops[c][t]->writeInfos() << ": " << opOverview[ops[c][t]->getOps() ] << endl;
- }
- }
-
+ // ratio of used context features per depth level
+ cout << "\nContext-Ratio:" << endl;
for ( int d = 0; d < maxDepth; d++ )
{
double sum = contextOverview[d][0] + contextOverview[d][1];
@@ -1252,13 +1237,10 @@ void SemSegContextTree::train ( const LabeledSet * trainp )
contextOverview[d][0] /= sum;
contextOverview[d][1] /= sum;

- cout << "depth: " << d << " woContext: " << contextOverview[d][0] << " wContext: " << contextOverview[d][1] << endl;
+ cout << "Depth [" << d << "] Normal: " << contextOverview[d][0] << " Context: " << contextOverview[d][1] << endl;
}
#endif

- timer.stop();
- cerr << "rest finished in: " << timer.getLastAbsolute() << " seconds" << endl;
- timer.start();
}

void SemSegContextTree::addFeatureMaps ( NICE::MultiChannelImage3DT<double> &imgData, const vector<string> &filelist, int &amountRegions )
@@ -1352,6 +1334,7 @@ void SemSegContextTree::addFeatureMaps ( NICE::MultiChannelImage3DT<double> &img
// read the geometric cues produced by Hoiem et al.
if ( useHoiemFeatures )
{
+ string hoiemDirectory = conf->gS ( "Features", "hoiem_directory" );
// we could also give the following set as a config option
string hoiemClasses_s = "sky 000 090-045 090-090 090-135 090 090-por 090-sol";
vector<string> hoiemClasses;
@@ -1417,8 +1400,6 @@ void SemSegContextTree::classify (
NICE::MultiChannelImage3DT<double> & probabilities,
const std::vector<std::string> & filelist )
{
- fprintf ( stderr, "ContextTree classification !\n" );
-
int xsize = imgData.width();
int ysize = imgData.height();
int zsize = imgData.depth();
@@ -1429,6 +1410,9 @@ void SemSegContextTree::classify (
firstiteration = true;
depth = 0;

+ Timer timer;
+ timer.start();
+
// classes occurred during training step
int classes = labelmapback.size();
// classes defined in config file
@@ -1639,7 +1623,6 @@ void SemSegContextTree::classify (
{
globalCategorFeat->setDim ( uniquenumber );
globalCategorFeat->normalize();
-// ClassificationResult cr = fasthik->classify ( globalCategorFeat );
int result;
SparseVector scores;
fasthik->classify( globalCategorFeat, result, scores );
@@ -1695,47 +1678,9 @@ void SemSegContextTree::classify (
}
}
}
-#undef VISUALIZE
-#ifdef VISUALIZE
- for ( int z = 0; z < zsize; z++ )
- {
- for ( int j = 0 ; j < ( int ) probabilities.channels(); j++ )
- {
- //cout << "class: " << j << endl;//" " << cn.text (j) << endl;
-
- NICE::Matrix tmp ( probabilities.height(), probabilities.width() );
- double maxval = -numeric_limits<double>::max();
- double minval = numeric_limits<double>::max();
-

- for ( int y = 0; y < probabilities.height(); y++ )
- for ( int x = 0; x < probabilities.width(); x++ )
- {
- double val = probabilities ( x, y, z, j );
- tmp ( y, x ) = val;
- maxval = std::max ( val, maxval );
- minval = std::min ( val, minval );
- }
- tmp ( 0, 0 ) = 1.0;
- tmp ( 0, 1 ) = 0.0;
-
- NICE::ColorImage imgrgb ( probabilities.width(), probabilities.height() );
- ICETools::convertToRGB ( tmp, imgrgb );
-
- cout << "maxval = " << maxval << " minval: " << minval << " for class " << j << endl; //cn.text (j) << endl;
-
- std::string s;
- std::stringstream out;
- out << "tmpprebmap" << z << "_" << j << ".ppm";
- s = out.str();
- imgrgb.write ( s );
- //showImage(imgrgb, "Ergebnis");
- //getchar();
- }
- }
- cout << "fertsch" << endl;
- getchar();
- cout << "weiter gehtsch" << endl;
+#ifdef VISUALIZE
+ getProbabilityMap( probabilities );
#endif
}
else
@@ -1856,7 +1801,6 @@ void SemSegContextTree::classify (
}
}

-//#define WRITEREGIONS
#ifdef WRITEREGIONS
for ( int z = 0; z < zsize; z++ )
{
@@ -1905,8 +1849,10 @@ void SemSegContextTree::classify (
#endif
}

+ timer.stop();
+ cout << "\nTime for Classification: " << timer.getLastAbsolute() << endl;
+
delete globalCategorFeat;
- cout << "segmentation finished" << endl;
}

void SemSegContextTree::store ( std::ostream & os, int format ) const
|