@@ -19,31 +19,88 @@ using namespace std;

 using namespace NICE;

 using namespace OBJREC;


-void SemSegNovelty::init()
+SemSegNovelty::SemSegNovelty ( )
+    : SemanticSegmentation ( )
 {
-  globalMaxUncert = -numeric_limits<double>::max();
+  this->forbidden_classesTrain.clear();
+  this->forbidden_classesActiveLearning.clear();
+  this->classesInUse.clear();
+
+  this->globalMaxUncert = -numeric_limits<double>::max();
+
+  //we have not queried any region so far
+  this->queriedRegions.clear();
+
+  this->featExtract = new LocalFeatureColorWeijer ();
+
+  // those two guys need to be NULL, since only one of them will be active later on
+  this->classifier = NULL;
+  this->vclassifier = NULL;
+}
+
+SemSegNovelty::SemSegNovelty ( const Config * _conf,
+                               const MultiDataset *md )
+{
+  SemanticSegmentation::setClassNames ( & ( md->getClassNames ( "train" ) ) );
+
+  this->initFromConfig( _conf );
+}
+
+SemSegNovelty::~SemSegNovelty()
+{
+  if(newTrainExamples.size() > 0)
+  {
+    // show most uncertain region
+    if (b_visualizeALimages)
+      showImage(maskedImg);
+
+    //incorporate new information into the classifier
+    if (classifier != NULL)
+    {
+      //NOTE dangerous!
+      classifier->addMultipleExamples(newTrainExamples);
+    }
+
+    //store the classifier, such that we can read it again in the next round (if we like that)
+    classifier->save ( cache + "/classifier.data" );
+  }

-  string section = "SemSegNovelty";
+  // clean-up
+  if ( classifier != NULL )
+    delete classifier;
+  if ( vclassifier != NULL )
+    delete vclassifier;
+  if ( featExtract != NULL )
+    delete featExtract;
+}

-  featExtract = new LocalFeatureColorWeijer ( conf );
+void SemSegNovelty::initFromConfig(const Config* conf, const string _confSection)
+{
+  //first of all, call method of parent object
+  SemanticSegmentation::initFromConfig( conf );
+
+  featExtract->initFromConfig ( conf );

-  this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); //save and read segmentation results from files
-  this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true ); //save the classifier to a file
-  this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false ); //read the classifier from a file
+  //save and read segmentation results from files
+  this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true );
+  //save the classifier to a file
+  this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true );
+  //read the classifier from a file
+  this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false );

   //write uncertainty results in the same folder as done for the segmentation results
   resultdir = conf->gS("debug", "resultdir", "result");
   cache = conf->gS ( "cache", "root", "" );

-  findMaximumUncert = conf->gB(section, "findMaximumUncert", true);
-  whs = conf->gI ( section, "window_size", 10 );
+  this->findMaximumUncert = conf->gB(_confSection, "findMaximumUncert", true);
+  this->whs = conf->gI ( _confSection, "window_size", 10 );
   //distance to next descriptor during training
-  trainWsize = conf->gI ( section, "train_window_size", 10 );
+  this->trainWsize = conf->gI ( _confSection, "train_window_size", 10 );
   //distance to next descriptor during testing
-  testWSize = conf->gI (section, "test_window_size", 10);
+  this->testWSize = conf->gI (_confSection, "test_window_size", 10);
   // select your segmentation method here
-  string rsMethode = conf->gS ( section, "segmentation", "none" );
+  std::string rsMethode = conf->gS ( _confSection, "segmentation", "none" );

   if(rsMethode == "none")
   {
@@ -59,7 +116,7 @@ void SemSegNovelty::init()
   }

   //define which measure for "novelty" we want to use
-  noveltyMethodString = conf->gS( section, "noveltyMethod", "gp-variance");
+  noveltyMethodString = conf->gS( _confSection, "noveltyMethod", "gp-variance");
   if (noveltyMethodString.compare("gp-variance") == 0) // novel = large variance
   {
     this->noveltyMethod = GPVARIANCE;
@@ -108,90 +165,35 @@ void SemSegNovelty::init()
     this->mostNoveltyWithMaxScores = true;
   }

-  //we don't have queried any region so far
-  queriedRegions.clear();
-  visualizeALimages = conf->gB(section, "visualizeALimages", false);
-}
-
-SemSegNovelty::SemSegNovelty ( const Config * _conf,
-                 const MultiDataset *md )
-  : SemanticSegmentation ( _conf, & ( md->getClassNames ( "train" ) ) )
-{
-  this->conf = new NICE::Config ( *_conf );
-
-  // set internal variables, default values, and all those funny things
-  this->init ( );
+  b_visualizeALimages = conf->gB(_confSection, "visualizeALimages", false);

-  std::string section = "SemSegNovelty";

-  classifierString = conf->gS ( section, "classifier", "GPHIKClassifier" );
+  classifierString = conf->gS ( _confSection, "classifier", "GPHIKClassifier" );
   classifier = NULL;
   vclassifier = NULL;
   if ( classifierString.compare("GPHIKClassifier") == 0)
   {
     //just to make sure, that we do NOT perform an optimization after every iteration step
     //this would just take a lot of time, which is not desired so far
-    this->conf->sB( "GPHIKClassifier", "performOptimizationAfterIncrement", false );
-    classifier = new GPHIKClassifierNICE ( this->conf, "GPHIKClassifier" );
+    //TODO edit this!
+    //this->conf->sB( "GPHIKClassifier", "performOptimizationAfterIncrement", false );
+    classifier = new GPHIKClassifierNICE ( conf, "GPHIKClassifier" );
   }
   else
-    vclassifier = GenericClassifierSelection::selectVecClassifier ( this->conf, classifierString );
-
-  if ( read_classifier )
+    vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );
+
+  //check the same thing for the training classes - this is very specific to our setup
+  std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
+  if ( forbidden_classesTrain_s == "" )
   {
-    try
-    {
-      if ( classifier != NULL )
-      {
-        string classifierdst = "/classifier.data";
-        fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
-        classifier->read ( cache + classifierdst );
-      }
-      else
-      {
-        string classifierdst = "/veccl.data";
-        fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
-        vclassifier->read ( cache + classifierdst );
-      }
-
-      fprintf ( stderr, "SemSegNovelty:: successfully read\n" );
-    }
-    catch ( char *str )
-    {
-      cerr << "error reading data: " << str << endl;
-    }
+    forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
   }
-  else
-  {
-    train ( md );
-  }
+  this->classNames->getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
 }

-SemSegNovelty::~SemSegNovelty()
-{
-  if(newTrainExamples.size() > 0)
-  {
-    // show most uncertain region
-    if (visualizeALimages)
-      showImage(maskedImg);
-
-    //incorporate new information into the classifier
-    if (classifier != NULL)
-      classifier->addMultipleExamples(newTrainExamples);
-
-    //store the classifier, such that we can read it again in the next round (if we like that)
-    classifier->save ( cache + "/classifier.data" );
-  }
-
-  // clean-up
-  if ( classifier != NULL )
-    delete classifier;
-  if ( vclassifier != NULL )
-    delete vclassifier;
-  if ( featExtract != NULL )
-    delete featExtract;
-}


 void SemSegNovelty::visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix &regions, int region, NICE::ColorImage &outimage)
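For orientation, the hunks above split construction into a default constructor plus initFromConfig(), while the (Config, MultiDataset) constructor now only sets the class names and forwards the config. A minimal usage sketch of both resulting paths, assuming the NICE::Config and OBJREC::MultiDataset types referenced in this diff and that initFromConfig() is callable from outside; the config file name and variable names are illustrative only:

// Sketch only -- construction paths implied by the refactoring above.
NICE::Config conf ( "semsegnovelty.conf" );   // hypothetical config file
OBJREC::MultiDataset md ( &conf );            // assumed constructor

// Path 1: convenience constructor (class names + initFromConfig)
OBJREC::SemSegNovelty segmenterA ( &conf, &md );

// Path 2: default constructor plus explicit initialization,
// e.g. for an object that is later filled via restore()
OBJREC::SemSegNovelty segmenterB;
segmenterB.initFromConfig ( &conf );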
@@ -230,202 +232,221 @@ void SemSegNovelty::visualizeRegion(const NICE::ColorImage &img, const NICE::Mat

 void SemSegNovelty::train ( const MultiDataset *md )
 {
-  const LabeledSet train = * ( *md ) ["train"];
-  const LabeledSet *trainp = &train;
-
-  ////////////////////////
-  // feature extraction //
-  ////////////////////////
-
-  //check the same thing for the training classes - this is very specific to our setup
-  std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
-  if ( forbidden_classesTrain_s == "" )
+  if ( this->read_classifier )
   {
-    forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
-  }
-  this->classNames->getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
-
-
-  ProgressBar pb ( "Local Feature Extraction" );
-  pb.show();
-
-  int imgnb = 0;
-
-  Examples examples;
-  examples.filename = "training";
+    try
+    {
+      if ( this->classifier != NULL )
+      {
+        string classifierdst = "/classifier.data";
+        fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
+        classifier->read ( cache + classifierdst );
+      }
+      else
+      {
+        string classifierdst = "/veccl.data";
+        fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
+        vclassifier->read ( cache + classifierdst );
+      }
+
-  int featdim = -1;
+      fprintf ( stderr, "SemSegNovelty:: successfully read\n" );
+    }
+    catch ( char *str )
+    {
+      cerr << "error reading data: " << str << endl;
+    }
+  }
+  else
+  {
+    const LabeledSet train = * ( *md ) ["train"];
+    const LabeledSet *trainp = &train;
-  classesInUse.clear();
+    ////////////////////////
+    // feature extraction //
+    ////////////////////////
-  LOOP_ALL_S ( *trainp )
-  {
-    //EACH_S(classno, currentFile);
-    EACH_INFO ( classno, info );
+    ProgressBar pb ( "Local Feature Extraction" );
+    pb.show();
+
+    int imgnb = 0;
-    std::string currentFile = info.img();
+    Examples examples;
+    examples.filename = "training";
-    CachedExample *ce = new CachedExample ( currentFile );
+    int featdim = -1;
-    const LocalizationResult *locResult = info.localization();
-    if ( locResult->size() <= 0 )
+    classesInUse.clear();
+
+    LOOP_ALL_S ( *trainp )
     {
-      fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
-                currentFile.c_str() );
-      continue;
-    }
+      //EACH_S(classno, currentFile);
+      EACH_INFO ( classno, info );
-    int xsize, ysize;
-    ce->getImageSize ( xsize, ysize );
+      std::string currentFile = info.img();
-    Image labels ( xsize, ysize );
-    labels.set ( 0 );
-    locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );
+      CachedExample *ce = new CachedExample ( currentFile );
-    NICE::ColorImage img;
-    try {
-      img = ColorImage ( currentFile );
-    } catch ( Exception ) {
-      cerr << "SemSegNovelty: error opening image file <" << currentFile << ">" << endl;
-      continue;
-    }
+      const LocalizationResult *locResult = info.localization();
+      if ( locResult->size() <= 0 )
+      {
+        fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
+                  currentFile.c_str() );
+        continue;
+      }
-    Globals::setCurrentImgFN ( currentFile );
+      int xsize, ysize;
+      ce->getImageSize ( xsize, ysize );
-    MultiChannelImageT<double> feats;
+      Image labels ( xsize, ysize );
+      labels.set ( 0 );
+      locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );
-    // extract features
-    featExtract->getFeats ( img, feats );
-    featdim = feats.channels();
-    feats.addChannel(featdim);
+      NICE::ColorImage img;
+      try {
+        img = ColorImage ( currentFile );
+      } catch ( Exception ) {
+        cerr << "SemSegNovelty: error opening image file <" << currentFile << ">" << endl;
+        continue;
+      }
-    for (int c = 0; c < featdim; c++)
-    {
-      ImageT<double> tmp = feats[c];
-      ImageT<double> tmp2 = feats[c+featdim];
+      Globals::setCurrentImgFN ( currentFile );
-      NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
-    }
-    featdim += featdim;
+      MultiChannelImageT<double> feats;
-    // compute integral images
-    for ( int c = 0; c < featdim; c++ )
-    {
-      feats.calcIntegral ( c );
-    }
+      // extract features
+      featExtract->getFeats ( img, feats );
+      featdim = feats.channels();
+      feats.addChannel(featdim);
-    for ( int y = 0; y < ysize; y += trainWsize)
-    {
-      for ( int x = 0; x < xsize; x += trainWsize )
+      for (int c = 0; c < featdim; c++)
       {
+        ImageT<double> tmp = feats[c];
+        ImageT<double> tmp2 = feats[c+featdim];
-        int classnoTmp = labels.getPixel ( x, y );
-
-        if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
-        {
-          continue;
-        }
-
-        if (classesInUse.find(classnoTmp) == classesInUse.end())
-        {
-          classesInUse.insert(classnoTmp);
-        }
-
-        Example example;
-        example.vec = NULL;
-        example.svec = new SparseVector ( featdim );
-        for ( int f = 0; f < featdim; f++ )
-        {
-          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
-          if ( val > 1e-10 )
-            ( *example.svec ) [f] = val;
-        }
-
-        example.svec->normalize();
-
-        example.position = imgnb;
-        examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
+        NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
+      }
+      featdim += featdim;
+
+      // compute integral images
+      for ( int c = 0; c < featdim; c++ )
+      {
+        feats.calcIntegral ( c );
       }
-    }
-
-
-
-    delete ce;
-    imgnb++;
-    pb.update ( trainp->count() );
-  }
+      for ( int y = 0; y < ysize; y += trainWsize)
+      {
+        for ( int x = 0; x < xsize; x += trainWsize )
+        {
+
+          int classnoTmp = labels.getPixel ( x, y );
+
+          if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
+          {
+            continue;
+          }
+
+          if (classesInUse.find(classnoTmp) == classesInUse.end())
+          {
+            classesInUse.insert(classnoTmp);
+          }
+
+          Example example;
+          example.vec = NULL;
+          example.svec = new SparseVector ( featdim );
+          for ( int f = 0; f < featdim; f++ )
+          {
+            double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+            if ( val > 1e-10 )
+              ( *example.svec ) [f] = val;
+          }
+
+          example.svec->normalize();
+
+          example.position = imgnb;
+          examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
+
+        }
+      }
+
+
+
+      delete ce;
+      imgnb++;
+      pb.update ( trainp->count() );
+    }
-  numberOfClasses = classesInUse.size();
-  std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
-  std::cerr << "classes in use: " << std::endl;
-  for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
-  {
-    std::cerr << *it << " ";
-  }
-  std::cerr << std::endl;
+
+    numberOfClasses = classesInUse.size();
+    std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
+    std::cerr << "classes in use: " << std::endl;
+    for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
+    {
+      std::cerr << *it << " ";
+    }
+    std::cerr << std::endl;
-  pb.hide();
+    pb.hide();
-  //////////////////////
-  // train classifier //
-  //////////////////////
-  FeaturePool fp;
+    //////////////////////
+    // train classifier //
+    //////////////////////
+    FeaturePool fp;
-  Feature *f = new SparseVectorFeature ( featdim );
+    Feature *f = new SparseVectorFeature ( featdim );
-  f->explode ( fp );
-  delete f;
+    f->explode ( fp );
+    delete f;
-  if ( classifier != NULL )
-  {
-    std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
-    classifier->train ( fp, examples );
-    std::cerr << "training finished" << std::endl;
-  }
-  else
-  {
-    LabeledSetVector lvec;
-    convertExamplesToLSet ( examples, lvec );
-    vclassifier->teach ( lvec );
-// if ( usegmm )
-// convertLSetToSparseExamples ( examples, lvec );
-// else
-    std::cerr << "classifierString: " << classifierString << std::endl;
-    if (this->classifierString.compare("nn") == 0)
+    if ( classifier != NULL )
     {
-      convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
+      std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
+      classifier->train ( fp, examples );
+      std::cerr << "training finished" << std::endl;
     }
     else
     {
-      convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
-    }
-    vclassifier->finishTeaching();
-  }
+      LabeledSetVector lvec;
+      convertExamplesToLSet ( examples, lvec );
+      vclassifier->teach ( lvec );
+      // if ( usegmm )
+      //   convertLSetToSparseExamples ( examples, lvec );
+      // else
+      std::cerr << "classifierString: " << classifierString << std::endl;
+      if (this->classifierString.compare("nn") == 0)
+      {
+        convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
+      }
+      else
+      {
+        convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
+      }
+      vclassifier->finishTeaching();
+    }
-  fp.destroy();
+    fp.destroy();
-  if ( save_classifier )
-  {
-    if ( classifier != NULL )
-      classifier->save ( cache + "/classifier.data" );
-    else
-      vclassifier->save ( cache + "/veccl.data" );
-  }
+    if ( save_classifier )
+    {
+      if ( classifier != NULL )
+        classifier->save ( cache + "/classifier.data" );
+      else
+        vclassifier->save ( cache + "/veccl.data" );
+    }
-  ////////////
-  //clean up//
-  ////////////
-  for ( int i = 0; i < ( int ) examples.size(); i++ )
-  {
-    examples[i].second.clean();
-  }
-  examples.clear();
+    ////////////
+    //clean up//
+    ////////////
+    for ( int i = 0; i < ( int ) examples.size(); i++ )
+    {
+      examples[i].second.clean();
+    }
+    examples.clear();
-  cerr << "SemSeg training finished" << endl;
+    cerr << "SemSeg training finished" << endl;
+  }
 }
@@ -548,7 +569,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
   timer.stop();
   std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;

-  if (visualizeALimages)
+  if (b_visualizeALimages)
   {
     ColorImage imgrgbTmp (xsize, ysize);
     ICETools::convertToRGB ( noveltyImage, imgrgbTmp );
@@ -692,7 +713,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
       newTrainExamples = examples;
       globalMaxUncert = maxNoveltyScore;
       //prepare for later visualization
-//      if (visualizeALimages)
+//      if (b_visualizeALimages)
       visualizeRegion(img,mask,maxUncertRegion,maskedImg);
     }
     else
@@ -740,7 +761,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI

   noveltyImage.writeRaw(out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+".rawfloat");

-  if (visualizeALimages)
+  if (b_visualizeALimages)
   {
     ICETools::convertToRGB ( noveltyImage, imgrgb );
     showImage(imgrgb, "Novelty Image");
@@ -947,7 +968,10 @@ void SemSegNovelty::computeNoveltyByGPUncertainty( NICE::FloatImage & noveltyIm
     const int & xsize, const int & ysize, const int & featdim )
 {

-  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
+  double gpNoise = 0.01;
+  //TODO getMethod for GPHIK
+  //conf->gD("GPHIK", "noise", 0.01);
+
 #pragma omp parallel for
   for ( int y = 0; y < ysize; y += testWSize )
@@ -1019,8 +1043,10 @@ void SemSegNovelty::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
     NICE::MultiChannelImageT<double> & probabilities,
     const int & xsize, const int & ysize, const int & featdim )
 {
-  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
-
+  double gpNoise = 0.01;
+  //TODO getMethod for GPHIK
+  //conf->gD("GPHIK", "noise", 0.01);
+
 #pragma omp parallel for
   for ( int y = 0; y < ysize; y += testWSize )
   {
@@ -1084,7 +1110,9 @@ void SemSegNovelty::computeNoveltyByGPMeanRatio( NICE::FloatImage & noveltyImag
     NICE::MultiChannelImageT<double> & probabilities,
     const int & xsize, const int & ysize, const int & featdim )
 {
-  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
+  double gpNoise = 0.01;
+  //TODO getMethod for GPHIK
+  //conf->gD("GPHIK", "noise", 0.01);

 #pragma omp parallel for
   for ( int y = 0; y < ysize; y += testWSize )
@@ -1162,7 +1190,9 @@ void SemSegNovelty::computeNoveltyByGPWeightAll( NICE::FloatImage & noveltyImag
     NICE::MultiChannelImageT<double> & probabilities,
     const int & xsize, const int & ysize, const int & featdim )
 {
-  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
+  double gpNoise = 0.01;
+  //TODO getMethod for GPHIK
+  //conf->gD("GPHIK", "noise", 0.01);

 #pragma omp parallel for
   for ( int y = 0; y < ysize; y += testWSize )
@@ -1271,7 +1301,9 @@ void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyIm
     NICE::MultiChannelImageT<double> & probabilities,
     const int & xsize, const int & ysize, const int & featdim )
 {
-  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
+  double gpNoise = 0.01;
+  //TODO getMethod for GPHIK
+  //conf->gD("GPHIK", "noise", 0.01);

 #pragma omp parallel for
   for ( int y = 0; y < ysize; y += testWSize )
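In the five hunks above the hard-coded gpNoise = 0.01 replaces the former config lookup, and the TODO asks for a getter on the GPHIK classifier instead. One possible shape for that, with a purely hypothetical getNoise() accessor that is not part of the code shown here:

// Hypothetical sketch -- getNoise() is assumed, not an existing API.
double gpNoise = 0.01;  // current fallback used in the diff
GPHIKClassifierNICE * gphik = dynamic_cast<GPHIKClassifierNICE*> ( classifier );
if ( gphik != NULL )
  gpNoise = gphik->getNoise();  // would replace the removed config lookup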
@@ -1388,7 +1420,7 @@ void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyIm
 }


-void SemSegNovelty::addNewExample(const NICE::Vector& newExample, const int & newClassNo)
+void SemSegNovelty::addNewExample(const NICE::Vector& v_newExample, const int & newClassNo)
 {
   //accept the new class as valid information
   if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
@@ -1405,13 +1437,19 @@ void SemSegNovelty::addNewExample(const NICE::Vector& newExample, const int & ne
   //then add it to the classifier used
   if ( classifier != NULL )
   {
-    //TODO
+    if (this->classifierString.compare("GPHIKClassifier") == 0)
+    {
+      Example newExample;
+      SparseVector svec ( v_newExample );
+      newExample.svec = &svec;
+      static_cast<GPHIKClassifierNICE*>(classifier)->addExample ( newExample, newClassNo );
+    }
   }
   else //vclassifier
   {
     if (this->classifierString.compare("nn") == 0)
     {
-      vclassifier->teach ( newClassNo, newExample );
+      vclassifier->teach ( newClassNo, v_newExample );
     }
   }
 }
@@ -1422,7 +1460,7 @@ void SemSegNovelty::addNovelExamples()
   Timer timer;

   //show the image that contains the most novel region
-  if (visualizeALimages)
+  if (b_visualizeALimages)
     showImage(maskedImg, "Most novel region");

   timer.start();
@@ -1473,7 +1511,7 @@ void SemSegNovelty::addNovelExamples()
   //then add the new features to the classifier used
   if ( classifier != NULL )
   {
-    if (this->classifierString.compare("ClassifierGPHIK") == 0)
+    if (this->classifierString.compare("GPHIKClassifier") == 0)
     {
       classifier->addMultipleExamples ( this->newTrainExamples );
     }
@@ -1574,25 +1612,80 @@ void SemSegNovelty::restore ( std::istream & is, int format )
     if ( b_restoreVerbose )
       std::cerr << " currently restore section " << tmp << " in SemSegNovelty" << std::endl;

-    if ( tmp.compare("Config") == 0 )
+
+    ///////////////////////////////
+    //     FEATURE EXTRACTION    //
+    ///////////////////////////////
+    if ( tmp.compare("featExtract") == 0 )
+    {
+      featExtract->restore(is, format);
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("trainWsize") == 0 )
+    {
+      is >> trainWsize;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("whs") == 0 )
+    {
+      is >> whs;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("testWSize") == 0 )
+    {
+      is >> testWSize;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    ///////////////////////////////
+    //     NOVELTY COMPUTATION   //
+    ///////////////////////////////
+    else if ( tmp.compare("noveltyMethod") == 0 )
     {
-      //TODO think about to put the config fix as first part in store restore... currently, its position is flexible
-      // possibly obsolete safety checks
-      if ( conf == NULL )
-        conf = new Config;
-      conf->clear();
-
-      //we do not want to read until the end of the file
-      conf->setIoUntilEndOfFile( false );
-      //load every options we determined explicitely
-      conf->restore(is, format);
-
-      // set internal variables, default values, and all those funny things
-      this->init();
-
+      unsigned int ui_noveltyMethod;
+      is >> ui_noveltyMethod;
+      this->noveltyMethod = static_cast<NoveltyMethod> ( ui_noveltyMethod );
+
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("noveltyMethodString") == 0 )
+    {
+      is >> noveltyMethodString;
       is >> tmp; // end of block
       tmp = this->removeEndTag ( tmp );
     }
+    else if ( tmp.compare("globalMaxUncert") == 0 )
+    {
+      is >> globalMaxUncert;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("mostNoveltyWithMaxScores") == 0 )
+    {
+      is >> mostNoveltyWithMaxScores;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("findMaximumUncert") == 0 )
+    {
+      is >> findMaximumUncert;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    //TODO maskedImg
+    else if ( tmp.compare("b_visualizeALimages") == 0 )
+    {
+      is >> b_visualizeALimages;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    ///////////////////////////////
+    //    CLASSIFICATION STUFF   //
+    ///////////////////////////////
     else if ( tmp.compare("classifier") == 0 )
     {
       std::string isNull;
@@ -1639,7 +1732,7 @@ void SemSegNovelty::restore ( std::istream & is, int format )

       is >> tmp; // end of block
       tmp = this->removeEndTag ( tmp );
-    }
+    }
     else if ( tmp.compare("forbidden_classesTrain") == 0 )
     {
       is >> tmp; // size
@@ -1733,10 +1826,60 @@ void SemSegNovelty::restore ( std::istream & is, int format )
         std::cerr << " skip restoring classesInUse" << std::endl;
       }

+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("numberOfClasses") == 0 )
+    {
+      is >> numberOfClasses;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("read_classifier") == 0 )
+    {
+      is >> read_classifier;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("save_classifier") == 0 )
+    {
+      is >> save_classifier;
       is >> tmp; // end of block
       tmp = this->removeEndTag ( tmp );
     }
-    else if ( tmp.compare("SemanticSegmentation") == 0 )
+    else if ( tmp.compare("cache") == 0 )
+    {
+      is >> cache;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    else if ( tmp.compare("resultdir") == 0 )
+    {
+      is >> resultdir;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    //TODO newTrainExamples
+    ///////////////////////////////
+    //     SEGMENTATION STUFF    //
+    ///////////////////////////////
+    //TODO regionSeg
+    //NOTE regionSeg seems really important to keep track of
+    else if ( tmp.compare("reuseSegmentation") == 0 )
+    {
+      is >> reuseSegmentation;
+      is >> tmp; // end of block
+      tmp = this->removeEndTag ( tmp );
+    }
+    //TODO queriedRegions
+    //NOTE queriedRegions seems really important to keep track of
+    //
+    //TODO currentRegionToQuery
+    //
+    ///////////////////////////////
+    //       PARENT OBJECT       //
+    ///////////////////////////////
+    else if ( tmp.compare("SemSegNovelty--Parent") == 0 )
     {
       // restore parent object
       SemanticSegmentation::restore(is);
@@ -1765,17 +1908,90 @@ void SemSegNovelty::store ( std::ostream & os, int format ) const
   // show starting point
   os << this->createStartTag( "SemSegNovelty" ) << std::endl;

-  os.precision (numeric_limits<double>::digits10 + 1);
+  ///////////////////////////////
+  //     FEATURE EXTRACTION    //
+  ///////////////////////////////
+  os << this->createStartTag( "featExtract" ) << std::endl;
+  featExtract->store ( os );
+  os << this->createEndTag( "featExtract" ) << std::endl;

-  os << this->createStartTag( "Config" ) << std::endl;
-  //we do not want to read until end of file for restoring
-  conf->setIoUntilEndOfFile(false);
-  conf->store(os,format);
-  os << this->createEndTag( "Config" ) << std::endl;
+  os << this->createStartTag( "trainWsize" ) << std::endl;
+  os << this->trainWsize << std::endl;
+  os << this->createEndTag( "trainWsize" ) << std::endl;
+
+  os << this->createStartTag( "whs" ) << std::endl;
+  os << this->whs << std::endl;
+  os << this->createEndTag( "whs" ) << std::endl;
+
+  os << this->createStartTag( "testWSize" ) << std::endl;
+  os << this->testWSize << std::endl;
+  os << this->createEndTag( "testWSize" ) << std::endl;
+
+  ///////////////////////////////
+  //     NOVELTY COMPUTATION   //
+  ///////////////////////////////
+
+  os << this->createStartTag( "noveltyMethod" ) << std::endl;
+  os << this->noveltyMethod << std::endl;
+  os << this->createEndTag( "noveltyMethod" ) << std::endl;
+
+  os << this->createStartTag( "noveltyMethodString" ) << std::endl;
+  os << this->noveltyMethodString << std::endl;
+  os << this->createEndTag( "noveltyMethodString" ) << std::endl;
+
+  os << this->createStartTag( "globalMaxUncert" ) << std::endl;
+  os << this->globalMaxUncert << std::endl;
+  os << this->createEndTag( "globalMaxUncert" ) << std::endl;
+
+  os << this->createStartTag( "mostNoveltyWithMaxScores" ) << std::endl;
+  os << this->mostNoveltyWithMaxScores << std::endl;
+  os << this->createEndTag( "mostNoveltyWithMaxScores" ) << std::endl;
+
+  os << this->createStartTag( "findMaximumUncert" ) << std::endl;
+  os << this->findMaximumUncert << std::endl;
+  os << this->createEndTag( "findMaximumUncert" ) << std::endl;
+
+  //TODO maskedImg
+
+  os << this->createStartTag( "b_visualizeALimages" ) << std::endl;
+  os << this->b_visualizeALimages << std::endl;
+  os << this->createEndTag( "b_visualizeALimages" ) << std::endl;
+
+
+  ///////////////////////////////
+  //    CLASSIFICATION STUFF   //
+  ///////////////////////////////
+
+  os << this->createStartTag( "classifierString" ) << std::endl;
+  os << this->classifierString << std::endl;
+  os << this->createEndTag( "classifierString" ) << std::endl;
+
+  os << this->createStartTag( "classifier" ) << std::endl;
+  if ( this->classifier != NULL )
+  {
+    os << "NOTNULL" << std::endl;
+    classifier->store ( os, format );
+  }
+  else
+  {
+    os << "NULL" << std::endl;
+  }
+  os << this->createEndTag( "classifier" ) << std::endl;
+
+  //
+
+  os << this->createStartTag( "vclassifier" ) << std::endl;
+  if ( this->vclassifier != NULL )
+  {
+    os << "NOTNULL" << std::endl;
+    vclassifier->store ( os, format );
+  }
+  else
+  {
+    os << "NULL" << std::endl;
+  }
+  os << this->createEndTag( "vclassifier" ) << std::endl;

-  // now, write all variables which might have changed over time compared to initial settings

   os << this->createStartTag( "forbidden_classesTrain" ) << std::endl;
   os << "size: " << forbidden_classesTrain.size() << std::endl;
@@ -1817,50 +2033,57 @@ void SemSegNovelty::store ( std::ostream & os, int format ) const
   }
   os << this->createEndTag( "classesInUse" ) << std::endl;

-  //
-
-  os << this->createStartTag( "classifier" ) << std::endl;
-  if ( this->classifier != NULL )
-  {
-    os << "NOTNULL" << std::endl;
-    classifier->store ( os, format );
-  }
-  else
-  {
-    os << "NULL" << std::endl;
-  }
-  os << this->createEndTag( "classifier" ) << std::endl;
+  os << this->createStartTag( "numberOfClasses" ) << std::endl;
+  os << this->numberOfClasses << std::endl;
+  os << this->createEndTag( "numberOfClasses" ) << std::endl;

-  //
-
-  os << this->createStartTag( "vclassifier" ) << std::endl;
-  if ( this->classifier != NULL )
-  {
-    os << "NOTNULL" << std::endl;
-    vclassifier->store ( os, format );
-  }
-  else
-  {
-    os << "NULL" << std::endl;
-  }
-  os << this->createEndTag( "vclassifier" ) << std::endl;
+  os << this->createStartTag( "read_classifier" ) << std::endl;
+  os << this->read_classifier << std::endl;
+  os << this->createEndTag( "read_classifier" ) << std::endl;

-  //TODO
-/*
-  os << this->createStartTag( "queriedRegions" ) << std::endl;
-  os << "size: " << queriedRegions.size() << std::endl;
+
+  os << this->createStartTag( "save_classifier" ) << std::endl;
+  os << this->save_classifier << std::endl;
+  os << this->createEndTag( "save_classifier" ) << std::endl;
+
+
+  os << this->createStartTag( "cache" ) << std::endl;
+  os << this->cache << std::endl;
+  os << this->createEndTag( "cache" ) << std::endl;
+
+
+  os << this->createStartTag( "resultdir" ) << std::endl;
+  os << this->resultdir << std::endl;
+  os << this->createEndTag( "resultdir" ) << std::endl;

-  for ( std::map<std::string,std::set<int> >::const_iterator itQueriedReg = queriedRegions.begin();
-        itQueriedReg != queriedRegions.end();
-        itQueriedReg++
-      )
-  {
-    os << *itForbClassTrain << " " << std::endl;
-  }
-  os << this->createEndTag( "queriedRegions" ) << std::endl; */
+  //TODO newTrainExamples
+
+  ///////////////////////////////
+  //     SEGMENTATION STUFF    //
+  ///////////////////////////////
+  //TODO regionSeg
+  //NOTE regionSeg seems really important to keep track of
+
+  os << this->createStartTag( "reuseSegmentation" ) << std::endl;
+  os << this->reuseSegmentation << std::endl;
+  os << this->createEndTag( "reuseSegmentation" ) << std::endl;
+
+  //TODO queriedRegions
+  //NOTE queriedRegions seems really important to keep track of
+  //std::map<std::string,std::set<int> >
+  //
+  //TODO currentRegionToQuery

-  // store parent object
+
+  ///////////////////////////////
+  //       PARENT OBJECT       //
+  ///////////////////////////////
+  os << this->createStartTag( "SemSegNovelty--Parent" ) << std::endl;
   SemanticSegmentation::store(os);
+  os << this->createEndTag( "SemSegNovelty--Parent" ) << std::endl;

   // done
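Taken together, the new store()/restore() pair is meant to round-trip every member between matching start and end tags. A minimal round-trip sketch, assuming format 0 is an acceptable default and that createStartTag/createEndTag emit matching <name> ... </name> markers; trainedSegmenter stands in for any already trained SemSegNovelty object:

#include <sstream>

// Sketch only: serialize a trained object and rebuild it from the same stream.
std::stringstream buffer;
trainedSegmenter.store ( buffer, 0 );   // writes the <SemSegNovelty> block and all sub-blocks

OBJREC::SemSegNovelty restored;         // default constructor introduced in this diff
restored.restore ( buffer, 0 );         // reads blocks until the closing tag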