|
@@ -19,100 +19,121 @@ using namespace std;
|
|
|
using namespace NICE;
|
|
|
using namespace OBJREC;
|
|
|
|
|
|
-SemSegNovelty::SemSegNovelty ( const Config *conf,
|
|
|
+SemSegNovelty::SemSegNovelty ( )
|
|
|
+ : SemanticSegmentation ( )
|
|
|
+{
|
|
|
+ this->forbidden_classesTrain.clear();
|
|
|
+ this->forbidden_classesActiveLearning.clear();
|
|
|
+ this->classesInUse.clear();
|
|
|
+
|
|
|
+ this->globalMaxUncert = -numeric_limits<double>::max();
|
|
|
+
|
|
|
+ //we have not queried any region so far
|
|
|
+ this->queriedRegions.clear();
|
|
|
+
|
|
|
+ this->featExtract = new LocalFeatureColorWeijer ();
|
|
|
+
|
|
|
+ // these two pointers need to be NULL, since only one of them will be active later on
|
|
|
+ this->classifier = NULL;
|
|
|
+ this->vclassifier = NULL;
|
|
|
+
|
|
|
+ // the same holds for the segmentation object
|
|
|
+ this->regionSeg = NULL;
|
|
|
+}
|
|
|
+
|
|
|
+SemSegNovelty::SemSegNovelty ( const Config * _conf,
|
|
|
const MultiDataset *md )
|
|
|
- : SemanticSegmentation ( conf, & ( md->getClassNames ( "train" ) ) )
|
|
|
{
|
|
|
- this->conf = conf;
|
|
|
+ SemanticSegmentation::setClassNames ( & ( md->getClassNames ( "train" ) ) );
|
|
|
+
|
|
|
+ this->initFromConfig( _conf );
|
|
|
+}
|
|
|
|
|
|
- globalMaxUncert = -numeric_limits<double>::max();
|
|
|
+SemSegNovelty::~SemSegNovelty()
|
|
|
+{
|
|
|
+ if(newTrainExamples.size() > 0)
|
|
|
+ {
|
|
|
+ // show most uncertain region
|
|
|
+ if (b_visualizeALimages)
|
|
|
+ showImage(maskedImg);
|
|
|
+
|
|
|
+ //incorporate new information into the classifier
|
|
|
+ if (classifier != NULL)
|
|
|
+ {
|
|
|
+ //NOTE dangerous!
|
|
|
+ classifier->addMultipleExamples(newTrainExamples);
|
|
|
+ }
|
|
|
+
|
|
|
+ //store the classifier, such that we can read it again in the next round (if desired)
|
|
|
+ if ( classifier != NULL ) classifier->save ( cache + "/classifier.data" );
|
|
|
+ }
|
|
|
|
|
|
- string section = "SemSegNovelty";
|
|
|
+ // clean-up
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // FEATURE EXTRACTION //
|
|
|
+ ///////////////////////////////
|
|
|
+ if ( featExtract != NULL )
|
|
|
+ delete featExtract;
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // CLASSIFICATION STUFF //
|
|
|
+ ///////////////////////////////
|
|
|
+ if ( classifier != NULL )
|
|
|
+ delete classifier;
|
|
|
+ if ( vclassifier != NULL )
|
|
|
+ delete vclassifier;
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // SEGMENTATION STUFF //
|
|
|
+ ///////////////////////////////
|
|
|
+ if ( this->regionSeg != NULL )
|
|
|
+ delete this->regionSeg;
|
|
|
+}
|
|
|
|
|
|
- featExtract = new LocalFeatureColorWeijer ( conf );
|
|
|
+void SemSegNovelty::initFromConfig(const Config* conf, const string _confSection)
|
|
|
+{
|
|
|
+ //first of all, call method of parent object
|
|
|
+ SemanticSegmentation::initFromConfig( conf );
|
|
|
+
|
|
|
+ featExtract->initFromConfig ( conf );
|
|
|
|
|
|
- this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); //save and read segmentation results from files
|
|
|
- this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true ); //save the classifier to a file
|
|
|
- this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false ); //read the classifier from a file
|
|
|
+ //save and read segmentation results from files
|
|
|
+ this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true );
|
|
|
+ //save the classifier to a file
|
|
|
+ this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true );
|
|
|
+ //read the classifier from a file
|
|
|
+ this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false );
|
|
|
|
|
|
//write uncertainty results in the same folder as done for the segmentation results
|
|
|
resultdir = conf->gS("debug", "resultdir", "result");
|
|
|
cache = conf->gS ( "cache", "root", "" );
|
|
|
|
|
|
-
|
|
|
- //stupid work around of the const attribute
|
|
|
- Config confCopy = *conf;
|
|
|
-
|
|
|
- //just to make sure, that we do NOT perform an optimization after every iteration step
|
|
|
- //this would just take a lot of time, which is not desired so far
|
|
|
- confCopy.sB("ClassifierGPHIK","performOptimizationAfterIncrement",false);
|
|
|
-
|
|
|
- classifierString = conf->gS ( section, "classifier", "ClassifierGPHIK" );
|
|
|
- classifier = NULL;
|
|
|
- vclassifier = NULL;
|
|
|
- if ( classifierString.compare("ClassifierGPHIK") == 0)
|
|
|
- classifier = new GPHIKClassifierNICE ( &confCopy, "ClassifierGPHIK" );
|
|
|
- else
|
|
|
- vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );
|
|
|
-
|
|
|
-
|
|
|
|
|
|
- findMaximumUncert = conf->gB(section, "findMaximumUncert", true);
|
|
|
- whs = conf->gI ( section, "window_size", 10 );
|
|
|
+ this->findMaximumUncert = conf->gB(_confSection, "findMaximumUncert", true);
|
|
|
+ this->whs = conf->gI ( _confSection, "window_size", 10 );
|
|
|
//distance to next descriptor during training
|
|
|
- trainWsize = conf->gI ( section, "train_window_size", 10 );
|
|
|
+ this->trainWsize = conf->gI ( _confSection, "train_window_size", 10 );
|
|
|
//distance to next descriptor during testing
|
|
|
- testWSize = conf->gI (section, "test_window_size", 10);
|
|
|
+ this->testWSize = conf->gI (_confSection, "test_window_size", 10);
|
|
|
// select your segmentation method here
|
|
|
- string rsMethode = conf->gS ( section, "segmentation", "none" );
|
|
|
+ this->s_rsMethode = conf->gS ( _confSection, "segmentation", "none" );
|
|
|
|
|
|
- if(rsMethode == "none")
|
|
|
+ if( this->s_rsMethode == "none" )
|
|
|
{
|
|
|
regionSeg = NULL;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
- RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod(conf, rsMethode);
|
|
|
+ RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod( conf, this->s_rsMethode );
|
|
|
if ( reuseSegmentation )
|
|
|
regionSeg = new RSCache ( conf, tmpRegionSeg );
|
|
|
else
|
|
|
regionSeg = tmpRegionSeg;
|
|
|
}
|
|
|
|
|
|
- cn = md->getClassNames ( "train" );
|
|
|
-
|
|
|
- if ( read_classifier )
|
|
|
- {
|
|
|
- try
|
|
|
- {
|
|
|
- if ( classifier != NULL )
|
|
|
- {
|
|
|
- string classifierdst = "/classifier.data";
|
|
|
- fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
- classifier->read ( cache + classifierdst );
|
|
|
- }
|
|
|
- else
|
|
|
- {
|
|
|
- string classifierdst = "/veccl.data";
|
|
|
- fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
- vclassifier->read ( cache + classifierdst );
|
|
|
- }
|
|
|
-
|
|
|
-
|
|
|
- fprintf ( stderr, "SemSegNovelty:: successfully read\n" );
|
|
|
- }
|
|
|
- catch ( char *str )
|
|
|
- {
|
|
|
- cerr << "error reading data: " << str << endl;
|
|
|
- }
|
|
|
- }
|
|
|
- else
|
|
|
- {
|
|
|
- train ( md );
|
|
|
- }
|
|
|
-
|
|
|
//define which measure for "novelty" we want to use
|
|
|
- noveltyMethodString = conf->gS( section, "noveltyMethod", "gp-variance");
|
|
|
+ noveltyMethodString = conf->gS( _confSection, "noveltyMethod", "gp-variance");
|
|
|
if (noveltyMethodString.compare("gp-variance") == 0) // novel = large variance
|
|
|
{
|
|
|
this->noveltyMethod = GPVARIANCE;
|
|
@@ -161,36 +182,37 @@ SemSegNovelty::SemSegNovelty ( const Config *conf,
|
|
|
this->mostNoveltyWithMaxScores = true;
|
|
|
}
|
|
|
|
|
|
- //we don't have queried any region so far
|
|
|
- queriedRegions.clear();
|
|
|
- visualizeALimages = conf->gB(section, "visualizeALimages", false);
|
|
|
-}
|
|
|
-
|
|
|
-SemSegNovelty::~SemSegNovelty()
|
|
|
-{
|
|
|
- if(newTrainExamples.size() > 0)
|
|
|
- {
|
|
|
- // show most uncertain region
|
|
|
- if (visualizeALimages)
|
|
|
- showImage(maskedImg);
|
|
|
-
|
|
|
- //incorporate new information into the classifier
|
|
|
- if (classifier != NULL)
|
|
|
- classifier->addMultipleExamples(newTrainExamples);
|
|
|
-
|
|
|
- //store the classifier, such that we can read it again in the next round (if we like that)
|
|
|
- classifier->save ( cache + "/classifier.data" );
|
|
|
+ b_visualizeALimages = conf->gB(_confSection, "visualizeALimages", false);
|
|
|
+
|
|
|
+
|
|
|
+ classifierString = conf->gS ( _confSection, "classifier", "GPHIKClassifier" );
|
|
|
+ classifier = NULL;
|
|
|
+ vclassifier = NULL;
|
|
|
+ if ( classifierString.compare("GPHIKClassifier") == 0)
|
|
|
+ {
|
|
|
+ //just to make sure that we do NOT perform an optimization after every iteration step
|
|
|
+ //this would just take a lot of time, which is not desired so far
|
|
|
+ //TODO edit this!
|
|
|
+ //this->conf->sB( "GPHIKClassifier", "performOptimizationAfterIncrement", false );
|
|
|
+ classifier = new GPHIKClassifierNICE ( conf, "GPHIKClassifier" );
|
|
|
}
|
|
|
+ else
|
|
|
+ vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );
|
|
|
|
|
|
- // clean-up
|
|
|
- if ( classifier != NULL )
|
|
|
- delete classifier;
|
|
|
- if ( vclassifier != NULL )
|
|
|
- delete vclassifier;
|
|
|
- if ( featExtract != NULL )
|
|
|
- delete featExtract;
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+ //determine the forbidden classes for training - this is very specific to our setup
|
|
|
+ std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
|
|
|
+ if ( forbidden_classesTrain_s == "" )
|
|
|
+ {
|
|
|
+ forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
|
|
|
+ }
|
|
|
+ this->classNames->getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
|
|
|
}
|
|
|
|
|
|
+
|
|
|
+
|
|
|
void SemSegNovelty::visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix ®ions, int region, NICE::ColorImage &outimage)
|
|
|
{
|
|
|
std::vector<uchar> color;
|
|
@@ -227,202 +249,221 @@ void SemSegNovelty::visualizeRegion(const NICE::ColorImage &img, const NICE::Mat
|
|
|
|
|
|
void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
{
|
|
|
- const LabeledSet train = * ( *md ) ["train"];
|
|
|
- const LabeledSet *trainp = &train;
|
|
|
-
|
|
|
- ////////////////////////
|
|
|
- // feature extraction //
|
|
|
- ////////////////////////
|
|
|
-
|
|
|
- //check the same thing for the training classes - this is very specific to our setup
|
|
|
- std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
|
|
|
- if ( forbidden_classesTrain_s == "" )
|
|
|
+ if ( this->read_classifier )
|
|
|
{
|
|
|
- forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
|
|
|
- }
|
|
|
- cn.getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
|
|
|
-
|
|
|
-
|
|
|
- ProgressBar pb ( "Local Feature Extraction" );
|
|
|
- pb.show();
|
|
|
-
|
|
|
- int imgnb = 0;
|
|
|
-
|
|
|
- Examples examples;
|
|
|
- examples.filename = "training";
|
|
|
+ try
|
|
|
+ {
|
|
|
+ if ( this->classifier != NULL )
|
|
|
+ {
|
|
|
+ string classifierdst = "/classifier.data";
|
|
|
+ fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
+ classifier->read ( cache + classifierdst );
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ string classifierdst = "/veccl.data";
|
|
|
+ fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
+ vclassifier->read ( cache + classifierdst );
|
|
|
+ }
|
|
|
+
|
|
|
|
|
|
- int featdim = -1;
|
|
|
+ fprintf ( stderr, "SemSegNovelty:: successfully read\n" );
|
|
|
+ }
|
|
|
+ catch ( char *str )
|
|
|
+ {
|
|
|
+ cerr << "error reading data: " << str << endl;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ const LabeledSet train = * ( *md ) ["train"];
|
|
|
+ const LabeledSet *trainp = &train;
|
|
|
|
|
|
- classesInUse.clear();
|
|
|
+ ////////////////////////
|
|
|
+ // feature extraction //
|
|
|
+ ////////////////////////
|
|
|
|
|
|
- LOOP_ALL_S ( *trainp )
|
|
|
- {
|
|
|
- //EACH_S(classno, currentFile);
|
|
|
- EACH_INFO ( classno, info );
|
|
|
+ ProgressBar pb ( "Local Feature Extraction" );
|
|
|
+ pb.show();
|
|
|
+
|
|
|
+ int imgnb = 0;
|
|
|
|
|
|
- std::string currentFile = info.img();
|
|
|
+ Examples examples;
|
|
|
+ examples.filename = "training";
|
|
|
|
|
|
- CachedExample *ce = new CachedExample ( currentFile );
|
|
|
+ int featdim = -1;
|
|
|
|
|
|
- const LocalizationResult *locResult = info.localization();
|
|
|
- if ( locResult->size() <= 0 )
|
|
|
+ classesInUse.clear();
|
|
|
+
|
|
|
+ LOOP_ALL_S ( *trainp )
|
|
|
{
|
|
|
- fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
|
|
|
- currentFile.c_str() );
|
|
|
- continue;
|
|
|
- }
|
|
|
+ //EACH_S(classno, currentFile);
|
|
|
+ EACH_INFO ( classno, info );
|
|
|
|
|
|
- int xsize, ysize;
|
|
|
- ce->getImageSize ( xsize, ysize );
|
|
|
+ std::string currentFile = info.img();
|
|
|
|
|
|
- Image labels ( xsize, ysize );
|
|
|
- labels.set ( 0 );
|
|
|
- locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );
|
|
|
+ CachedExample *ce = new CachedExample ( currentFile );
|
|
|
|
|
|
- NICE::ColorImage img;
|
|
|
- try {
|
|
|
- img = ColorImage ( currentFile );
|
|
|
- } catch ( Exception ) {
|
|
|
- cerr << "SemSegNovelty: error opening image file <" << currentFile << ">" << endl;
|
|
|
- continue;
|
|
|
- }
|
|
|
+ const LocalizationResult *locResult = info.localization();
|
|
|
+ if ( locResult->size() <= 0 )
|
|
|
+ {
|
|
|
+ fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
|
|
|
+ currentFile.c_str() );
|
|
|
+ continue;
|
|
|
+ }
|
|
|
|
|
|
- Globals::setCurrentImgFN ( currentFile );
|
|
|
+ int xsize, ysize;
|
|
|
+ ce->getImageSize ( xsize, ysize );
|
|
|
|
|
|
- MultiChannelImageT<double> feats;
|
|
|
+ Image labels ( xsize, ysize );
|
|
|
+ labels.set ( 0 );
|
|
|
+ locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );
|
|
|
|
|
|
- // extract features
|
|
|
- featExtract->getFeats ( img, feats );
|
|
|
- featdim = feats.channels();
|
|
|
- feats.addChannel(featdim);
|
|
|
+ NICE::ColorImage img;
|
|
|
+ try {
|
|
|
+ img = ColorImage ( currentFile );
|
|
|
+ } catch ( const Exception & ) {
|
|
|
+ cerr << "SemSegNovelty: error opening image file <" << currentFile << ">" << endl;
|
|
|
+ continue;
|
|
|
+ }
|
|
|
|
|
|
- for (int c = 0; c < featdim; c++)
|
|
|
- {
|
|
|
- ImageT<double> tmp = feats[c];
|
|
|
- ImageT<double> tmp2 = feats[c+featdim];
|
|
|
+ Globals::setCurrentImgFN ( currentFile );
|
|
|
|
|
|
- NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
|
|
|
- }
|
|
|
- featdim += featdim;
|
|
|
+ MultiChannelImageT<double> feats;
|
|
|
|
|
|
- // compute integral images
|
|
|
- for ( int c = 0; c < featdim; c++ )
|
|
|
- {
|
|
|
- feats.calcIntegral ( c );
|
|
|
- }
|
|
|
+ // extract features
|
|
|
+ featExtract->getFeats ( img, feats );
|
|
|
+ featdim = feats.channels();
|
|
|
+ feats.addChannel(featdim);
|
|
|
|
|
|
- for ( int y = 0; y < ysize; y += trainWsize)
|
|
|
- {
|
|
|
- for ( int x = 0; x < xsize; x += trainWsize )
|
|
|
+ for (int c = 0; c < featdim; c++)
|
|
|
{
|
|
|
+ ImageT<double> tmp = feats[c];
|
|
|
+ ImageT<double> tmp2 = feats[c+featdim];
|
|
|
|
|
|
- int classnoTmp = labels.getPixel ( x, y );
|
|
|
-
|
|
|
- if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
|
|
|
- {
|
|
|
- continue;
|
|
|
- }
|
|
|
-
|
|
|
- if (classesInUse.find(classnoTmp) == classesInUse.end())
|
|
|
- {
|
|
|
- classesInUse.insert(classnoTmp);
|
|
|
- }
|
|
|
-
|
|
|
- Example example;
|
|
|
- example.vec = NULL;
|
|
|
- example.svec = new SparseVector ( featdim );
|
|
|
- for ( int f = 0; f < featdim; f++ )
|
|
|
- {
|
|
|
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
|
|
|
- if ( val > 1e-10 )
|
|
|
- ( *example.svec ) [f] = val;
|
|
|
- }
|
|
|
-
|
|
|
- example.svec->normalize();
|
|
|
-
|
|
|
- example.position = imgnb;
|
|
|
- examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
|
|
|
+ NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
|
|
|
+ }
|
|
|
+ featdim += featdim;
|
|
|
|
|
|
+ // compute integral images
|
|
|
+ for ( int c = 0; c < featdim; c++ )
|
|
|
+ {
|
|
|
+ feats.calcIntegral ( c );
|
|
|
}
|
|
|
- }
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
|
|
|
- delete ce;
|
|
|
- imgnb++;
|
|
|
- pb.update ( trainp->count() );
|
|
|
- }
|
|
|
+ for ( int y = 0; y < ysize; y += trainWsize)
|
|
|
+ {
|
|
|
+ for ( int x = 0; x < xsize; x += trainWsize )
|
|
|
+ {
|
|
|
+
|
|
|
+ int classnoTmp = labels.getPixel ( x, y );
|
|
|
+
|
|
|
+ if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
|
|
|
+ {
|
|
|
+ continue;
|
|
|
+ }
|
|
|
+
|
|
|
+ if (classesInUse.find(classnoTmp) == classesInUse.end())
|
|
|
+ {
|
|
|
+ classesInUse.insert(classnoTmp);
|
|
|
+ }
|
|
|
+
|
|
|
+ Example example;
|
|
|
+ example.vec = NULL;
|
|
|
+ example.svec = new SparseVector ( featdim );
|
|
|
+ for ( int f = 0; f < featdim; f++ )
|
|
|
+ {
|
|
|
+ double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
|
|
|
+ if ( val > 1e-10 )
|
|
|
+ ( *example.svec ) [f] = val;
|
|
|
+ }
|
|
|
+
|
|
|
+ example.svec->normalize();
|
|
|
+
|
|
|
+ example.position = imgnb;
|
|
|
+ examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
|
|
|
+
|
|
|
+ }
|
|
|
+ }
|
|
|
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+ delete ce;
|
|
|
+ imgnb++;
|
|
|
+ pb.update ( trainp->count() );
|
|
|
+ }
|
|
|
|
|
|
- numberOfClasses = classesInUse.size();
|
|
|
- std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
|
|
|
- std::cerr << "classes in use: " << std::endl;
|
|
|
- for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
|
|
|
- {
|
|
|
- std::cerr << *it << " ";
|
|
|
- }
|
|
|
- std::cerr << std::endl;
|
|
|
+
|
|
|
+ numberOfClasses = classesInUse.size();
|
|
|
+ std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
|
|
|
+ std::cerr << "classes in use: " << std::endl;
|
|
|
+ for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
|
|
|
+ {
|
|
|
+ std::cerr << *it << " ";
|
|
|
+ }
|
|
|
+ std::cerr << std::endl;
|
|
|
|
|
|
- pb.hide();
|
|
|
+ pb.hide();
|
|
|
|
|
|
|
|
|
- //////////////////////
|
|
|
- // train classifier //
|
|
|
- //////////////////////
|
|
|
- FeaturePool fp;
|
|
|
+ //////////////////////
|
|
|
+ // train classifier //
|
|
|
+ //////////////////////
|
|
|
+ FeaturePool fp;
|
|
|
|
|
|
- Feature *f = new SparseVectorFeature ( featdim );
|
|
|
+ Feature *f = new SparseVectorFeature ( featdim );
|
|
|
|
|
|
- f->explode ( fp );
|
|
|
- delete f;
|
|
|
+ f->explode ( fp );
|
|
|
+ delete f;
|
|
|
|
|
|
- if ( classifier != NULL )
|
|
|
- {
|
|
|
- std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
|
|
|
- classifier->train ( fp, examples );
|
|
|
- std::cerr << "training finished" << std::endl;
|
|
|
- }
|
|
|
- else
|
|
|
- {
|
|
|
- LabeledSetVector lvec;
|
|
|
- convertExamplesToLSet ( examples, lvec );
|
|
|
- vclassifier->teach ( lvec );
|
|
|
-// if ( usegmm )
|
|
|
-// convertLSetToSparseExamples ( examples, lvec );
|
|
|
-// else
|
|
|
- std::cerr << "classifierString: " << classifierString << std::endl;
|
|
|
- if (this->classifierString.compare("nn") == 0)
|
|
|
+ if ( classifier != NULL )
|
|
|
{
|
|
|
- convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
|
|
|
+ std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
|
|
|
+ classifier->train ( fp, examples );
|
|
|
+ std::cerr << "training finished" << std::endl;
|
|
|
}
|
|
|
else
|
|
|
{
|
|
|
- convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
|
|
|
- }
|
|
|
- vclassifier->finishTeaching();
|
|
|
- }
|
|
|
+ LabeledSetVector lvec;
|
|
|
+ convertExamplesToLSet ( examples, lvec );
|
|
|
+ vclassifier->teach ( lvec );
|
|
|
+ // if ( usegmm )
|
|
|
+ // convertLSetToSparseExamples ( examples, lvec );
|
|
|
+ // else
|
|
|
+ std::cerr << "classifierString: " << classifierString << std::endl;
|
|
|
+ if (this->classifierString.compare("nn") == 0)
|
|
|
+ {
|
|
|
+ convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
|
|
|
+ }
|
|
|
+ vclassifier->finishTeaching();
|
|
|
+ }
|
|
|
|
|
|
- fp.destroy();
|
|
|
+ fp.destroy();
|
|
|
|
|
|
- if ( save_classifier )
|
|
|
- {
|
|
|
- if ( classifier != NULL )
|
|
|
- classifier->save ( cache + "/classifier.data" );
|
|
|
- else
|
|
|
- vclassifier->save ( cache + "/veccl.data" );
|
|
|
- }
|
|
|
+ if ( save_classifier )
|
|
|
+ {
|
|
|
+ if ( classifier != NULL )
|
|
|
+ classifier->save ( cache + "/classifier.data" );
|
|
|
+ else
|
|
|
+ vclassifier->save ( cache + "/veccl.data" );
|
|
|
+ }
|
|
|
|
|
|
- ////////////
|
|
|
- //clean up//
|
|
|
- ////////////
|
|
|
- for ( int i = 0; i < ( int ) examples.size(); i++ )
|
|
|
- {
|
|
|
- examples[i].second.clean();
|
|
|
- }
|
|
|
- examples.clear();
|
|
|
+ ////////////
|
|
|
+ //clean up//
|
|
|
+ ////////////
|
|
|
+ for ( int i = 0; i < ( int ) examples.size(); i++ )
|
|
|
+ {
|
|
|
+ examples[i].second.clean();
|
|
|
+ }
|
|
|
+ examples.clear();
|
|
|
|
|
|
- cerr << "SemSeg training finished" << endl;
|
|
|
+ cerr << "SemSeg training finished" << endl;
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
|
|
@@ -433,7 +474,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
|
|
|
//segResult contains the GT labels when this method is called
|
|
|
// we simply store them in labels, to have an easy access to the GT information lateron
|
|
|
- Image labels = segresult;
|
|
|
+ NICE::Image labels = segresult;
|
|
|
//just to be sure that we do not have a GT-biased result :)
|
|
|
segresult.set(0);
|
|
|
|
|
@@ -445,7 +486,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
int xsize, ysize;
|
|
|
ce->getImageSize ( xsize, ysize );
|
|
|
|
|
|
- probabilities.reInit( xsize, ysize, cn.getMaxClassno() + 1);
|
|
|
+ probabilities.reInit( xsize, ysize, this->classNames->getMaxClassno() + 1);
|
|
|
probabilities.setAll ( 0.0 );
|
|
|
|
|
|
NICE::ColorImage img;
|
|
@@ -545,7 +586,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
timer.stop();
|
|
|
std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;
|
|
|
|
|
|
- if (visualizeALimages)
|
|
|
+ if (b_visualizeALimages)
|
|
|
{
|
|
|
ColorImage imgrgbTmp (xsize, ysize);
|
|
|
ICETools::convertToRGB ( noveltyImage, imgrgbTmp );
|
|
@@ -689,7 +730,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
newTrainExamples = examples;
|
|
|
globalMaxUncert = maxNoveltyScore;
|
|
|
//prepare for later visualization
|
|
|
-// if (visualizeALimages)
|
|
|
+// if (b_visualizeALimages)
|
|
|
visualizeRegion(img,mask,maxUncertRegion,maskedImg);
|
|
|
}
|
|
|
else
|
|
@@ -737,7 +778,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
|
|
|
noveltyImage.writeRaw(out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+".rawfloat");
|
|
|
|
|
|
- if (visualizeALimages)
|
|
|
+ if (b_visualizeALimages)
|
|
|
{
|
|
|
ICETools::convertToRGB ( noveltyImage, imgrgb );
|
|
|
showImage(imgrgb, "Novelty Image");
|
|
@@ -944,7 +985,10 @@ void SemSegNovelty::computeNoveltyByGPUncertainty( NICE::FloatImage & noveltyIm
|
|
|
const int & xsize, const int & ysize, const int & featdim )
|
|
|
{
|
|
|
|
|
|
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
|
|
|
+ double gpNoise = 0.01;
|
|
|
+ //TODO getMethod for GPHIK
|
|
|
+ //conf->gD("GPHIK", "noise", 0.01);
|
|
|
+
|
|
|
|
|
|
#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
@@ -1016,8 +1060,10 @@ void SemSegNovelty::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
|
|
|
NICE::MultiChannelImageT<double> & probabilities,
|
|
|
const int & xsize, const int & ysize, const int & featdim )
|
|
|
{
|
|
|
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
|
|
|
-
|
|
|
+ double gpNoise = 0.01;
|
|
|
+ //TODO getMethod for GPHIK
|
|
|
+ //conf->gD("GPHIK", "noise", 0.01);
|
|
|
+
|
|
|
#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
|
{
|
|
@@ -1081,7 +1127,9 @@ void SemSegNovelty::computeNoveltyByGPMeanRatio( NICE::FloatImage & noveltyImag
|
|
|
NICE::MultiChannelImageT<double> & probabilities,
|
|
|
const int & xsize, const int & ysize, const int & featdim )
|
|
|
{
|
|
|
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
|
|
|
+ double gpNoise = 0.01;
|
|
|
+ //TODO getMethod for GPHIK
|
|
|
+ //conf->gD("GPHIK", "noise", 0.01);
|
|
|
|
|
|
#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
@@ -1159,7 +1207,9 @@ void SemSegNovelty::computeNoveltyByGPWeightAll( NICE::FloatImage & noveltyImag
|
|
|
NICE::MultiChannelImageT<double> & probabilities,
|
|
|
const int & xsize, const int & ysize, const int & featdim )
|
|
|
{
|
|
|
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
|
|
|
+ double gpNoise = 0.01;
|
|
|
+ //TODO getMethod for GPHIK
|
|
|
+ //conf->gD("GPHIK", "noise", 0.01);
|
|
|
|
|
|
#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
@@ -1268,7 +1318,9 @@ void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyIm
|
|
|
NICE::MultiChannelImageT<double> & probabilities,
|
|
|
const int & xsize, const int & ysize, const int & featdim )
|
|
|
{
|
|
|
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
|
|
|
+ double gpNoise = 0.01;
|
|
|
+ //TODO getMethod for GPHIK
|
|
|
+ //conf->gD("GPHIK", "noise", 0.01);
|
|
|
|
|
|
#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
@@ -1385,7 +1437,7 @@ void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyIm
|
|
|
}
|
|
|
|
|
|
|
|
|
-void SemSegNovelty::addNewExample(const NICE::Vector& newExample, const int & newClassNo)
|
|
|
+void SemSegNovelty::addNewExample(const NICE::Vector& v_newExample, const int & newClassNo)
|
|
|
{
|
|
|
//accept the new class as valid information
|
|
|
if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
|
|
@@ -1402,13 +1454,19 @@ void SemSegNovelty::addNewExample(const NICE::Vector& newExample, const int & ne
|
|
|
//then add it to the classifier used
|
|
|
if ( classifier != NULL )
|
|
|
{
|
|
|
- //TODO
|
|
|
+ if (this->classifierString.compare("GPHIKClassifier") == 0)
|
|
|
+ {
|
|
|
+ Example newExample;
|
|
|
+ SparseVector svec ( v_newExample );
|
|
|
+ newExample.svec = &svec;
|
|
|
+ static_cast<GPHIKClassifierNICE*>(classifier)->addExample ( newExample, newClassNo );
|
|
|
+ }
|
|
|
}
|
|
|
else //vclassifier
|
|
|
{
|
|
|
if (this->classifierString.compare("nn") == 0)
|
|
|
{
|
|
|
- vclassifier->teach ( newClassNo, newExample );
|
|
|
+ vclassifier->teach ( newClassNo, v_newExample );
|
|
|
}
|
|
|
}
|
|
|
}
|
|
@@ -1419,7 +1477,7 @@ void SemSegNovelty::addNovelExamples()
|
|
|
Timer timer;
|
|
|
|
|
|
//show the image that contains the most novel region
|
|
|
- if (visualizeALimages)
|
|
|
+ if (b_visualizeALimages)
|
|
|
showImage(maskedImg, "Most novel region");
|
|
|
|
|
|
timer.start();
|
|
@@ -1470,7 +1528,7 @@ void SemSegNovelty::addNovelExamples()
|
|
|
//then add the new features to the classifier used
|
|
|
if ( classifier != NULL )
|
|
|
{
|
|
|
- if (this->classifierString.compare("ClassifierGPHIK") == 0)
|
|
|
+ if (this->classifierString.compare("GPHIKClassifier") == 0)
|
|
|
{
|
|
|
classifier->addMultipleExamples ( this->newTrainExamples );
|
|
|
}
|
|
@@ -1517,3 +1575,632 @@ const Examples * SemSegNovelty::getNovelExamples() const
|
|
|
{
|
|
|
return &(this->newTrainExamples);
|
|
|
}
|
|
|
+
|
|
|
+///////////////////// INTERFACE PERSISTENT /////////////////////
|
|
|
+// interface specific methods for store and restore
|
|
|
+///////////////////// INTERFACE PERSISTENT /////////////////////
|
|
|
+
|
|
|
+void SemSegNovelty::restore ( std::istream & is, int format )
|
|
|
+{
|
|
|
+ //delete everything we knew so far...
|
|
|
+ this->clear();
|
|
|
+
|
|
|
+ bool b_restoreVerbose ( false );
|
|
|
+#ifdef B_RESTOREVERBOSE
|
|
|
+ b_restoreVerbose = true;
|
|
|
+#endif
|
|
|
+
|
|
|
+ if ( is.good() )
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " restore SemSegNovelty" << std::endl;
|
|
|
+
|
|
|
+ std::string tmp;
|
|
|
+ is >> tmp; //class name
|
|
|
+
|
|
|
+ if ( ! this->isStartTag( tmp, "SemSegNovelty" ) )
|
|
|
+ {
|
|
|
+ std::cerr << " WARNING - attempt to restore SemSegNovelty, but start flag " << tmp << " does not match! Aborting... " << std::endl;
|
|
|
+ throw;
|
|
|
+ }
|
|
|
+
|
|
|
+ if (classifier != NULL)
|
|
|
+ {
|
|
|
+ delete classifier;
|
|
|
+ classifier = NULL;
|
|
|
+ }
|
|
|
+
|
|
|
+ is.precision (numeric_limits<double>::digits10 + 1);
|
|
|
+
|
|
|
+ bool b_endOfBlock ( false ) ;
|
|
|
+
|
|
|
+ while ( !b_endOfBlock )
|
|
|
+ {
|
|
|
+ is >> tmp; // start of block
|
|
|
+
|
|
|
+ if ( this->isEndTag( tmp, "SemSegNovelty" ) )
|
|
|
+ {
|
|
|
+ b_endOfBlock = true;
|
|
|
+ continue;
|
|
|
+ }
|
|
|
+
|
|
|
+ tmp = this->removeStartTag ( tmp );
|
|
|
+
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " currently restore section " << tmp << " in SemSegNovelty" << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // FEATURE EXTRACTION //
|
|
|
+ ///////////////////////////////
|
|
|
+ if ( tmp.compare("featExtract") == 0 )
|
|
|
+ {
|
|
|
+ featExtract->restore(is, format);
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("trainWsize") == 0 )
|
|
|
+ {
|
|
|
+ is >> trainWsize;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("whs") == 0 )
|
|
|
+ {
|
|
|
+ is >> whs;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("testWSize") == 0 )
|
|
|
+ {
|
|
|
+ is >> testWSize;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ ///////////////////////////////
|
|
|
+ // NOVELTY COMPUTATION //
|
|
|
+ ///////////////////////////////
|
|
|
+ else if ( tmp.compare("noveltyMethod") == 0 )
|
|
|
+ {
|
|
|
+ unsigned int ui_noveltyMethod;
|
|
|
+ is >> ui_noveltyMethod;
|
|
|
+ this->noveltyMethod = static_cast<NoveltyMethod> ( ui_noveltyMethod );
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("noveltyMethodString") == 0 )
|
|
|
+ {
|
|
|
+ is >> noveltyMethodString;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("globalMaxUncert") == 0 )
|
|
|
+ {
|
|
|
+ is >> globalMaxUncert;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("mostNoveltyWithMaxScores") == 0 )
|
|
|
+ {
|
|
|
+ is >> mostNoveltyWithMaxScores;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("findMaximumUncert") == 0 )
|
|
|
+ {
|
|
|
+ is >> findMaximumUncert;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ //TODO maskedImg
|
|
|
+ else if ( tmp.compare("b_visualizeALimages") == 0 )
|
|
|
+ {
|
|
|
+ is >> b_visualizeALimages;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ ///////////////////////////////
|
|
|
+ // CLASSIFICATION STUFF //
|
|
|
+ ///////////////////////////////
|
|
|
+ else if ( tmp.compare("classifier") == 0 )
|
|
|
+ {
|
|
|
+ std::string isNull;
|
|
|
+ is >> isNull;
|
|
|
+
|
|
|
+ // check whether we originally used a classifier
|
|
|
+ if ( isNull.compare( "NULL" ) == 0 )
|
|
|
+ {
|
|
|
+ if ( classifier != NULL )
|
|
|
+ delete classifier;
|
|
|
+ classifier = NULL;
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ if ( classifier == NULL )
|
|
|
+ classifier = new OBJREC::GPHIKClassifierNICE();
|
|
|
+
|
|
|
+ classifier->restore(is, format);
|
|
|
+ }
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("vclassifier") == 0 )
|
|
|
+ {
|
|
|
+ std::string isNull;
|
|
|
+ is >> isNull;
|
|
|
+
|
|
|
+ // check whether we originally used a vclassifier
|
|
|
+ if ( isNull.compare( "NULL" ) == 0 )
|
|
|
+ {
|
|
|
+ if ( vclassifier != NULL )
|
|
|
+ delete vclassifier;
|
|
|
+ vclassifier = NULL;
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ fthrow ( NICE::Exception, "Restoring of VecClassifiers is not implemented yet!" );
|
|
|
+/* if ( vclassifier == NULL )
|
|
|
+ vclassifier = new OBJREC::VecClassifier();
|
|
|
+
|
|
|
+ vclassifier->restore(is, format); */
|
|
|
+ }
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("forbidden_classesTrain") == 0 )
|
|
|
+ {
|
|
|
+ is >> tmp; // size
|
|
|
+ int forbClTrainSize ( 0 );
|
|
|
+ is >> forbClTrainSize;
|
|
|
+
|
|
|
+ forbidden_classesTrain.clear();
|
|
|
+
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << "restore forbidden_classesTrain with size: " << forbClTrainSize << std::endl;
|
|
|
+
|
|
|
+ if ( forbClTrainSize > 0 )
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " restore forbidden_classesTrain" << std::endl;
|
|
|
+
|
|
|
+ for (int i = 0; i < forbClTrainSize; i++)
|
|
|
+ {
|
|
|
+ int classNo;
|
|
|
+ is >> classNo;
|
|
|
+ forbidden_classesTrain.insert ( classNo );
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " skip restoring forbidden_classesTrain" << std::endl;
|
|
|
+ }
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("forbidden_classesActiveLearning") == 0 )
|
|
|
+ {
|
|
|
+ is >> tmp; // size
|
|
|
+ int forbClALSize ( 0 );
|
|
|
+ is >> forbClALSize;
|
|
|
+
|
|
|
+ forbidden_classesActiveLearning.clear();
|
|
|
+
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << "restore forbidden_classesActiveLearning with size: " << forbClALSize << std::endl;
|
|
|
+
|
|
|
+ if ( forbClALSize > 0 )
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " restore forbidden_classesActiveLearning" << std::endl;
|
|
|
+
|
|
|
+ for (int i = 0; i < forbClALSize; i++)
|
|
|
+ {
|
|
|
+ int classNo;
|
|
|
+ is >> classNo;
|
|
|
+ forbidden_classesActiveLearning.insert ( classNo );
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " skip restoring forbidden_classesActiveLearning" << std::endl;
|
|
|
+ }
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("classesInUse") == 0 )
|
|
|
+ {
|
|
|
+ is >> tmp; // size
|
|
|
+ int clInUseSize ( 0 );
|
|
|
+ is >> clInUseSize;
|
|
|
+
|
|
|
+ classesInUse.clear();
|
|
|
+
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << "restore classesInUse with size: " << clInUseSize << std::endl;
|
|
|
+
|
|
|
+ if ( clInUseSize > 0 )
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " restore classesInUse" << std::endl;
|
|
|
+
|
|
|
+ for (int i = 0; i < clInUseSize; i++)
|
|
|
+ {
|
|
|
+ int classNo;
|
|
|
+ is >> classNo;
|
|
|
+ classesInUse.insert ( classNo );
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << " skip restoring classesInUse" << std::endl;
|
|
|
+ }
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("numberOfClasses") == 0 )
|
|
|
+ {
|
|
|
+ is >> numberOfClasses;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("read_classifier") == 0 )
|
|
|
+ {
|
|
|
+ is >> read_classifier;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("save_classifier") == 0 )
|
|
|
+ {
|
|
|
+ is >> save_classifier;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("cache") == 0 )
|
|
|
+ {
|
|
|
+ is >> cache;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("resultdir") == 0 )
|
|
|
+ {
|
|
|
+ is >> resultdir;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ //TODO newTrainExamples
|
|
|
+ ///////////////////////////////
|
|
|
+ // SEGMENTATION STUFF //
|
|
|
+ ///////////////////////////////
|
|
|
+ //TODO regionSeg
|
|
|
+ else if ( tmp.compare("s_rsMethode") == 0 )
|
|
|
+ {
|
|
|
+ is >> this->s_rsMethode;
|
|
|
+ // theoretically, we should properly store and restore the regionSeg object. However, its parent class does not provide
|
|
|
+ // a Persistent interface yet. Hence, we perform this tiny workaround which works, since regionSeg is not changed over time...
|
|
|
+ // be aware that only parameters originally set via the config are taken into account...
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ //NOTE regionSeg seems really important to keep track of
|
|
|
+ else if ( tmp.compare("reuseSegmentation") == 0 )
|
|
|
+ {
|
|
|
+ is >> reuseSegmentation;
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ else if ( tmp.compare("queriedRegions") == 0 )
|
|
|
+ {
|
|
|
+ is >> tmp; // size
|
|
|
+ int queriedRegionsSize ( 0 );
|
|
|
+ is >> queriedRegionsSize;
|
|
|
+ queriedRegions.clear();
|
|
|
+
|
|
|
+ if ( b_restoreVerbose )
|
|
|
+ std::cerr << "restore queriedRegions with size: " << queriedRegionsSize << std::endl;
|
|
|
+ for ( int i = 0; i < queriedRegionsSize; i++ )
|
|
|
+ {
|
|
|
+ // restore key
|
|
|
+ std::string key;
|
|
|
+ is >> key;
|
|
|
+
|
|
|
+ // restore values -- inner loop over sets
|
|
|
+ is >> tmp; // size
|
|
|
+ int regionsOfImgSize ( 0 );
|
|
|
+ is >> regionsOfImgSize;
|
|
|
+
|
|
|
+ std::set< int > regionsOfImg;
|
|
|
+ regionsOfImg.clear();
|
|
|
+
|
|
|
+ for (int i = 0; i < regionsOfImgSize; i++)
|
|
|
+ {
|
|
|
+ int idxRegion;
|
|
|
+ is >> idxRegion;
|
|
|
+ regionsOfImg.insert ( idxRegion );
|
|
|
+ }
|
|
|
+ queriedRegions.insert ( std::pair<std::string, std::set< int > > ( key, regionsOfImg ) );
|
|
|
+ }
|
|
|
+
|
|
|
+ is >> tmp; // end of block
|
|
|
+ tmp = this->removeEndTag ( tmp );
|
|
|
+ }
|
|
|
+ //
|
|
|
+ //TODO currentRegionToQuery
|
|
|
+ //
|
|
|
+ ///////////////////////////////
|
|
|
+ // PARENT OBJECT //
|
|
|
+ ///////////////////////////////
|
|
|
+ else if ( tmp.compare("SemSegNovelty--Parent") == 0 )
|
|
|
+ {
|
|
|
+ // restore parent object
|
|
|
+ SemanticSegmentation::restore(is); is >> tmp; tmp = this->removeEndTag ( tmp ); // consume end of block
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ std::cerr << "WARNING -- unexpected SemSegNovelty object -- " << tmp << " -- for restoration... aborting" << std::endl;
|
|
|
+ throw;
|
|
|
+ }
|
|
|
+
|
|
|
+ // INSTANTIATE (YET) NON-RESTORABLE OBJECTS
|
|
|
+ //TODO destructor of regionSeg is non-virtual so far - change this accordingly!
|
|
|
+ if ( this->regionSeg != NULL )
|
|
|
+ delete this->regionSeg;
|
|
|
+
|
|
|
+ if( this->s_rsMethode == "none" )
|
|
|
+ {
|
|
|
+ this->regionSeg = NULL;
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ //NOTE using an empty config file might not be safe...
|
|
|
+ NICE::Config tmpConfEmpty;
|
|
|
+ RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod( &tmpConfEmpty, this->s_rsMethode );
|
|
|
+ if ( reuseSegmentation )
|
|
|
+ this->regionSeg = new RSCache ( &tmpConfEmpty, tmpRegionSeg );
|
|
|
+ else
|
|
|
+ this->regionSeg = tmpRegionSeg;
|
|
|
+ }
|
|
|
+
|
|
|
+ // done restoration
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ std::cerr << "SemSegNovelty::restore -- InStream not initialized - restoring not possible!" << std::endl;
|
|
|
+ throw;
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+}
|
|
|
+
|
|
|
+void SemSegNovelty::store ( std::ostream & os, int format ) const
|
|
|
+{
|
|
|
+ if (os.good())
|
|
|
+ {
|
|
|
+ // show starting point
|
|
|
+ os << this->createStartTag( "SemSegNovelty" ) << std::endl;
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // FEATURE EXTRACTION //
|
|
|
+ ///////////////////////////////
|
|
|
+ os << this->createStartTag( "featExtract" ) << std::endl;
|
|
|
+ featExtract->store ( os );
|
|
|
+ os << this->createEndTag( "featExtract" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "trainWsize" ) << std::endl;
|
|
|
+ os << this->trainWsize << std::endl;
|
|
|
+ os << this->createEndTag( "trainWsize" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "whs" ) << std::endl;
|
|
|
+ os << this->whs << std::endl;
|
|
|
+ os << this->createEndTag( "whs" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "testWSize" ) << std::endl;
|
|
|
+ os << this->testWSize << std::endl;
|
|
|
+ os << this->createEndTag( "testWSize" ) << std::endl;
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // NOVELTY COMPUTATION //
|
|
|
+ ///////////////////////////////
|
|
|
+
|
|
|
+ os << this->createStartTag( "noveltyMethod" ) << std::endl;
|
|
|
+ os << this->noveltyMethod << std::endl;
|
|
|
+ os << this->createEndTag( "noveltyMethod" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "noveltyMethodString" ) << std::endl;
|
|
|
+ os << this->noveltyMethodString << std::endl;
|
|
|
+ os << this->createEndTag( "noveltyMethodString" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "globalMaxUncert" ) << std::endl;
|
|
|
+ os << this->globalMaxUncert << std::endl;
|
|
|
+ os << this->createEndTag( "globalMaxUncert" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "mostNoveltyWithMaxScores" ) << std::endl;
|
|
|
+ os << this->mostNoveltyWithMaxScores << std::endl;
|
|
|
+ os << this->createEndTag( "mostNoveltyWithMaxScores" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "findMaximumUncert" ) << std::endl;
|
|
|
+ os << this->findMaximumUncert << std::endl;
|
|
|
+ os << this->createEndTag( "findMaximumUncert" ) << std::endl;
|
|
|
+
|
|
|
+ //TODO maskedImg
|
|
|
+
|
|
|
+ os << this->createStartTag( "b_visualizeALimages" ) << std::endl;
|
|
|
+ os << this->b_visualizeALimages << std::endl;
|
|
|
+ os << this->createEndTag( "b_visualizeALimages" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // CLASSIFICATION STUFF //
|
|
|
+ ///////////////////////////////
|
|
|
+
|
|
|
+ os << this->createStartTag( "classifierString" ) << std::endl;
|
|
|
+ os << this->classifierString << std::endl;
|
|
|
+ os << this->createEndTag( "classifierString" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "classifier" ) << std::endl;
|
|
|
+ if ( this->classifier != NULL )
|
|
|
+ {
|
|
|
+ os << "NOTNULL" << std::endl;
|
|
|
+ classifier->store ( os, format );
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ os << "NULL" << std::endl;
|
|
|
+ }
|
|
|
+ os << this->createEndTag( "classifier" ) << std::endl;
|
|
|
+
|
|
|
+ //
|
|
|
+
|
|
|
+ os << this->createStartTag( "vclassifier" ) << std::endl;
|
|
|
+ if ( this->vclassifier != NULL )
|
|
|
+ {
|
|
|
+ os << "NOTNULL" << std::endl;
|
|
|
+ vclassifier->store ( os, format );
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ os << "NULL" << std::endl;
|
|
|
+ }
|
|
|
+ os << this->createEndTag( "vclassifier" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ os << this->createStartTag( "forbidden_classesTrain" ) << std::endl;
|
|
|
+ os << "size: " << forbidden_classesTrain.size() << std::endl;
|
|
|
+
|
|
|
+ for ( std::set< int >::const_iterator itForbClassTrain = forbidden_classesTrain.begin();
|
|
|
+ itForbClassTrain != forbidden_classesTrain.end();
|
|
|
+ itForbClassTrain++
|
|
|
+ )
|
|
|
+ {
|
|
|
+ os << *itForbClassTrain << " " << std::endl;
|
|
|
+ }
|
|
|
+ os << this->createEndTag( "forbidden_classesTrain" ) << std::endl;
|
|
|
+
|
|
|
+ //
|
|
|
+
|
|
|
+ os << this->createStartTag( "forbidden_classesActiveLearning" ) << std::endl;
|
|
|
+ os << "size: " << forbidden_classesActiveLearning.size() << std::endl;
|
|
|
+
|
|
|
+ for ( std::set< int >::const_iterator itForbClassAL = forbidden_classesActiveLearning.begin();
|
|
|
+ itForbClassAL != forbidden_classesActiveLearning.end();
|
|
|
+ itForbClassAL++
|
|
|
+ )
|
|
|
+ {
|
|
|
+ os << *itForbClassAL << " " << std::endl;
|
|
|
+ }
|
|
|
+ os << this->createEndTag( "forbidden_classesActiveLearning" ) << std::endl;
|
|
|
+
|
|
|
+ //
|
|
|
+
|
|
|
+ os << this->createStartTag( "classesInUse" ) << std::endl;
|
|
|
+ os << "size: " << classesInUse.size() << std::endl;
|
|
|
+
|
|
|
+ for ( std::set< int >::const_iterator itClassesInUse = classesInUse.begin();
|
|
|
+ itClassesInUse != classesInUse.end();
|
|
|
+ itClassesInUse++
|
|
|
+ )
|
|
|
+ {
|
|
|
+ os << *itClassesInUse << " " << std::endl;
|
|
|
+ }
|
|
|
+ os << this->createEndTag( "classesInUse" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ os << this->createStartTag( "numberOfClasses" ) << std::endl;
|
|
|
+ os << this->numberOfClasses << std::endl;
|
|
|
+ os << this->createEndTag( "numberOfClasses" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ os << this->createStartTag( "read_classifier" ) << std::endl;
|
|
|
+ os << this->read_classifier << std::endl;
|
|
|
+ os << this->createEndTag( "read_classifier" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ os << this->createStartTag( "save_classifier" ) << std::endl;
|
|
|
+ os << this->save_classifier << std::endl;
|
|
|
+ os << this->createEndTag( "save_classifier" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ os << this->createStartTag( "cache" ) << std::endl;
|
|
|
+ os << this->cache << std::endl;
|
|
|
+ os << this->createEndTag( "cache" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ os << this->createStartTag( "resultdir" ) << std::endl;
|
|
|
+ os << this->resultdir << std::endl;
|
|
|
+ os << this->createEndTag( "resultdir" ) << std::endl;
|
|
|
+
|
|
|
+ //TODO newTrainExamples
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // SEGMENTATION STUFF //
|
|
|
+ ///////////////////////////////
|
|
|
+
|
|
|
+ // theoretically, we should properly store and restore the regionSeg object. However, its parent class does not provide
|
|
|
+ // a Persistent interface yet. Hence, we perform this tiny workaround which works, since regionSeg is not changed over time...
|
|
|
+ // be aware that only parameters originally set via the config are taken into account...
|
|
|
+ os << this->createStartTag( "s_rsMethode" ) << std::endl;
|
|
|
+ os << this->s_rsMethode << std::endl;
|
|
|
+ os << this->createEndTag( "s_rsMethode" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "reuseSegmentation" ) << std::endl;
|
|
|
+ os << this->reuseSegmentation << std::endl;
|
|
|
+ os << this->createEndTag( "reuseSegmentation" ) << std::endl;
|
|
|
+
|
|
|
+ os << this->createStartTag( "queriedRegions" ) << std::endl;
|
|
|
+ os << "size: " << queriedRegions.size() << std::endl;
|
|
|
+ std::map< std::string, std::set< int > >::const_iterator itQueriedRegions = queriedRegions.begin();
|
|
|
+ for ( uint i = 0; i < queriedRegions.size(); i++ )
|
|
|
+ {
|
|
|
+ // store key
|
|
|
+ os << itQueriedRegions->first << std::endl;
|
|
|
+
|
|
|
+ // store values -- inner loop over sets
|
|
|
+ os << "size: " << ( itQueriedRegions->second ).size() << std::endl;
|
|
|
+
|
|
|
+ for ( std::set< int >::const_iterator itRegionsOfImg = ( itQueriedRegions->second ).begin();
|
|
|
+ itRegionsOfImg != ( itQueriedRegions->second ).end();
|
|
|
+ itRegionsOfImg++
|
|
|
+ )
|
|
|
+ {
|
|
|
+ os << *itRegionsOfImg << " " << std::endl;
|
|
|
+ }
|
|
|
+
|
|
|
+ itQueriedRegions++;
|
|
|
+ }
|
|
|
+ os << this->createEndTag( "queriedRegions" ) << std::endl;
|
|
|
+ //
|
|
|
+ //TODO currentRegionToQuery
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+ ///////////////////////////////
|
|
|
+ // PARENT OBJECT //
|
|
|
+ ///////////////////////////////
|
|
|
+ os << this->createStartTag( "SemSegNovelty--Parent" ) << std::endl;
|
|
|
+ SemanticSegmentation::store(os);
|
|
|
+ os << this->createEndTag( "SemSegNovelty--Parent" ) << std::endl;
|
|
|
+
|
|
|
+
|
|
|
+ // done
|
|
|
+ os << this->createEndTag( "SemSegNovelty" ) << std::endl;
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ std::cerr << "OutStream not initialized - storing not possible!" << std::endl;
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+void SemSegNovelty::clear ()
|
|
|
+{
|
|
|
+ //TODO
|
|
|
+}
|