@@ -3,13 +3,16 @@

#include "SemSegNovelty.h"

-#include "core/image/FilterT.h"
-#include "gp-hik-exp/GPHIKClassifierNICE.h"
-#include "vislearning/baselib/ICETools.h"
-#include "vislearning/baselib/Globals.h"
-#include "vislearning/features/fpfeatures/SparseVectorFeature.h"
-#include "core/basics/StringTools.h"
-#include "core/basics/Timer.h"
+#include <core/image/FilterT.h>
+#include <core/basics/numerictools.h>
+#include <core/basics/StringTools.h>
+#include <core/basics/Timer.h>
+
+#include <gp-hik-exp/GPHIKClassifierNICE.h>
+#include <vislearning/baselib/ICETools.h>
+#include <vislearning/baselib/Globals.h>
+#include <vislearning/features/fpfeatures/SparseVectorFeature.h>
+
#include "segmentation/GenericRegionSegmentationMethodSelection.h"

using namespace std;
|
|
@@ -28,14 +31,31 @@ SemSegNovelty::SemSegNovelty ( const Config *conf,
|
|
|
|
|
|
featExtract = new LFColorWeijer ( conf );
|
|
|
|
|
|
- save_cache = conf->gB ( "FPCPixel", "save_cache", true );
|
|
|
- read_cache = conf->gB ( "FPCPixel", "read_cache", false );
|
|
|
-// uncertdir = conf->gS("debug", "uncertainty", "uncertainty");
|
|
|
+ this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); //save and read segmentation results from files
|
|
|
+ this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true ); //save the classifier to a file
|
|
|
+ this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false ); //read the classifier from a file
|
|
|
+
|
|
|
//write uncertainty results in the same folder as done for the segmentation results
|
|
|
- uncertdir = conf->gS("debug", "resultdir", "result");
|
|
|
+ resultdir = conf->gS("debug", "resultdir", "result");
|
|
|
cache = conf->gS ( "cache", "root", "" );
|
|
|
|
|
|
- classifier = new GPHIKClassifierNICE ( conf, "ClassiferGPHIK" );;
|
|
|
+
|
|
|
+  //work around the const attribute: create a local, modifiable copy of the config
+  Config confCopy = *conf;
|
|
|
+
|
|
|
+  //just to make sure that we do NOT perform an optimization after every iteration step
+  //this would just take a lot of time, which is not desired so far
+  confCopy.sB("ClassifierGPHIK","performOptimizationAfterIncrement",false);
|
|
|
+
|
|
|
+ classifierString = conf->gS ( section, "classifier", "ClassifierGPHIK" );
|
|
|
+ classifier = NULL;
|
|
|
+ vclassifier = NULL;
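+  // depending on the config entry "classifier", either the GP-HIK feature-pool classifier (classifier)
+  // or a generic vector-based classifier (vclassifier) is instantiated; exactly one of them is non-NULL and used below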
+ if ( classifierString.compare("ClassifierGPHIK") == 0)
|
|
|
+ classifier = new GPHIKClassifierNICE ( &confCopy, "ClassifierGPHIK" );
|
|
|
+ else
|
|
|
+ vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );
|
|
|
+
|
|
|
+
|
|
|
|
|
|
findMaximumUncert = conf->gB(section, "findMaximumUncert", true);
|
|
|
whs = conf->gI ( section, "window_size", 10 );
|
|
@@ -53,7 +73,7 @@ SemSegNovelty::SemSegNovelty ( const Config *conf,
|
|
|
else
|
|
|
{
|
|
|
RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod(conf, rsMethode);
|
|
|
- if ( save_cache )
|
|
|
+ if ( reuseSegmentation )
|
|
|
regionSeg = new RSCache ( conf, tmpRegionSeg );
|
|
|
else
|
|
|
regionSeg = tmpRegionSeg;
|
|
@@ -61,17 +81,23 @@ SemSegNovelty::SemSegNovelty ( const Config *conf,
|
|
|
|
|
|
cn = md->getClassNames ( "train" );
|
|
|
|
|
|
- if ( read_cache )
|
|
|
+ if ( read_classifier )
|
|
|
{
|
|
|
- string classifierdst = "/classifier.data";
|
|
|
- fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
-
|
|
|
try
|
|
|
{
|
|
|
if ( classifier != NULL )
|
|
|
{
|
|
|
+ string classifierdst = "/classifier.data";
|
|
|
+ fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
classifier->read ( cache + classifierdst );
|
|
|
}
|
|
|
+ else
|
|
|
+ {
|
|
|
+ string classifierdst = "/veccl.data";
|
|
|
+ fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
|
|
|
+ vclassifier->read ( cache + classifierdst );
|
|
|
+ }
|
|
|
+
|
|
|
|
|
|
fprintf ( stderr, "SemSegNovelty:: successfully read\n" );
|
|
|
}
|
|
@@ -90,56 +116,84 @@ SemSegNovelty::SemSegNovelty ( const Config *conf,
|
|
|
if (noveltyMethodString.compare("gp-variance") == 0) // novel = large variance
|
|
|
{
|
|
|
this->noveltyMethod = GPVARIANCE;
|
|
|
+ this->mostNoveltyWithMaxScores = true;
|
|
|
}
|
|
|
else if (noveltyMethodString.compare("gp-uncertainty") == 0) //novel = large uncertainty (mean / var)
|
|
|
{
|
|
|
this->noveltyMethod = GPUNCERTAINTY;
|
|
|
+ this->mostNoveltyWithMaxScores = false;
|
|
|
+ globalMaxUncert = numeric_limits<double>::max();
|
|
|
}
|
|
|
else if (noveltyMethodString.compare("gp-mean") == 0) //novel = small mean
|
|
|
{
|
|
|
this->noveltyMethod = GPMINMEAN;
|
|
|
+ this->mostNoveltyWithMaxScores = false;
|
|
|
+ globalMaxUncert = numeric_limits<double>::max();
|
|
|
}
|
|
|
else if (noveltyMethodString.compare("gp-meanRatio") == 0) //novel = small difference between mean of most plausible class and mean of snd
|
|
|
// most plausible class (not useful in binary settings)
|
|
|
{
|
|
|
this->noveltyMethod = GPMEANRATIO;
|
|
|
+ this->mostNoveltyWithMaxScores = false;
|
|
|
+ globalMaxUncert = numeric_limits<double>::max();
|
|
|
}
|
|
|
else if (noveltyMethodString.compare("gp-weightAll") == 0) // novel = large weight in alpha vector after updating the model (can be predicted exactly)
|
|
|
{
|
|
|
this->noveltyMethod = GPWEIGHTALL;
|
|
|
+ this->mostNoveltyWithMaxScores = true;
|
|
|
}
|
|
|
else if (noveltyMethodString.compare("gp-weightRatio") == 0) // novel = small difference between weights for alpha vectors
|
|
|
// with assumptions of GT label to be the most
|
|
|
// plausible against the second most plausible class
|
|
|
{
|
|
|
this->noveltyMethod = GPWEIGHTRATIO;
|
|
|
- }
|
|
|
+ this->mostNoveltyWithMaxScores = false;
|
|
|
+ globalMaxUncert = numeric_limits<double>::max();
|
|
|
+ }
|
|
|
+ else if (noveltyMethodString.compare("random") == 0)
|
|
|
+ {
|
|
|
+ initRand();
|
|
|
+ this->noveltyMethod = RANDOM;
|
|
|
+ }
|
|
|
else
|
|
|
{
|
|
|
this->noveltyMethod = GPVARIANCE;
|
|
|
+ this->mostNoveltyWithMaxScores = true;
|
|
|
}
|
|
|
+
|
|
|
+  //we have not queried any region so far
+  queriedRegions.clear();
|
|
|
+ visualizeALimages = conf->gB(section, "visualizeALimages", false);
|
|
|
}
|
|
|
|
|
|
SemSegNovelty::~SemSegNovelty()
|
|
|
{
|
|
|
if(newTrainExamples.size() > 0)
|
|
|
{
|
|
|
- // most uncertain region
|
|
|
- showImage(maskedImg);
|
|
|
- //classifier->add(newTrainExamples)
|
|
|
+ // show most uncertain region
|
|
|
+ if (visualizeALimages)
|
|
|
+ showImage(maskedImg);
|
|
|
+
|
|
|
+ //incorporate new information into the classifier
|
|
|
+ if (classifier != NULL)
|
|
|
+ classifier->addMultipleExamples(newTrainExamples);
|
|
|
+
|
|
|
+    //store the classifier so that we can read it in again in the next round (if desired)
|
|
|
classifier->save ( cache + "/classifier.data" );
|
|
|
}
|
|
|
|
|
|
// clean-up
|
|
|
if ( classifier != NULL )
|
|
|
delete classifier;
|
|
|
+ if ( vclassifier != NULL )
|
|
|
+ delete vclassifier;
|
|
|
if ( featExtract != NULL )
|
|
|
delete featExtract;
|
|
|
}
|
|
|
|
|
|
void SemSegNovelty::visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix ®ions, int region, NICE::ColorImage &outimage)
|
|
|
{
|
|
|
- vector<uchar> color;
|
|
|
+ std::vector<uchar> color;
|
|
|
color.push_back(255);
|
|
|
color.push_back(0);
|
|
|
color.push_back(0);
|
|
@@ -179,14 +233,7 @@ void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
////////////////////////
|
|
|
// feature extraction //
|
|
|
////////////////////////
|
|
|
-
|
|
|
- std::string forbidden_classes_s = conf->gS ( "analysis", "donttrain", "" );
|
|
|
- if ( forbidden_classes_s == "" )
|
|
|
- {
|
|
|
- forbidden_classes_s = conf->gS ( "analysis", "forbidden_classes", "" );
|
|
|
- }
|
|
|
- cn.getSelection ( forbidden_classes_s, forbidden_classes );
|
|
|
-
|
|
|
+
|
|
|
//check the same thing for the training classes - this is very specific to our setup
|
|
|
std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
|
|
|
if ( forbidden_classesTrain_s == "" )
|
|
@@ -194,7 +241,7 @@ void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
|
|
|
}
|
|
|
cn.getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
|
|
|
-
|
|
|
+
|
|
|
|
|
|
ProgressBar pb ( "Local Feature Extraction" );
|
|
|
pb.show();
|
|
@@ -264,7 +311,7 @@ void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
feats.calcIntegral ( c );
|
|
|
}
|
|
|
|
|
|
- for ( int y = 0; y < ysize; y += trainWsize )
|
|
|
+ for ( int y = 0; y < ysize; y += trainWsize)
|
|
|
{
|
|
|
for ( int x = 0; x < xsize; x += trainWsize )
|
|
|
{
|
|
@@ -295,8 +342,12 @@ void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
|
|
|
example.position = imgnb;
|
|
|
examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
|
|
|
+
|
|
|
}
|
|
|
}
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
|
|
|
delete ce;
|
|
|
imgnb++;
|
|
@@ -327,19 +378,39 @@ void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
delete f;
|
|
|
|
|
|
if ( classifier != NULL )
|
|
|
+ {
|
|
|
+ std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
|
|
|
classifier->train ( fp, examples );
|
|
|
+ std::cerr << "training finished" << std::endl;
|
|
|
+ }
|
|
|
else
|
|
|
{
|
|
|
- cerr << "no classifier selected?!" << endl;
|
|
|
- exit ( -1 );
|
|
|
- }
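+    // vector-classifier path: convert the feature-pool examples into a LabeledSetVector and train on it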
+ LabeledSetVector lvec;
|
|
|
+ convertExamplesToLSet ( examples, lvec );
|
|
|
+ vclassifier->teach ( lvec );
|
|
|
+// if ( usegmm )
|
|
|
+// convertLSetToSparseExamples ( examples, lvec );
|
|
|
+// else
|
|
|
+ std::cerr << "classifierString: " << classifierString << std::endl;
|
|
|
+ if (this->classifierString.compare("nn") == 0)
|
|
|
+ {
|
|
|
+ convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
|
|
|
+ }
|
|
|
+ vclassifier->finishTeaching();
|
|
|
+ }
|
|
|
|
|
|
fp.destroy();
|
|
|
|
|
|
- if ( save_cache )
|
|
|
+ if ( save_classifier )
|
|
|
{
|
|
|
if ( classifier != NULL )
|
|
|
classifier->save ( cache + "/classifier.data" );
|
|
|
+ else
|
|
|
+ vclassifier->save ( cache + "/veccl.data" );
|
|
|
}
|
|
|
|
|
|
////////////
|
|
@@ -354,12 +425,16 @@ void SemSegNovelty::train ( const MultiDataset *md )
|
|
|
cerr << "SemSeg training finished" << endl;
|
|
|
}
|
|
|
|
|
|
+
|
|
|
void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NICE::MultiChannelImageT<double> & probabilities )
|
|
|
-{
|
|
|
+{
|
|
|
Timer timer;
|
|
|
timer.start();
|
|
|
|
|
|
+  //segresult contains the GT labels when this method is called
+  // we simply store them in labels to have easy access to the GT information later on
|
|
|
Image labels = segresult;
|
|
|
+ //just to be sure that we do not have a GT-biased result :)
|
|
|
segresult.set(0);
|
|
|
|
|
|
int featdim = -1;
|
|
@@ -402,16 +477,24 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
{
|
|
|
feats.calcIntegral ( c );
|
|
|
}
|
|
|
-
|
|
|
- FloatImage noveltyImage ( xsize, ysize );
|
|
|
- noveltyImage.set ( 0.0 );
|
|
|
|
|
|
timer.stop();
|
|
|
- cout << "first: " << timer.getLastAbsolute() << endl;
|
|
|
+ std::cout << "AL time for preparation: " << timer.getLastAbsolute() << std::endl;
|
|
|
|
|
|
timer.start();
|
|
|
- this->computeClassificationResults( feats, segresult, probabilities, xsize, ysize, featdim);
|
|
|
- timer.stop();
|
|
|
+  //classification results currently only need to be computed separately if we use the vclassifier, i.e., the nearest
+  // neighbor classifier used for the "novel feature learning" approach
+  //in all other settings, such as active semantic segmentation in general, we do this within the novelty computation methods
|
|
|
+ if ( classifier == NULL )
|
|
|
+ {
|
|
|
+ this->computeClassificationResults( feats, segresult, probabilities, xsize, ysize, featdim);
|
|
|
+ }
|
|
|
+// timer.stop();
|
|
|
+//
|
|
|
+// std::cerr << "classification results computed" << std::endl;
|
|
|
+
|
|
|
+ FloatImage noveltyImage ( xsize, ysize );
|
|
|
+ noveltyImage.set ( 0.0 );
|
|
|
|
|
|
switch (noveltyMethod)
|
|
|
{
|
|
@@ -427,6 +510,7 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
}
|
|
|
case GPMINMEAN:
|
|
|
{
|
|
|
+ std::cerr << "compute novelty using the minimum mean" << std::endl;
|
|
|
this->computeNoveltyByGPMean( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
|
|
|
break;
|
|
|
}
|
|
@@ -445,6 +529,11 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
this->computeNoveltyByGPWeightRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
|
|
|
break;
|
|
|
}
|
|
|
+ case RANDOM:
|
|
|
+ {
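+      // presumably intended as a baseline strategy: the novelty score is simply a random value per patch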
+ this->computeNoveltyByRandom( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
|
|
|
+ break;
|
|
|
+ }
|
|
|
default:
|
|
|
{
|
|
|
//do nothing, keep the image constant to 0.0
|
|
@@ -453,10 +542,18 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
|
|
|
}
|
|
|
|
|
|
-
|
|
|
-
|
|
|
- // std::cerr << "uncertainty: " << gpUncertaintyVal << " minMean: " << gpMeanVal << " gpMeanRatio: " << gpMeanRatioVal << " weightAll: " << gpWeightAllVal << " weightRatio: "<< gpWeightRatioVal << std::endl;
|
|
|
+ timer.stop();
|
|
|
+ std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;
|
|
|
|
|
|
+ if (visualizeALimages)
|
|
|
+ {
|
|
|
+ ColorImage imgrgbTmp (xsize, ysize);
|
|
|
+ ICETools::convertToRGB ( noveltyImage, imgrgbTmp );
|
|
|
+ showImage(imgrgbTmp, "Novelty Image without Region Segmentation");
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ timer.start();
|
|
|
|
|
|
//determine regions
|
|
|
if(regionSeg != NULL)
|
|
@@ -465,13 +562,14 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
int amountRegions = regionSeg->segRegions ( img, mask );
|
|
|
|
|
|
//compute probs per region
|
|
|
- vector<vector<double> > regionProb(amountRegions,vector<double>(probabilities.channels(),0.0));
|
|
|
- vector<double> regionNoveltyMeasure (amountRegions, 0.0);
|
|
|
+ std::vector<std::vector<double> > regionProb(amountRegions, std::vector<double>(probabilities.channels(),0.0));
|
|
|
+ std::vector<double> regionNoveltyMeasure (amountRegions, 0.0);
|
|
|
|
|
|
- vector<int> regionCounter(amountRegions, 0);
|
|
|
- for ( int y = 0; y < ysize; y++)
|
|
|
+ std::vector<int> regionCounter(amountRegions, 0);
|
|
|
+ std::vector<int> regionCounterNovelty(amountRegions, 0);
|
|
|
+ for ( int y = 0; y < ysize; y += trainWsize) //y++)
|
|
|
{
|
|
|
- for (int x = 0; x < xsize; x++)
|
|
|
+ for (int x = 0; x < xsize; x += trainWsize) //x++)
|
|
|
{
|
|
|
int r = mask(x,y);
|
|
|
regionCounter[r]++;
|
|
@@ -480,56 +578,98 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
regionProb[r][j] += probabilities ( x, y, j );
|
|
|
}
|
|
|
|
|
|
- //count the amount of "novelty" for the corresponding region
|
|
|
- regionNoveltyMeasure[r] += noveltyImage(x,y);
|
|
|
+ if ( forbidden_classesActiveLearning.find( labels(x,y) ) == forbidden_classesActiveLearning.end() )
|
|
|
+ {
|
|
|
+ //count the amount of "novelty" for the corresponding region
|
|
|
+ regionNoveltyMeasure[r] += noveltyImage(x,y);
|
|
|
+ regionCounterNovelty[r]++;
|
|
|
+ }
|
|
|
}
|
|
|
}
|
|
|
|
|
|
//find best class per region
|
|
|
- vector<int> bestClassPerRegion(amountRegions,0);
|
|
|
+ std::vector<int> bestClassPerRegion(amountRegions,0);
|
|
|
|
|
|
double maxNoveltyScore = -numeric_limits<double>::max();
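+    // for novelty measures where smaller values mean "more novel" (e.g., gp-mean), we search for the minimum,
+    // so the running extremum has to start at +infinity instead of -infinity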
+ if (!mostNoveltyWithMaxScores)
|
|
|
+ {
|
|
|
+ maxNoveltyScore = numeric_limits<double>::max();
|
|
|
+ }
|
|
|
+
|
|
|
int maxUncertRegion = -1;
|
|
|
|
|
|
+ //loop over all regions and compute averaged novelty scores
|
|
|
for(int r = 0; r < amountRegions; r++)
|
|
|
{
|
|
|
+
|
|
|
+ //check for the most plausible class per region
|
|
|
double maxval = -numeric_limits<double>::max();
|
|
|
+
|
|
|
+ //loop over all classes
|
|
|
for(int c = 0; c < probabilities.channels(); c++)
|
|
|
{
|
|
|
regionProb[r][c] /= regionCounter[r];
|
|
|
- if(maxval < regionProb[r][c] && regionProb[r][c] != 0.0)
|
|
|
- {
|
|
|
- maxval = regionProb[r][c];
|
|
|
- bestClassPerRegion[r] = c;
|
|
|
+
|
|
|
+ if( (maxval < regionProb[r][c]) ) //&& (regionProb[r][c] != 0.0) )
|
|
|
+ {
|
|
|
+ maxval = regionProb[r][c];
|
|
|
+ bestClassPerRegion[r] = c;
|
|
|
}
|
|
|
}
|
|
|
- //normalize summed novelty scores to region size
|
|
|
- regionNoveltyMeasure[r] /= regionCounter[r];
|
|
|
- //
|
|
|
+
|
|
|
+      //if the region only contains invalid information (e.g., background), skip it
|
|
|
+ if (regionCounterNovelty[r] == 0)
|
|
|
+ {
|
|
|
+ continue;
|
|
|
+ }
|
|
|
|
|
|
- if(maxNoveltyScore < regionNoveltyMeasure[r])
|
|
|
+ //normalize summed novelty scores to region size
|
|
|
+ regionNoveltyMeasure[r] /= regionCounterNovelty[r];
|
|
|
+
|
|
|
+      //did we find a region that has a higher score than the most novel region known so far within this image?
|
|
|
+ if( ( mostNoveltyWithMaxScores && (maxNoveltyScore < regionNoveltyMeasure[r]) ) // if we look for large novelty scores, e.g., variance
|
|
|
+ || ( !mostNoveltyWithMaxScores && (maxNoveltyScore > regionNoveltyMeasure[r]) ) ) // if we look for small novelty scores, e.g., min mean
|
|
|
{
|
|
|
- maxNoveltyScore = regionNoveltyMeasure[r];
|
|
|
- maxUncertRegion = r;
|
|
|
+        //did we already query this specific region of this image before?
|
|
|
+ if ( (queriedRegions.find( currentFile ) != queriedRegions.end() ) && ( queriedRegions[currentFile].find(r) != queriedRegions[currentFile].end() ) )
|
|
|
+ {
|
|
|
+ continue;
|
|
|
+ }
|
|
|
+ else //only accept the region as novel if we never queried it before
|
|
|
+ {
|
|
|
+ maxNoveltyScore = regionNoveltyMeasure[r];
|
|
|
+ maxUncertRegion = r;
|
|
|
+ }
|
|
|
+
|
|
|
}
|
|
|
+
|
|
|
}
|
|
|
|
|
|
+ // after finding the most novel region for the current image, check whether this region is also the most novel with respect
|
|
|
+ // to all previously seen test images
|
|
|
+ // if so, store the corresponding features, since we want to "actively" query them to incorporate useful information
|
|
|
if(findMaximumUncert)
|
|
|
{
|
|
|
- if(maxNoveltyScore > globalMaxUncert)
|
|
|
+ if( ( mostNoveltyWithMaxScores && (maxNoveltyScore > globalMaxUncert) )
|
|
|
+ || ( !mostNoveltyWithMaxScores && (maxNoveltyScore < globalMaxUncert) ) )
|
|
|
{
|
|
|
- //save new important features
|
|
|
+        //the current most novel region of this image has a "higher" novelty score than the previous most novel region of all test images processed so far
|
|
|
+ // -> save new important features of this region
|
|
|
Examples examples;
|
|
|
- for ( int y = 0; y < ysize; y += testWSize )
|
|
|
+ for ( int y = 0; y < ysize; y += trainWsize )
|
|
|
{
|
|
|
- for ( int x = 0; x < xsize; x += testWSize)
|
|
|
+ for ( int x = 0; x < xsize; x += trainWsize)
|
|
|
{
|
|
|
if(mask(x,y) == maxUncertRegion)
|
|
|
{
|
|
|
+ int classnoTmp = labels(x,y);
|
|
|
+ if ( forbidden_classesActiveLearning.find(classnoTmp) != forbidden_classesActiveLearning.end() )
|
|
|
+ continue;
|
|
|
+
|
|
|
Example example;
|
|
|
example.vec = NULL;
|
|
|
example.svec = new SparseVector ( featdim );
|
|
|
- int classnoTmp = labels(x,y);
|
|
|
+
|
|
|
for ( int f = 0; f < featdim; f++ )
|
|
|
{
|
|
|
double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
|
|
@@ -544,15 +684,27 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
|
|
|
if(examples.size() > 0)
|
|
|
{
|
|
|
+ std::cerr << "found " << examples.size() << " new examples in the queried region" << std::endl << std::endl;
|
|
|
newTrainExamples.clear();
|
|
|
newTrainExamples = examples;
|
|
|
globalMaxUncert = maxNoveltyScore;
|
|
|
- visualizeRegion(img,mask,maxUncertRegion,maskedImg);
|
|
|
+ //prepare for later visualization
|
|
|
+// if (visualizeALimages)
|
|
|
+ visualizeRegion(img,mask,maxUncertRegion,maskedImg);
|
|
|
}
|
|
|
+ else
|
|
|
+ {
|
|
|
+ std::cerr << "the queried region has no valid information" << std::endl << std::endl;
|
|
|
+ }
|
|
|
+
|
|
|
+ //save filename and region index
|
|
|
+ currentRegionToQuery.first = currentFile;
|
|
|
+ currentRegionToQuery.second = maxUncertRegion;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
//write back best results per region
|
|
|
+ //i.e., write normalized novelty scores for every region into the novelty image
|
|
|
for ( int y = 0; y < ysize; y++)
|
|
|
{
|
|
|
for (int x = 0; x < xsize; x++)
|
|
@@ -567,10 +719,13 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
noveltyImage(x,y) = regionNoveltyMeasure[r];
|
|
|
}
|
|
|
}
|
|
|
- }
|
|
|
-
|
|
|
+ } // if regionSeg != null
|
|
|
+
|
|
|
timer.stop();
|
|
|
- cout << "second: " << timer.getLastAbsolute() << endl;
|
|
|
+ std::cout << "AL time for determination of novel regions: " << timer.getLastAbsolute() << std::endl;
|
|
|
+
|
|
|
+// timer.stop();
|
|
|
+// cout << "second: " << timer.getLastAbsolute() << endl;
|
|
|
timer.start();
|
|
|
|
|
|
ColorImage imgrgb ( xsize, ysize );
|
|
@@ -578,13 +733,18 @@ void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NI
|
|
|
std::stringstream out;
|
|
|
std::vector< std::string > list2;
|
|
|
StringTools::split ( Globals::getCurrentImgFN (), '/', list2 );
|
|
|
- out << uncertdir << "/" << list2.back();
|
|
|
+ out << resultdir << "/" << list2.back();
|
|
|
|
|
|
- //TODO append a suffix according to the novelty strategie chosen
|
|
|
- noveltyImage.writeRaw(out.str() + "_" + noveltyMethodString+".rawfloat");
|
|
|
+ noveltyImage.writeRaw(out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+".rawfloat");
|
|
|
+
|
|
|
+ if (visualizeALimages)
|
|
|
+ {
|
|
|
+ ICETools::convertToRGB ( noveltyImage, imgrgb );
|
|
|
+ showImage(imgrgb, "Novelty Image");
|
|
|
+ }
|
|
|
|
|
|
timer.stop();
|
|
|
- cout << "last: " << timer.getLastAbsolute() << endl;
|
|
|
+ cout << "AL time for writing the raw novelty image: " << timer.getLastAbsolute() << endl;
|
|
|
}
|
|
|
|
|
|
inline void SemSegNovelty::computeClassificationResults( const NICE::MultiChannelImageT<double> & feats,
|
|
@@ -595,7 +755,99 @@ inline void SemSegNovelty::computeClassificationResults( const NICE::MultiChanne
|
|
|
const int & featdim
|
|
|
)
|
|
|
{
|
|
|
- #pragma omp parallel for
|
|
|
+ std::cerr << "featdim: " << featdim << std::endl;
|
|
|
+
|
|
|
+ if ( classifier != NULL )
|
|
|
+ {
|
|
|
+
|
|
|
+ #pragma omp parallel for
|
|
|
+ for ( int y = 0; y < ysize; y += testWSize )
|
|
|
+ {
|
|
|
+ Example example;
|
|
|
+ example.vec = NULL;
|
|
|
+ example.svec = new SparseVector ( featdim );
|
|
|
+ for ( int x = 0; x < xsize; x += testWSize)
|
|
|
+ {
|
|
|
+ for ( int f = 0; f < featdim; f++ )
|
|
|
+ {
|
|
|
+ double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
|
|
|
+ if ( val > 1e-10 )
|
|
|
+ ( *example.svec ) [f] = val;
|
|
|
+ }
|
|
|
+ example.svec->normalize();
|
|
|
+
|
|
|
+ ClassificationResult cr = classifier->classify ( example );
|
|
|
+
|
|
|
+ int xs = std::max(0, x - testWSize/2);
|
|
|
+ int xe = std::min(xsize - 1, x + testWSize/2);
|
|
|
+ int ys = std::max(0, y - testWSize/2);
|
|
|
+ int ye = std::min(ysize - 1, y + testWSize/2);
|
|
|
+ for (int yl = ys; yl <= ye; yl++)
|
|
|
+ {
|
|
|
+ for (int xl = xs; xl <= xe; xl++)
|
|
|
+ {
|
|
|
+ for ( int j = 0 ; j < cr.scores.size(); j++ )
|
|
|
+ {
|
|
|
+ probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
+ }
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ example.svec->clear();
|
|
|
+ }
|
|
|
+ delete example.svec;
|
|
|
+ example.svec = NULL;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else //vclassifier
|
|
|
+ {
|
|
|
+ std::cerr << "compute classification results with vclassifier" << std::endl;
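+    // note: unlike the GP-HIK branch above, the vector classifier operates on dense, L1-normalized feature vectors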
+ #pragma omp parallel for
|
|
|
+ for ( int y = 0; y < ysize; y += testWSize )
|
|
|
+ {
|
|
|
+ for ( int x = 0; x < xsize; x += testWSize)
|
|
|
+ {
|
|
|
+ NICE::Vector v(featdim);
|
|
|
+ for ( int f = 0; f < featdim; f++ )
|
|
|
+ {
|
|
|
+ double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
|
|
|
+ v[f] = val;
|
|
|
+ }
|
|
|
+ v.normalizeL1();
|
|
|
+
|
|
|
+ ClassificationResult cr = vclassifier->classify ( v );
|
|
|
+
|
|
|
+ int xs = std::max(0, x - testWSize/2);
|
|
|
+ int xe = std::min(xsize - 1, x + testWSize/2);
|
|
|
+ int ys = std::max(0, y - testWSize/2);
|
|
|
+ int ye = std::min(ysize - 1, y + testWSize/2);
|
|
|
+ for (int yl = ys; yl <= ye; yl++)
|
|
|
+ {
|
|
|
+ for (int xl = xs; xl <= xe; xl++)
|
|
|
+ {
|
|
|
+ for ( int j = 0 ; j < cr.scores.size(); j++ )
|
|
|
+ {
|
|
|
+ probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
+ }
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+// compute novelty images depending on the strategy chosen
|
|
|
+
|
|
|
+void SemSegNovelty::computeNoveltyByRandom( NICE::FloatImage & noveltyImage,
|
|
|
+ const NICE::MultiChannelImageT<double> & feats,
|
|
|
+ NICE::Image & segresult,
|
|
|
+ NICE::MultiChannelImageT<double> & probabilities,
|
|
|
+ const int & xsize, const int & ysize, const int & featdim )
|
|
|
+{
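+  // classification is still carried out here so that segresult and probabilities get filled;
+  // only the value written to noveltyImage is a random number per evaluated patch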
+#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
|
{
|
|
|
Example example;
|
|
@@ -612,11 +864,14 @@ inline void SemSegNovelty::computeClassificationResults( const NICE::MultiChanne
|
|
|
example.svec->normalize();
|
|
|
|
|
|
ClassificationResult cr = classifier->classify ( example );
|
|
|
-
|
|
|
+
|
|
|
int xs = std::max(0, x - testWSize/2);
|
|
|
int xe = std::min(xsize - 1, x + testWSize/2);
|
|
|
int ys = std::max(0, y - testWSize/2);
|
|
|
int ye = std::min(ysize - 1, y + testWSize/2);
|
|
|
+
|
|
|
+ double randVal = randDouble();
|
|
|
+
|
|
|
for (int yl = ys; yl <= ye; yl++)
|
|
|
{
|
|
|
for (int xl = xs; xl <= xe; xl++)
|
|
@@ -625,18 +880,14 @@ inline void SemSegNovelty::computeClassificationResults( const NICE::MultiChanne
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
+ noveltyImage ( xl, yl ) = randVal;
|
|
|
}
|
|
|
- }
|
|
|
-
|
|
|
- example.svec->clear();
|
|
|
+ }
|
|
|
}
|
|
|
- delete example.svec;
|
|
|
- example.svec = NULL;
|
|
|
- }
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
-// compute novelty images depending on the strategy chosen
|
|
|
|
|
|
void SemSegNovelty::computeNoveltyByVariance( NICE::FloatImage & noveltyImage,
|
|
|
const NICE::MultiChannelImageT<double> & feats,
|
|
@@ -674,8 +925,8 @@ void SemSegNovelty::computeNoveltyByVariance( NICE::FloatImage & noveltyIm
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
- noveltyImage ( xl, yl ) = cr.uncertainty;
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
+ noveltyImage ( xl, yl ) = cr.uncertainty;
|
|
|
}
|
|
|
}
|
|
|
|
|
@@ -742,13 +993,12 @@ void SemSegNovelty::computeNoveltyByGPUncertainty( NICE::FloatImage & noveltyIm
|
|
|
for (int yl = ys; yl <= ye; yl++)
|
|
|
{
|
|
|
for (int xl = xs; xl <= xe; xl++)
|
|
|
- {
|
|
|
+ {
|
|
|
for ( int j = 0 ; j < cr.scores.size(); j++ )
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
-
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
noveltyImage ( xl, yl ) = gpUncertaintyVal;
|
|
|
}
|
|
|
}
|
|
@@ -767,7 +1017,7 @@ void SemSegNovelty::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
|
|
|
const int & xsize, const int & ysize, const int & featdim )
|
|
|
{
|
|
|
double gpNoise = conf->gD("GPHIK", "noise", 0.01);
|
|
|
-
|
|
|
+
|
|
|
#pragma omp parallel for
|
|
|
for ( int y = 0; y < ysize; y += testWSize )
|
|
|
{
|
|
@@ -788,16 +1038,18 @@ void SemSegNovelty::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
|
|
|
|
|
|
double minMeanAbs ( numeric_limits<double>::max() );
|
|
|
|
|
|
- for ( int j = 0 ; j < cr.scores.size(); j++ )
|
|
|
+ for ( int j = 0 ; j < probabilities.channels(); j++ )
|
|
|
{
|
|
|
if ( forbidden_classesTrain.find ( j ) != forbidden_classesTrain.end() )
|
|
|
{
|
|
|
continue;
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
//check whether we found a class with higher smaller abs mean than the current minimum
|
|
|
- if (abs(cr.scores[j]) < minMeanAbs)
|
|
|
- minMeanAbs = abs(cr.scores[j]);
|
|
|
+      if (abs(probabilities(x,y,j)) < minMeanAbs)
+      {
+        minMeanAbs = abs(probabilities(x,y,j));
+      }
|
|
|
}
|
|
|
|
|
|
// compute results when we take the lowest mean value of all classes
|
|
@@ -815,15 +1067,11 @@ void SemSegNovelty::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
noveltyImage ( xl, yl ) = gpMeanVal;
|
|
|
}
|
|
|
- }
|
|
|
-
|
|
|
- example.svec->clear();
|
|
|
+ }
|
|
|
}
|
|
|
- delete example.svec;
|
|
|
- example.svec = NULL;
|
|
|
}
|
|
|
}
|
|
|
|
|
@@ -893,7 +1141,7 @@ void SemSegNovelty::computeNoveltyByGPMeanRatio( NICE::FloatImage & noveltyImag
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
noveltyImage ( xl, yl ) = gpMeanRatioVal;
|
|
|
}
|
|
|
}
|
|
@@ -1001,7 +1249,7 @@ void SemSegNovelty::computeNoveltyByGPWeightAll( NICE::FloatImage & noveltyImag
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
noveltyImage ( xl, yl ) = gpWeightAllVal;
|
|
|
}
|
|
|
}
|
|
@@ -1124,7 +1372,7 @@ void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyIm
|
|
|
{
|
|
|
probabilities ( xl, yl, j ) = cr.scores[j];
|
|
|
}
|
|
|
- segresult ( xl, yl ) = cr.classno;
|
|
|
+ segresult ( xl, yl ) = cr.classno;
|
|
|
noveltyImage ( xl, yl ) = gpWeightRatioVal;
|
|
|
}
|
|
|
}
|
|
@@ -1134,4 +1382,138 @@ void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyIm
|
|
|
delete example.svec;
|
|
|
example.svec = NULL;
|
|
|
}
|
|
|
-}
|
|
|
+}
|
|
|
+
|
|
|
+
|
|
|
+void SemSegNovelty::addNewExample(const NICE::Vector& newExample, const int & newClassNo)
|
|
|
+{
|
|
|
+ //accept the new class as valid information
|
|
|
+ if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
|
|
|
+ {
|
|
|
+ forbidden_classesTrain.erase(newClassNo);
|
|
|
+ numberOfClasses++;
|
|
|
+ }
|
|
|
+ if ( classesInUse.find ( newClassNo ) == classesInUse.end() )
|
|
|
+ {
|
|
|
+ classesInUse.insert( newClassNo );
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ //then add it to the classifier used
|
|
|
+ if ( classifier != NULL )
|
|
|
+ {
|
|
|
+ //TODO
|
|
|
+ }
|
|
|
+ else //vclassifier
|
|
|
+ {
|
|
|
+ if (this->classifierString.compare("nn") == 0)
|
|
|
+ {
|
|
|
+ vclassifier->teach ( newClassNo, newExample );
|
|
|
+ }
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+void SemSegNovelty::addNovelExamples()
|
|
|
+{
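+  // incorporate the features of the most novel region (selected during the last round of semanticseg)
+  // into the classifier and remember the region so that it will not be queried again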
+
|
|
|
+ Timer timer;
|
|
|
+
|
|
|
+ //show the image that contains the most novel region
|
|
|
+ if (visualizeALimages)
|
|
|
+ showImage(maskedImg, "Most novel region");
|
|
|
+
|
|
|
+ timer.start();
|
|
|
+
|
|
|
+
|
|
|
+ std::stringstream out;
|
|
|
+ std::vector< std::string > list2;
|
|
|
+ StringTools::split ( Globals::getCurrentImgFN (), '/', list2 );
|
|
|
+ out << resultdir << "/" << list2.back();
|
|
|
+
|
|
|
+ maskedImg.writePPM ( out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+ "_query.ppm" );
|
|
|
+
|
|
|
+
|
|
|
+ timer.stop();
|
|
|
+ std::cerr << "AL time for writing queried image: " << timer.getLast() << std::endl;
|
|
|
+
|
|
|
+ timer.start();
|
|
|
+
|
|
|
+ //check which classes will be added using the features from the novel region
|
|
|
+ std::set<int> newClassNumbers;
|
|
|
+ newClassNumbers.clear(); //just to be sure
|
|
|
+ for ( uint i = 0 ; i < newTrainExamples.size() ; i++ )
|
|
|
+ {
|
|
|
+ if (newClassNumbers.find(newTrainExamples[i].first /* classNumber*/) == newClassNumbers.end() )
|
|
|
+ {
|
|
|
+ newClassNumbers.insert(newTrainExamples[i].first );
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ //accept the new classes as valid information
|
|
|
+ for (std::set<int>::const_iterator clNoIt = newClassNumbers.begin(); clNoIt != newClassNumbers.end(); clNoIt++)
|
|
|
+ {
|
|
|
+ if ( forbidden_classesTrain.find ( *clNoIt ) != forbidden_classesTrain.end() )
|
|
|
+ {
|
|
|
+ forbidden_classesTrain.erase(*clNoIt);
|
|
|
+ numberOfClasses++;
|
|
|
+ }
|
|
|
+ if ( classesInUse.find ( *clNoIt ) == classesInUse.end() )
|
|
|
+ {
|
|
|
+ classesInUse.insert( *clNoIt );
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ timer.stop();
|
|
|
+ std::cerr << "AL time for accepting possible new classes: " << timer.getLast() << std::endl;
|
|
|
+
|
|
|
+ timer.start();
|
|
|
+ //then add the new features to the classifier used
|
|
|
+ if ( classifier != NULL )
|
|
|
+ {
|
|
|
+ if (this->classifierString.compare("ClassifierGPHIK") == 0)
|
|
|
+ {
|
|
|
+ classifier->addMultipleExamples ( this->newTrainExamples );
|
|
|
+ }
|
|
|
+ }
|
|
|
+ else //vclassifier
|
|
|
+ {
|
|
|
+ //TODO
|
|
|
+ }
|
|
|
+
|
|
|
+ timer.stop();
|
|
|
+ std::cerr << "AL time for actually updating the classifier: " << timer.getLast() << std::endl;
|
|
|
+
|
|
|
+ std::cerr << "the current region to query is: " << currentRegionToQuery.first << " -- " << currentRegionToQuery.second << std::endl;
|
|
|
+
|
|
|
+ //did we already query a region of this image?
|
|
|
+ if ( queriedRegions.find( currentRegionToQuery.first ) != queriedRegions.end() )
|
|
|
+ {
|
|
|
+ queriedRegions[ currentRegionToQuery.first ].insert(currentRegionToQuery.second);
|
|
|
+ }
|
|
|
+ else
|
|
|
+ {
|
|
|
+ std::set<int> tmpSet; tmpSet.insert(currentRegionToQuery.second);
|
|
|
+ queriedRegions.insert(std::pair<std::string,std::set<int> > (currentRegionToQuery.first, tmpSet ) );
|
|
|
+ }
|
|
|
+
|
|
|
+ std::cerr << "Write already queried regions: " << std::endl;
|
|
|
+ for (std::map<std::string,std::set<int> >::const_iterator it = queriedRegions.begin(); it != queriedRegions.end(); it++)
|
|
|
+ {
|
|
|
+ std::cerr << "image: " << it->first << " -- ";
|
|
|
+ for (std::set<int>::const_iterator itReg = it->second.begin(); itReg != it->second.end(); itReg++)
|
|
|
+ {
|
|
|
+ std::cerr << *itReg << " ";
|
|
|
+ }
|
|
|
+ std::cerr << std::endl;
|
|
|
+ }
|
|
|
+
|
|
|
+ //clear the latest results, since one iteration is over
|
|
|
+ globalMaxUncert = -numeric_limits<double>::max();
|
|
|
+ if (!mostNoveltyWithMaxScores)
|
|
|
+ globalMaxUncert = numeric_limits<double>::max();
|
|
|
+}
|
|
|
+
|
|
|
+const Examples * SemSegNovelty::getNovelExamples() const
|
|
|
+{
|
|
|
+ return &(this->newTrainExamples);
|
|
|
+}
|