- #include <sstream>
- #include <iostream>
- #include "core/image/FilterT.h"
- #include "core/basics/numerictools.h"
- #include "core/basics/StringTools.h"
- #include "core/basics/Timer.h"
- #include "gp-hik-exp/GPHIKClassifierNICE.h"
- #include "vislearning/baselib/ICETools.h"
- #include "vislearning/baselib/Globals.h"
- #include "vislearning/features/fpfeatures/SparseVectorFeature.h"
- #include "segmentation/GenericRegionSegmentationMethodSelection.h"
- #include "SemSegNovelty.h"
- using namespace std;
- using namespace NICE;
- using namespace OBJREC;
- SemSegNovelty::SemSegNovelty ( const Config *conf,
- const MultiDataset *md )
- : SemanticSegmentation ( conf, & ( md->getClassNames ( "train" ) ) )
- {
- this->conf = conf;
- globalMaxUncert = -numeric_limits<double>::max();
-
- string section = "SemSegNovelty";
- featExtract = new LocalFeatureColorWeijer ( conf );
- this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); //save and read segmentation results from files
- this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true ); //save the classifier to a file
- this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false ); //read the classifier from a file
- //write uncertainty results in the same folder as done for the segmentation results
- resultdir = conf->gS("debug", "resultdir", "result");
- cache = conf->gS ( "cache", "root", "" );
-
-
- //work-around for the const attribute of conf
- Config confCopy = *conf;
- 
- //make sure that we do NOT perform an optimization after every iteration step;
- //this would just take a lot of time, which is not desired so far
- confCopy.sB("ClassifierGPHIK","performOptimizationAfterIncrement",false);
-
- classifierString = conf->gS ( section, "classifier", "ClassifierGPHIK" );
- classifier = NULL;
- vclassifier = NULL;
- if ( classifierString.compare("ClassifierGPHIK") == 0)
- classifier = new GPHIKClassifierNICE ( &confCopy, "ClassifierGPHIK" );
- else
- vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );
-
- findMaximumUncert = conf->gB(section, "findMaximumUncert", true);
- whs = conf->gI ( section, "window_size", 10 );
- //distance to next descriptor during training
- trainWsize = conf->gI ( section, "train_window_size", 10 );
- //distance to next descriptor during testing
- testWSize = conf->gI (section, "test_window_size", 10);
- // select your segmentation method here
- string rsMethode = conf->gS ( section, "segmentation", "none" );
-
- if(rsMethode == "none")
- {
- regionSeg = NULL;
- }
- else
- {
- RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod(conf, rsMethode);
- if ( reuseSegmentation )
- regionSeg = new RSCache ( conf, tmpRegionSeg );
- else
- regionSeg = tmpRegionSeg;
- }
-
- cn = md->getClassNames ( "train" );
- if ( read_classifier )
- {
- try
- {
- if ( classifier != NULL )
- {
- string classifierdst = "/classifier.data";
- fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
- classifier->read ( cache + classifierdst );
- }
- else
- {
- string classifierdst = "/veccl.data";
- fprintf ( stderr, "SemSegNovelty:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
- vclassifier->read ( cache + classifierdst );
- }
-
- fprintf ( stderr, "SemSegNovelty:: successfully read\n" );
- }
- catch ( char *str )
- {
- cerr << "error reading data: " << str << endl;
- }
- }
- else
- {
- train ( md );
- }
-
- //define which measure for "novelty" we want to use
- noveltyMethodString = conf->gS( section, "noveltyMethod", "gp-variance");
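- // every branch below also sets mostNoveltyWithMaxScores, which states whether large (true) or
- // small (false) scores indicate novelty, and initializes globalMaxUncert accordingly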
- if (noveltyMethodString.compare("gp-variance") == 0) // novel = large variance
- {
- this->noveltyMethod = GPVARIANCE;
- this->mostNoveltyWithMaxScores = true;
- }
- else if (noveltyMethodString.compare("gp-uncertainty") == 0) //novel = large uncertainty (mean / var)
- {
- this->noveltyMethod = GPUNCERTAINTY;
- this->mostNoveltyWithMaxScores = false;
- globalMaxUncert = numeric_limits<double>::max();
- }
- else if (noveltyMethodString.compare("gp-mean") == 0) //novel = small mean
- {
- this->noveltyMethod = GPMINMEAN;
- this->mostNoveltyWithMaxScores = false;
- globalMaxUncert = numeric_limits<double>::max();
- }
- else if (noveltyMethodString.compare("gp-meanRatio") == 0) //novel = small difference between mean of most plausible class and mean of snd
- // most plausible class (not useful in binary settings)
- {
- this->noveltyMethod = GPMEANRATIO;
- this->mostNoveltyWithMaxScores = false;
- globalMaxUncert = numeric_limits<double>::max();
- }
- else if (noveltyMethodString.compare("gp-weightAll") == 0) // novel = large weight in alpha vector after updating the model (can be predicted exactly)
- {
- this->noveltyMethod = GPWEIGHTALL;
- this->mostNoveltyWithMaxScores = true;
- }
- else if (noveltyMethodString.compare("gp-weightRatio") == 0) // novel = small difference between weights for alpha vectors
- // with assumptions of GT label to be the most
- // plausible against the second most plausible class
- {
- this->noveltyMethod = GPWEIGHTRATIO;
- this->mostNoveltyWithMaxScores = false;
- globalMaxUncert = numeric_limits<double>::max();
- }
- else if (noveltyMethodString.compare("random") == 0)
- {
- initRand();
- this->noveltyMethod = RANDOM;
- }
- else
- {
- this->noveltyMethod = GPVARIANCE;
- this->mostNoveltyWithMaxScores = true;
- }
-
- //we have not queried any region so far
- queriedRegions.clear();
- visualizeALimages = conf->gB(section, "visualizeALimages", false);
- }
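- /* Illustrative config sketch (not part of the code; section and key names are taken from the
-  * gS/gB/gI calls in the constructor above, the values below are only placeholders):
-  *
-  *   [SemSegNovelty]
-  *   classifier        = ClassifierGPHIK
-  *   findMaximumUncert = true
-  *   window_size       = 10
-  *   train_window_size = 10
-  *   test_window_size  = 10
-  *   segmentation      = none
-  *   noveltyMethod     = gp-variance
-  *   visualizeALimages = false
-  *
-  *   [FPCPixel]
-  *   reuseSegmentation = true
-  *   save_classifier   = true
-  *   read_classifier   = false
-  *
-  *   [debug]
-  *   resultdir = result
-  *
-  *   [cache]
-  *   root = /path/to/cache
-  */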
- SemSegNovelty::~SemSegNovelty()
- {
- if(newTrainExamples.size() > 0)
- {
- // show most uncertain region
- if (visualizeALimages)
- showImage(maskedImg);
-
- //incorporate new information into the classifier
- if ( classifier != NULL )
- {
- classifier->addMultipleExamples(newTrainExamples);
- 
- //store the classifier, such that we can read it again in the next round (if we like that)
- classifier->save ( cache + "/classifier.data" );
- }
- }
-
- // clean-up
- if ( classifier != NULL )
- delete classifier;
- if ( vclassifier != NULL )
- delete vclassifier;
- if ( featExtract != NULL )
- delete featExtract;
- }
- void SemSegNovelty::visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix ®ions, int region, NICE::ColorImage &outimage)
- {
- std::vector<uchar> color;
- color.push_back(255);
- color.push_back(0);
- color.push_back(0);
-
- int width = img.width();
- int height = img.height();
-
- outimage.resize(width,height);
-
- for(int y = 0; y < height; y++)
- {
- for(int x = 0; x < width; x++)
- {
- if(regions(x,y) == region)
- {
- for(int c = 0; c < 3; c++)
- {
- outimage(x,y,c) = color[c];
- }
- }
- else
- {
- for(int c = 0; c < 3; c++)
- {
- outimage(x,y,c) = img(x,y,c);
- }
- }
- }
- }
- }
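- // visualizeRegion is used in semanticseg below to prepare maskedImg, i.e., the input image with the
- // most novel region highlighted in red (see the call visualizeRegion(img, mask, maxUncertRegion, maskedImg))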
- void SemSegNovelty::train ( const MultiDataset *md )
- {
- const LabeledSet train = * ( *md ) ["train"];
- const LabeledSet *trainp = &train;
- ////////////////////////
- // feature extraction //
- ////////////////////////
-
- //check the same thing for the training classes - this is very specific to our setup
- std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
- if ( forbidden_classesTrain_s == "" )
- {
- forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
- }
- cn.getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
-
- ProgressBar pb ( "Local Feature Extraction" );
- pb.show();
- int imgnb = 0;
- Examples examples;
- examples.filename = "training";
- int featdim = -1;
- classesInUse.clear();
-
- LOOP_ALL_S ( *trainp )
- {
- //EACH_S(classno, currentFile);
- EACH_INFO ( classno, info );
- std::string currentFile = info.img();
- CachedExample *ce = new CachedExample ( currentFile );
- const LocalizationResult *locResult = info.localization();
- if ( locResult->size() <= 0 )
- {
- fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
- currentFile.c_str() );
- continue;
- }
- int xsize, ysize;
- ce->getImageSize ( xsize, ysize );
- Image labels ( xsize, ysize );
- labels.set ( 0 );
- locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );
- NICE::ColorImage img;
- try {
- img = ColorImage ( currentFile );
- } catch ( Exception ) {
- cerr << "SemSegNovelty: error opening image file <" << currentFile << ">" << endl;
- continue;
- }
- Globals::setCurrentImgFN ( currentFile );
- MultiChannelImageT<double> feats;
- // extract features
- featExtract->getFeats ( img, feats );
- featdim = feats.channels();
- feats.addChannel(featdim);
- for (int c = 0; c < featdim; c++)
- {
- ImageT<double> tmp = feats[c];
- ImageT<double> tmp2 = feats[c+featdim];
- NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
- }
- featdim += featdim; //the feature dimension doubles, since a gradient-strength channel was added for every original channel
- // compute integral images
- for ( int c = 0; c < featdim; c++ )
- {
- feats.calcIntegral ( c );
- }
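- // after calcIntegral, feats.getIntegralValue(x1, y1, x2, y2, c) is assumed to return the sum of
- // channel c over the rectangle [x1,x2] x [y1,y2] in constant time; the sampling loop below uses this
- // to build a sparse descriptor that sums every feature channel over a (2*whs+1) x (2*whs+1) window
- // centered at the current grid point, followed by a normalization of the sparse vector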
- for ( int y = 0; y < ysize; y += trainWsize)
- {
- for ( int x = 0; x < xsize; x += trainWsize )
- {
- int classnoTmp = labels.getPixel ( x, y );
-
- if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
- {
- continue;
- }
-
- if (classesInUse.find(classnoTmp) == classesInUse.end())
- {
- classesInUse.insert(classnoTmp);
- }
-
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- example.position = imgnb;
- examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
- }
- }
-
-
-
- delete ce;
- imgnb++;
- pb.update ( trainp->count() );
- }
-
-
- numberOfClasses = classesInUse.size();
- std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
- std::cerr << "classes in use: " << std::endl;
- for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
- {
- std::cerr << *it << " ";
- }
- std::cerr << std::endl;
- pb.hide();
- //////////////////////
- // train classifier //
- //////////////////////
- FeaturePool fp;
- Feature *f = new SparseVectorFeature ( featdim );
- f->explode ( fp );
- delete f;
- if ( classifier != NULL )
- {
- std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
- classifier->train ( fp, examples );
- std::cerr << "training finished" << std::endl;
- }
- else
- {
- LabeledSetVector lvec;
- convertExamplesToLSet ( examples, lvec );
- vclassifier->teach ( lvec );
- // if ( usegmm )
- // convertLSetToSparseExamples ( examples, lvec );
- // else
- std::cerr << "classifierString: " << classifierString << std::endl;
- if (this->classifierString.compare("nn") == 0)
- {
- convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
- }
- else
- {
- convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
- }
- vclassifier->finishTeaching();
- }
- fp.destroy();
- if ( save_classifier )
- {
- if ( classifier != NULL )
- classifier->save ( cache + "/classifier.data" );
- else
- vclassifier->save ( cache + "/veccl.data" );
- }
- ////////////
- //clean up//
- ////////////
- for ( int i = 0; i < ( int ) examples.size(); i++ )
- {
- examples[i].second.clean();
- }
- examples.clear();
- cerr << "SemSeg training finished" << endl;
- }
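- // at this point, either the FeaturePool-based classifier (e.g., GPHIK) or the vector-based vclassifier
- // has been trained on the window descriptors; if save_classifier is set, the model was written to
- // <cache>/classifier.data or <cache>/veccl.data, matching the paths used by read_classifier in the constructor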
- void SemSegNovelty::semanticseg ( CachedExample *ce, NICE::Image & segresult, NICE::MultiChannelImageT<double> & probabilities )
- {
- Timer timer;
- timer.start();
-
- //segresult contains the GT labels when this method is called
- // we simply store them in labels, to have easy access to the GT information later on
- Image labels = segresult;
- //just to be sure that we do not have a GT-biased result :)
- segresult.set(0);
- int featdim = -1;
- std::string currentFile = Globals::getCurrentImgFN();
- int xsize, ysize;
- ce->getImageSize ( xsize, ysize );
- probabilities.reInit( xsize, ysize, cn.getMaxClassno() + 1);
- probabilities.setAll ( 0.0 );
-
- NICE::ColorImage img;
- try {
- img = ColorImage ( currentFile );
- } catch ( Exception ) {
- cerr << "SemSegNovelty: error opening image file <" << currentFile << ">" << endl;
- return;
- }
- MultiChannelImageT<double> feats;
- // extract features
- featExtract->getFeats ( img, feats );
- featdim = feats.channels();
- feats.addChannel(featdim);
- for (int c = 0; c < featdim; c++)
- {
- ImageT<double> tmp = feats[c];
- ImageT<double> tmp2 = feats[c+featdim];
- NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
- }
- featdim += featdim; //the feature dimension doubles, since a gradient-strength channel was added for every original channel
- // compute integral images
- for ( int c = 0; c < featdim; c++ )
- {
- feats.calcIntegral ( c );
- }
-
- timer.stop();
- std::cout << "AL time for preparation: " << timer.getLastAbsolute() << std::endl;
-
- timer.start();
- //classification results currently only need to be computed separately if we use the vclassifier, i.e., the nearest neighbor classifier
- // used for the "novel feature learning" approach
- //in all other settings, such as active semantic segmentation in general, we do this within the novelty-computation methods
- if ( classifier == NULL )
- {
- this->computeClassificationResults( feats, segresult, probabilities, xsize, ysize, featdim);
- }
- // timer.stop();
- //
- // std::cerr << "classification results computed" << std::endl;
-
- FloatImage noveltyImage ( xsize, ysize );
- noveltyImage.set ( 0.0 );
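- // every compute* method below follows the same scheme: slide a grid with step testWSize over the
- // image, classify the integral-image window descriptor at each grid point, and broadcast the class
- // scores, the predicted label, and the method-specific novelty score to the testWSize neighborhood
- // of that point (written to probabilities, segresult, and noveltyImage, respectively)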
-
- switch (noveltyMethod)
- {
- case GPVARIANCE:
- {
- this->computeNoveltyByVariance( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- case GPUNCERTAINTY:
- {
- this->computeNoveltyByGPUncertainty( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- case GPMINMEAN:
- {
- std::cerr << "compute novelty using the minimum mean" << std::endl;
- this->computeNoveltyByGPMean( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- case GPMEANRATIO:
- {
- this->computeNoveltyByGPMeanRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- case GPWEIGHTALL:
- {
- this->computeNoveltyByGPWeightAll( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- case GPWEIGHTRATIO:
- {
- this->computeNoveltyByGPWeightRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- case RANDOM:
- {
- this->computeNoveltyByRandom( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
- break;
- }
- default:
- {
- //do nothing, keep the image constant to 0.0
- break;
- }
-
- }
-
- timer.stop();
- std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;
-
- if (visualizeALimages)
- {
- ColorImage imgrgbTmp (xsize, ysize);
- ICETools::convertToRGB ( noveltyImage, imgrgbTmp );
- showImage(imgrgbTmp, "Novelty Image without Region Segmentation");
- }
-
-
- timer.start();
-
- //determine regions
- if(regionSeg != NULL)
- {
- NICE::Matrix mask;
- int amountRegions = regionSeg->segRegions ( img, mask );
-
- //compute probs per region
- std::vector<std::vector<double> > regionProb(amountRegions, std::vector<double>(probabilities.channels(),0.0));
- std::vector<double> regionNoveltyMeasure (amountRegions, 0.0);
- std::vector<int> regionCounter(amountRegions, 0);
- std::vector<int> regionCounterNovelty(amountRegions, 0);
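- // accumulate class scores and novelty values per region on a grid with step trainWsize;
- // regionProb is later divided by regionCounter (average class scores per region) and
- // regionNoveltyMeasure by regionCounterNovelty (average novelty per region, ignoring pixels
- // whose GT label is in forbidden_classesActiveLearning)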
- for ( int y = 0; y < ysize; y += trainWsize) //y++)
- {
- for (int x = 0; x < xsize; x += trainWsize) //x++)
- {
- int r = mask(x,y);
- regionCounter[r]++;
- for(int j = 0; j < probabilities.channels(); j++)
- {
- regionProb[r][j] += probabilities ( x, y, j );
- }
-
- if ( forbidden_classesActiveLearning.find( labels(x,y) ) == forbidden_classesActiveLearning.end() )
- {
- //count the amount of "novelty" for the corresponding region
- regionNoveltyMeasure[r] += noveltyImage(x,y);
- regionCounterNovelty[r]++;
- }
- }
- }
-
- //find best class per region
- std::vector<int> bestClassPerRegion(amountRegions,0);
-
- double maxNoveltyScore = -numeric_limits<double>::max();
- if (!mostNoveltyWithMaxScores)
- {
- maxNoveltyScore = numeric_limits<double>::max();
- }
-
- int maxUncertRegion = -1;
-
- //loop over all regions and compute averaged novelty scores
- for(int r = 0; r < amountRegions; r++)
- {
-
- //check for the most plausible class per region
- double maxval = -numeric_limits<double>::max();
-
- //loop over all classes
- for(int c = 0; c < probabilities.channels(); c++)
- {
- regionProb[r][c] /= regionCounter[r];
-
- if( (maxval < regionProb[r][c]) ) //&& (regionProb[r][c] != 0.0) )
- {
- maxval = regionProb[r][c];
- bestClassPerRegion[r] = c;
- }
- }
-
- //if the region contains only invalid information (e.g., background), skip it
- if (regionCounterNovelty[r] == 0)
- {
- continue;
- }
-
- //normalize summed novelty scores to region size
- regionNoveltyMeasure[r] /= regionCounterNovelty[r];
-
- //did we find a region that has a higher novelty score than the most novel region known so far within this image?
- if( ( mostNoveltyWithMaxScores && (maxNoveltyScore < regionNoveltyMeasure[r]) ) // if we look for large novelty scores, e.g., variance
- || ( !mostNoveltyWithMaxScores && (maxNoveltyScore > regionNoveltyMeasure[r]) ) ) // if we look for small novelty scores, e.g., min mean
- {
- //did we already query a region of this image? -- and it was this specific region
- if ( (queriedRegions.find( currentFile ) != queriedRegions.end() ) && ( queriedRegions[currentFile].find(r) != queriedRegions[currentFile].end() ) )
- {
- continue;
- }
- else //only accept the region as novel if we never queried it before
- {
- maxNoveltyScore = regionNoveltyMeasure[r];
- maxUncertRegion = r;
- }
- }
- }
-
- // after finding the most novel region for the current image, check whether this region is also the most novel with respect
- // to all previously seen test images
- // if so, store the corresponding features, since we want to "actively" query them to incorporate useful information
- if(findMaximumUncert)
- {
- if( ( mostNoveltyWithMaxScores && (maxNoveltyScore > globalMaxUncert) )
- || ( !mostNoveltyWithMaxScores && (maxNoveltyScore < globalMaxUncert) ) )
- {
- //the current most novel region of the image has a "higher" novelty score than the previous most novel region of all test images worked on so far
- // -> save new important features of this region
- Examples examples;
- for ( int y = 0; y < ysize; y += trainWsize )
- {
- for ( int x = 0; x < xsize; x += trainWsize)
- {
- if(mask(x,y) == maxUncertRegion)
- {
- int classnoTmp = labels(x,y);
- if ( forbidden_classesActiveLearning.find(classnoTmp) != forbidden_classesActiveLearning.end() )
- continue;
-
- Example example(NULL, x, y);
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
-
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- examples.push_back ( pair<int, Example> ( classnoTmp, example ) );
- }
- }
- }
-
- if(examples.size() > 0)
- {
- std::cerr << "found " << examples.size() << " new examples in the queried region" << std::endl << std::endl;
- newTrainExamples.clear();
- newTrainExamples = examples;
- globalMaxUncert = maxNoveltyScore;
- //prepare for later visualization
- // if (visualizeALimages)
- visualizeRegion(img,mask,maxUncertRegion,maskedImg);
- }
- else
- {
- std::cerr << "the queried region has no valid information" << std::endl << std::endl;
- }
-
- //save filename and region index
- currentRegionToQuery.first = currentFile;
- currentRegionToQuery.second = maxUncertRegion;
- }
- }
- //write back best results per region
- //i.e., write normalized novelty scores for every region into the novelty image
- for ( int y = 0; y < ysize; y++)
- {
- for (int x = 0; x < xsize; x++)
- {
- int r = mask(x,y);
- for(int j = 0; j < probabilities.channels(); j++)
- {
- probabilities ( x, y, j ) = regionProb[r][j];
- }
- segresult(x,y) = bestClassPerRegion[r];
- // write novelty scores for every segment into the "final" image
- noveltyImage(x,y) = regionNoveltyMeasure[r];
- }
- }
- } // if regionSeg != null
-
- timer.stop();
- std::cout << "AL time for determination of novel regions: " << timer.getLastAbsolute() << std::endl;
- // timer.stop();
- // cout << "second: " << timer.getLastAbsolute() << endl;
- timer.start();
- ColorImage imgrgb ( xsize, ysize );
- std::stringstream out;
- std::vector< std::string > list2;
- StringTools::split ( Globals::getCurrentImgFN (), '/', list2 );
- out << resultdir << "/" << list2.back();
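- // the raw novelty scores are written next to the segmentation results, following the pattern
- // <resultdir>/<image name>_run_<iterationCountSuffix>_<noveltyMethodString>.rawfloat (see writeRaw below)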
-
- noveltyImage.writeRaw(out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+".rawfloat");
-
- if (visualizeALimages)
- {
- ICETools::convertToRGB ( noveltyImage, imgrgb );
- showImage(imgrgb, "Novelty Image");
- }
- timer.stop();
- cout << "AL time for writing the raw novelty image: " << timer.getLastAbsolute() << endl;
- }
- inline void SemSegNovelty::computeClassificationResults( const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize,
- const int & ysize,
- const int & featdim
- )
- {
- std::cerr << "featdim: " << featdim << std::endl;
-
- if ( classifier != NULL )
- {
-
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- }
- }
-
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- else //vclassifier
- {
- std::cerr << "compute classification results with vclassifier" << std::endl;
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- for ( int x = 0; x < xsize; x += testWSize)
- {
- NICE::Vector v(featdim);
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- v[f] = val;
- }
- v.normalizeL1();
- ClassificationResult cr = vclassifier->classify ( v );
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- }
- }
- }
- }
- }
- }
- // compute novelty images depending on the strategy chosen
- void SemSegNovelty::computeNoveltyByRandom( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
-
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
-
- double randVal = randDouble();
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = randVal;
- }
- }
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::computeNoveltyByVariance( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
-
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = cr.uncertainty;
- }
- }
-
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::computeNoveltyByGPUncertainty( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
-
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
-
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
-
- double maxMeanAbs ( 0.0 );
-
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- if ( forbidden_classesTrain.find ( j ) != forbidden_classesTrain.end() )
- {
- continue;
- }
- //check for larger abs mean
- if (abs(cr.scores[j]) > maxMeanAbs)
- {
- maxMeanAbs = abs(cr.scores[j]);
- }
-
- }
- double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
-
- //compute the heuristic GP-UNCERTAINTY, as proposed by Kapoor et al. in IJCV 2010
- // GP-UNCERTAINTY : |mean| / sqrt(variance + gpNoise)
- double gpUncertaintyVal = maxMeanAbs*firstTerm; //firstTerm = 1.0 / sqrt(cr.uncertainty+gpNoise)
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = gpUncertaintyVal;
- }
- }
-
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
-
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
- double minMeanAbs ( numeric_limits<double>::max() );
-
- for ( int j = 0 ; j < probabilities.channels(); j++ )
- {
- if ( forbidden_classesTrain.find ( j ) != forbidden_classesTrain.end() )
- {
- continue;
- }
- //check whether we found a class with a smaller abs mean than the current minimum
- if (abs(cr.scores[j]) < minMeanAbs)
- {
- minMeanAbs = abs(cr.scores[j]);
- }
- }
- // compute results when we take the lowest mean value of all classes
- double gpMeanVal = minMeanAbs;
-
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = gpMeanVal;
- }
- }
- 
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::computeNoveltyByGPMeanRatio( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
-
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
- double maxMean ( -numeric_limits<double>::max() );
- double sndMaxMean ( -numeric_limits<double>::max() );
-
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- if ( forbidden_classesTrain.find ( j ) != forbidden_classesTrain.end() )
- {
- continue;
- }
-
- //check for larger mean without abs as well
- if (cr.scores[j] > maxMean)
- {
- sndMaxMean = maxMean;
- maxMean = cr.scores[j];
- }
- // and also for the second highest mean of all classes
- else if (cr.scores[j] > sndMaxMean)
- {
- sndMaxMean = cr.scores[j];
- }
- }
-
- //look at the difference between the mean values of the most plausible class
- // and the second most plausible class
- double gpMeanRatioVal = maxMean - sndMaxMean;
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = gpMeanRatioVal;
- }
- }
-
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::computeNoveltyByGPWeightAll( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
-
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
-
- double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
-
- double gpWeightAllVal ( 0.0 );
- if ( numberOfClasses > 2)
- {
- //compute the weight in the alpha-vector for every sample after assuming it to be
- // added to the training set.
- // Thereby, we measure its "importance" for the current model
- //
- //double firstTerm is already computed
- //
- //the second term is only needed when computing impacts
- //double secondTerm; //this is the nasty guy :/
-
- //--- compute the third term
- // this is the difference between predicted label and GT label
- std::vector<double> diffToPositive; diffToPositive.clear();
- std::vector<double> diffToNegative; diffToNegative.clear();
- double diffToNegativeSum(0.0);
-
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- if ( forbidden_classesTrain.find ( j ) != forbidden_classesTrain.end() )
- {
- continue;
- }
-
- // look at the difference to plus 1
- diffToPositive.push_back(abs(cr.scores[j] - 1));
- // look at the difference to -1
- diffToNegative.push_back(abs(cr.scores[j] + 1));
- //sum up the difference to -1
- diffToNegativeSum += abs(cr.scores[j] + 1);
- }
- //let's subtract for every class its diffToNegative from the sum, add its diffToPositive,
- //and use this as the third term for this specific class.
- //the final value is obtained by minimizing over all classes
- //
- // originally, we minimize over all classes after building the final score
- // however, the first and the second term do not depend on the choice of
- // y*, therefore we minimize here already
- double thirdTerm (numeric_limits<double>::max()) ;
- for(uint tmpCnt = 0; tmpCnt < diffToPositive.size(); tmpCnt++)
- {
- double tmpVal ( diffToPositive[tmpCnt] + (diffToNegativeSum-diffToNegative[tmpCnt]) );
- if (tmpVal < thirdTerm)
- thirdTerm = tmpVal;
- }
- gpWeightAllVal = thirdTerm*firstTerm;
- }
- else //binary scenario
- {
- gpWeightAllVal = std::min( abs(cr.scores[*classesInUse.begin()]+1), abs(cr.scores[*classesInUse.begin()]-1) );
- gpWeightAllVal *= firstTerm;
- }
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = gpWeightAllVal;
- }
- }
-
-
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyImage,
- const NICE::MultiChannelImageT<double> & feats,
- NICE::Image & segresult,
- NICE::MultiChannelImageT<double> & probabilities,
- const int & xsize, const int & ysize, const int & featdim )
- {
- double gpNoise = conf->gD("GPHIK", "noise", 0.01);
-
- #pragma omp parallel for
- for ( int y = 0; y < ysize; y += testWSize )
- {
- Example example;
- example.vec = NULL;
- example.svec = new SparseVector ( featdim );
- for ( int x = 0; x < xsize; x += testWSize)
- {
- for ( int f = 0; f < featdim; f++ )
- {
- double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
- if ( val > 1e-10 )
- ( *example.svec ) [f] = val;
- }
- example.svec->normalize();
- ClassificationResult cr = classifier->classify ( example );
-
- double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
- double gpWeightRatioVal ( 0.0 );
- if ( numberOfClasses > 2)
- {
- //compute the weight in the alpha-vector for every sample after assuming it to be
- // added to the training set.
- // Thereby, we measure its "importance" for the current model
- //
- //double firstTerm is already computed
- //
- //the second term is only needed when computing impacts
- //double secondTerm; //this is the nasty guy :/
-
- //--- compute the third term
- // this is the difference between predicted label and GT label
- std::vector<double> diffToPositive; diffToPositive.clear();
- std::vector<double> diffToNegative; diffToNegative.clear();
- double diffToNegativeSum(0.0);
-
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- if ( forbidden_classesTrain.find ( j ) != forbidden_classesTrain.end() )
- {
- continue;
- }
-
- // look at the difference to plus 1
- diffToPositive.push_back(abs(cr.scores[j] - 1));
- }
- //let's subtract for every class its diffToNegative from the sum, add its diffToPositive,
- //and use this as the third term for this specific class.
- //the final value is obtained by minimizing over all classes
- //
- // originally, we minimize over all classes after building the final score
- // however, the first and the second term do not depend on the choice of
- // y*, therefore we minimize here already
-
- //now look on the ratio of the resulting weights for the most plausible
- // against the second most plausible class
- double thirdTermMostPlausible ( 0.0 ) ;
- double thirdTermSecondMostPlausible ( 0.0 ) ;
- for(uint tmpCnt = 0; tmpCnt < diffToPositive.size(); tmpCnt++)
- {
- if (diffToPositive[tmpCnt] > thirdTermMostPlausible)
- {
- thirdTermSecondMostPlausible = thirdTermMostPlausible;
- thirdTermMostPlausible = diffToPositive[tmpCnt];
- }
- else if (diffToPositive[tmpCnt] > thirdTermSecondMostPlausible)
- {
- thirdTermSecondMostPlausible = diffToPositive[tmpCnt];
- }
- }
- //compute the resulting score
- gpWeightRatioVal = (thirdTermMostPlausible - thirdTermSecondMostPlausible)*firstTerm;
- //finally, look for this feature how it would affect to whole model (summarized by weight-vector alpha), if we would
- //use it as an additional training example
- //TODO this would be REALLY computational demanding. Do we really want to do this?
- // gpImpactAll[s] ( pce[i].second.x, pce[i].second.y ) = thirdTerm*firstTerm*secondTerm;
- // gpImpactRatio[s] ( pce[i].second.x, pce[i].second.y ) = (thirdTermMostPlausible - thirdTermSecondMostPlausible)*firstTerm*secondTerm;
- }
- else //binary scenario
- {
- gpWeightRatioVal = std::min( abs(cr.scores[*classesInUse.begin()]+1), abs(cr.scores[*classesInUse.begin()]-1) );
- gpWeightRatioVal *= firstTerm;
- }
- int xs = std::max(0, x - testWSize/2);
- int xe = std::min(xsize - 1, x + testWSize/2);
- int ys = std::max(0, y - testWSize/2);
- int ye = std::min(ysize - 1, y + testWSize/2);
- for (int yl = ys; yl <= ye; yl++)
- {
- for (int xl = xs; xl <= xe; xl++)
- {
- for ( int j = 0 ; j < cr.scores.size(); j++ )
- {
- probabilities ( xl, yl, j ) = cr.scores[j];
- }
- segresult ( xl, yl ) = cr.classno;
- noveltyImage ( xl, yl ) = gpWeightRatioVal;
- }
- }
-
- example.svec->clear();
- }
- delete example.svec;
- example.svec = NULL;
- }
- }
- void SemSegNovelty::addNewExample(const NICE::Vector& newExample, const int & newClassNo)
- {
- //accept the new class as valid information
- if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
- {
- forbidden_classesTrain.erase(newClassNo);
- numberOfClasses++;
- }
- if ( classesInUse.find ( newClassNo ) == classesInUse.end() )
- {
- classesInUse.insert( newClassNo );
- }
-
-
- //then add it to the classifier used
- if ( classifier != NULL )
- {
- //TODO
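- // a minimal sketch of what could be done here, assuming the dense vector should be wrapped into a
- // sparse Example and passed to addMultipleExamples (the only incremental interface used in this file);
- // kept commented out, since this branch is left as a TODO:
- //
- // Example example;
- // example.vec = NULL;
- // example.svec = new SparseVector ( newExample.size() );
- // for ( uint f = 0; f < newExample.size(); f++ )
- // {
- //   if ( newExample[f] > 1e-10 )
- //     ( *example.svec ) [f] = newExample[f];
- // }
- // example.svec->normalize();
- // Examples singleExample;
- // singleExample.push_back ( pair<int, Example> ( newClassNo, example ) );
- // classifier->addMultipleExamples ( singleExample );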
- }
- else //vclassifier
- {
- if (this->classifierString.compare("nn") == 0)
- {
- vclassifier->teach ( newClassNo, newExample );
- }
- }
- }
- void SemSegNovelty::addNovelExamples()
- {
- Timer timer;
-
- //show the image that contains the most novel region
- if (visualizeALimages)
- showImage(maskedImg, "Most novel region");
-
- timer.start();
-
- std::stringstream out;
- std::vector< std::string > list2;
- StringTools::split ( Globals::getCurrentImgFN (), '/', list2 );
- out << resultdir << "/" << list2.back();
-
- maskedImg.writePPM ( out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+ "_query.ppm" );
-
- timer.stop();
- std::cerr << "AL time for writing queried image: " << timer.getLast() << std::endl;
- timer.start();
-
- //check which classes will be added using the features from the novel region
- std::set<int> newClassNumbers;
- newClassNumbers.clear(); //just to be sure
- for ( uint i = 0 ; i < newTrainExamples.size() ; i++ )
- {
- if (newClassNumbers.find(newTrainExamples[i].first /* classNumber*/) == newClassNumbers.end() )
- {
- newClassNumbers.insert(newTrainExamples[i].first );
- }
- }
- //accept the new classes as valid information
- for (std::set<int>::const_iterator clNoIt = newClassNumbers.begin(); clNoIt != newClassNumbers.end(); clNoIt++)
- {
- if ( forbidden_classesTrain.find ( *clNoIt ) != forbidden_classesTrain.end() )
- {
- forbidden_classesTrain.erase(*clNoIt);
- numberOfClasses++;
- }
- if ( classesInUse.find ( *clNoIt ) == classesInUse.end() )
- {
- classesInUse.insert( *clNoIt );
- }
- }
-
- timer.stop();
- std::cerr << "AL time for accepting possible new classes: " << timer.getLast() << std::endl;
-
- timer.start();
- //then add the new features to the classifier used
- if ( classifier != NULL )
- {
- if (this->classifierString.compare("ClassifierGPHIK") == 0)
- {
- classifier->addMultipleExamples ( this->newTrainExamples );
- }
- }
- else //vclassifier
- {
- //TODO
- }
-
- timer.stop();
- std::cerr << "AL time for actually updating the classifier: " << timer.getLast() << std::endl;
-
- std::cerr << "the current region to query is: " << currentRegionToQuery.first << " -- " << currentRegionToQuery.second << std::endl;
-
- //did we already query a region of this image?
- if ( queriedRegions.find( currentRegionToQuery.first ) != queriedRegions.end() )
- {
- queriedRegions[ currentRegionToQuery.first ].insert(currentRegionToQuery.second);
- }
- else
- {
- std::set<int> tmpSet; tmpSet.insert(currentRegionToQuery.second);
- queriedRegions.insert(std::pair<std::string,std::set<int> > (currentRegionToQuery.first, tmpSet ) );
- }
-
- std::cerr << "Write already queried regions: " << std::endl;
- for (std::map<std::string,std::set<int> >::const_iterator it = queriedRegions.begin(); it != queriedRegions.end(); it++)
- {
- std::cerr << "image: " << it->first << " -- ";
- for (std::set<int>::const_iterator itReg = it->second.begin(); itReg != it->second.end(); itReg++)
- {
- std::cerr << *itReg << " ";
- }
- std::cerr << std::endl;
- }
-
- //clear the latest results, since one iteration is over
- globalMaxUncert = -numeric_limits<double>::max();
- if (!mostNoveltyWithMaxScores)
- globalMaxUncert = numeric_limits<double>::max();
- }
- const Examples * SemSegNovelty::getNovelExamples() const
- {
- return &(this->newTrainExamples);
- }