added missing SemSegNoveltyBinary.*

Alexander Freytag 12 years ago
parent
commit
914753e2a1
2 changed files with 1772 additions and 0 deletions
  1. semseg/SemSegNoveltyBinary.cpp~  (+1527, −0)
  2. semseg/SemSegNoveltyBinary.h  (+245, −0)

+ 1527 - 0
semseg/SemSegNoveltyBinary.cpp~

@@ -0,0 +1,1527 @@
+#include <sstream>
+#include <iostream>
+
+#include "SemSegNoveltyBinary.h"
+
+#include <core/image/FilterT.h>
+#include <core/basics/numerictools.h>
+#include <core/basics/StringTools.h>
+#include <core/basics/Timer.h>
+
+#include <gp-hik-exp/GPHIKClassifierNICE.h>
+#include <vislearning/baselib/ICETools.h>
+#include <vislearning/baselib/Globals.h>
+#include <vislearning/features/fpfeatures/SparseVectorFeature.h>
+
+#include "segmentation/GenericRegionSegmentationMethodSelection.h"
+
+using namespace std;
+using namespace NICE;
+using namespace OBJREC;
+
+SemSegNoveltyBinary::SemSegNoveltyBinary ( const Config *conf,
+                               const MultiDataset *md )
+    : SemanticSegmentation ( conf, & ( md->getClassNames ( "train" ) ) )
+{
+  this->conf = conf;
+
+  globalMaxUncert = -numeric_limits<double>::max();
+  
+  string section = "SemSegNoveltyBinary";
+
+  featExtract = new LFColorWeijer ( conf );
+
+  this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); //save and read segmentation results from files
+  this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true ); //save the classifier to a file
+  this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false ); //read the classifier from a file
+
+  //write uncertainty results in the same folder as done for the segmentation results
+  resultdir = conf->gS("debug", "resultdir", "result");
+  cache = conf->gS ( "cache", "root", "" );
+  
+  
+  //stupid work around of the const attribute
+  Config confCopy = *conf;
+  
+  //make sure that we do NOT perform an optimization after every iteration step
+  //this would just take a lot of time, which is not desired so far
+  confCopy.sB("ClassifierGPHIK","performOptimizationAfterIncrement",false);
+  
+  classifierString = conf->gS ( section, "classifier", "ClassifierGPHIK" );  
+  classifier = NULL;
+  vclassifier = NULL;
+  if ( classifierString.compare("ClassifierGPHIK") == 0)
+    classifier = new GPHIKClassifierNICE ( &confCopy, "ClassifierGPHIK" );
+  else
+    vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );
+  
+
+
+  findMaximumUncert = conf->gB(section, "findMaximumUncert", true);
+  whs = conf->gI ( section, "window_size", 10 );
+  //distance to next descriptor during training
+  trainWsize = conf->gI ( section, "train_window_size", 10 );
+  //distance to next descriptor during testing
+  testWSize = conf->gI (section, "test_window_size", 10);
+  // select your segmentation method here
+  string rsMethode = conf->gS ( section, "segmentation", "none" );
+ 
+  if(rsMethode == "none")
+  {
+    regionSeg = NULL;
+  }
+  else
+  {
+    RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod(conf, rsMethode);    
+    if ( reuseSegmentation )
+      regionSeg = new RSCache ( conf, tmpRegionSeg );
+    else
+      regionSeg = tmpRegionSeg;
+  }
+  
+  cn = md->getClassNames ( "train" );
+
+  if ( read_classifier )
+  {
+    try
+    {
+      if ( classifier != NULL )
+      {
+        string classifierdst = "/classifier.data";        
+        fprintf ( stderr, "SemSegNoveltyBinary:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );        
+        classifier->read ( cache + classifierdst );
+      }
+      else
+      {
+        string classifierdst = "/veccl.data";        
+        fprintf ( stderr, "SemSegNoveltyBinary:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );          
+        vclassifier->read ( cache + classifierdst );      
+      }
+      
+
+      fprintf ( stderr, "SemSegNoveltyBinary:: successfully read\n" );
+    }
+    catch ( char *str )
+    {
+      cerr << "error reading data: " << str << endl;
+    }
+  }
+  else
+  {
+    train ( md );
+  }
+  
+  //define which measure for "novelty" we want to use
+  noveltyMethodString = conf->gS( section,  "noveltyMethod", "gp-variance");
+  if (noveltyMethodString.compare("gp-variance") == 0)  // novel = large variance
+  {
+    this->noveltyMethod = GPVARIANCE;
+    this->mostNoveltyWithMaxScores = true;
+  }
+  else if (noveltyMethodString.compare("gp-uncertainty") == 0) //novel = large uncertainty (mean / var)
+  {
+    this->noveltyMethod = GPUNCERTAINTY;
+    this->mostNoveltyWithMaxScores = false;
+    globalMaxUncert = numeric_limits<double>::max();
+  } 
+  else if (noveltyMethodString.compare("gp-mean") == 0) //novel = small mean
+  {
+    this->noveltyMethod = GPMINMEAN;
+    this->mostNoveltyWithMaxScores = false;
+    globalMaxUncert = numeric_limits<double>::max();
+  }
+  else if (noveltyMethodString.compare("gp-meanRatio") == 0)  //novel = small difference between mean of most plausible class and mean of snd
+                                                              //        most plausible class (not useful in binary settings)
+  {
+    this->noveltyMethod = GPMEANRATIO;
+    this->mostNoveltyWithMaxScores = false;
+    globalMaxUncert = numeric_limits<double>::max();
+  }
+  else if (noveltyMethodString.compare("gp-weightAll") == 0) // novel = large weight in alpha vector after updating the model (can be predicted exactly)
+  {
+    this->noveltyMethod = GPWEIGHTALL;
+    this->mostNoveltyWithMaxScores = true;
+  }
+  else if (noveltyMethodString.compare("gp-weightRatio") == 0) // novel = small difference between weights for alpha vectors 
+                                                               //     with assumptions of GT label to be the most 
+                                                               //     plausible against the second most plausible class   
+  {
+    this->noveltyMethod = GPWEIGHTRATIO;
+    this->mostNoveltyWithMaxScores = false;
+    globalMaxUncert = numeric_limits<double>::max();
+  }
+  else if (noveltyMethodString.compare("random") == 0) 
+  {
+     initRand(); 
+     this->noveltyMethod = RANDOM;
+  }
+  else
+  {
+    this->noveltyMethod = GPVARIANCE;
+    this->mostNoveltyWithMaxScores = true;
+  }
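+  // illustrative example (not part of the original code, [section] syntax assumed):
+  //   [SemSegNoveltyBinary]
+  //   noveltyMethod = gp-mean
+  // selects GPMINMEAN, i.e., small predictive means count as novel; accordingly,
+  // mostNoveltyWithMaxScores is false and globalMaxUncert starts at +infinity,
+  // so that smaller region scores can replace it later on.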
+  
+  //we have not queried any region so far
+  queriedRegions.clear();
+  visualizeALimages = conf->gB(section, "visualizeALimages", false);
+  
+  resultsOfSingleRun.clear();
+  
+  write_results = conf->gB( "debug", "write_results", false );
+}
+
+SemSegNoveltyBinary::~SemSegNoveltyBinary()
+{
+  if(newTrainExamples.size() > 0)
+  {
+    // show most uncertain region
+    if (visualizeALimages)
+      showImage(maskedImg);
+    
+    //incorporate new information into the classifier
+    if (classifier != NULL)
+      classifier->addMultipleExamples(newTrainExamples);
+    
+    //store the classifier, such that we can read it again in the next round (if we like that)
+    classifier->save ( cache + "/classifier.data" );
+  }
+  
+  // clean-up
+  if ( classifier != NULL )
+    delete classifier;
+  if ( vclassifier != NULL )
+    delete vclassifier;
+  if ( featExtract != NULL )
+    delete featExtract;
+}
+
+void SemSegNoveltyBinary::visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix &regions, int region, NICE::ColorImage &outimage)
+{
+  std::vector<uchar> color;
+  color.push_back(255);
+  color.push_back(0);
+  color.push_back(0);
+    
+  int width = img.width();
+  int height = img.height();
+  
+  outimage.resize(width,height);
+  
+  for(int y = 0; y < height; y++)
+  {
+    for(int x = 0; x < width; x++)
+    {
+      if(regions(x,y) == region)
+      {
+        for(int c = 0; c < 3; c++)
+        {
+          outimage(x,y,c) = color[c];
+        }
+      }
+      else
+      {
+        for(int c = 0; c < 3; c++)
+        {
+          outimage(x,y,c) = img(x,y,c);
+        }
+      }
+    }
+  }
+}
+
+void SemSegNoveltyBinary::train ( const MultiDataset *md )
+{
+  const LabeledSet train = * ( *md ) ["train"];
+  const LabeledSet *trainp = &train;
+
+  ////////////////////////
+  // feature extraction //
+  ////////////////////////
+ 
+  //check which classes are excluded from training - this is very specific to our setup 
+  std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
+  if ( forbidden_classesTrain_s == "" )
+  {
+    forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
+  }
+  cn.getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );
+  
+  //check whether we have a single positive class
+  std::string positiveClass_s = conf->gS ( "SemSegNoveltyBinary", "positiveClass", "" );
+  std::set<int> positiveClassNumberTmp;
+  cn.getSelection ( positiveClass_s, positiveClassNumberTmp );  
+
+  std::cerr << "BINARY SETTING ENABLED! " << std::endl;
+  switch ( positiveClassNumberTmp.size() )
+  {
+    case 0:
+    {
+      positiveClass = 0;
+      std::cerr << "no positive class given, assume 0 as positive class" << std::endl;
+      break;
+    }
+    case 1:
+    {
+      positiveClass = *(positiveClassNumberTmp.begin());
+      std::cerr << "positive class will be number" << positiveClass << " with the name: " << positiveClass_s << std::endl;
+      break;
+    }
+    default:
+    {
+      //we specified more than a single positive class. Right now, this is not what we are interested in, but 
+      //in theory we could also accept this and convert positiveClass into a set of ints of possible positive classes
+      positiveClass = 0;
+      std::cerr << "more than one positive class given, assuming 0 as positive class" << std::endl;
+      break;
+    }
+  }  
+  std::cerr << "============================" << std::endl << std::endl;  
+
+
+  ProgressBar pb ( "Local Feature Extraction" );
+  pb.show();
+
+  int imgnb = 0;
+
+  Examples examples;
+  examples.filename = "training";
+
+  int featdim = -1;
+
+  classesInUse.clear();  
+  
+  LOOP_ALL_S ( *trainp )
+  {
+    //EACH_S(classno, currentFile);
+    EACH_INFO ( classno, info );
+
+    std::string currentFile = info.img();
+
+    CachedExample *ce = new CachedExample ( currentFile );
+    
+    const LocalizationResult *locResult = info.localization();
+    if ( locResult->size() <= 0 )
+    {
+      fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
+                currentFile.c_str() );
+      continue;
+    }
+
+    int xsize, ysize;
+    ce->getImageSize ( xsize, ysize );
+
+    Image labels ( xsize, ysize );
+    labels.set ( 0 );
+    locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );
+
+    NICE::ColorImage img;
+    try {
+      img = ColorImage ( currentFile );
+    } catch ( Exception ) {
+      cerr << "SemSegNoveltyBinary: error opening image file <" << currentFile << ">" << endl;
+      continue;
+    }
+
+    Globals::setCurrentImgFN ( currentFile );
+
+    MultiChannelImageT<double> feats;
+
+    // extract features
+    featExtract->getFeats ( img, feats );
+    featdim = feats.channels();
+    feats.addChannel(featdim);
+
+    for (int c = 0; c < featdim; c++)
+    {
+      ImageT<double> tmp = feats[c];
+      ImageT<double> tmp2 = feats[c+featdim];
+
+      NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
+    }
+    featdim += featdim;
+
+    // compute integral images
+    for ( int c = 0; c < featdim; c++ )
+    {
+      feats.calcIntegral ( c );
+    }
+
+    for ( int y = 0; y < ysize; y += trainWsize)
+    {
+      for ( int x = 0; x < xsize; x += trainWsize )
+      {
+
+        int classnoTmp = labels.getPixel ( x, y );
+        
+        if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
+        {
+          continue;
+        }
+        
+        if (classesInUse.find(classnoTmp) == classesInUse.end())
+        {
+          classesInUse.insert(classnoTmp);
+        }
+        
+        Example example;
+        example.vec = NULL;
+        example.svec = new SparseVector ( featdim );
+        for ( int f = 0; f < featdim; f++ )
+        {
+          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+          if ( val > 1e-10 )
+            ( *example.svec ) [f] = val;
+        }
+
+        example.svec->normalize();
+
+        example.position = imgnb;
+        if ( classnoTmp == positiveClass )
+          examples.push_back ( pair<int, Example> ( 1, example ) );
+        else
+          examples.push_back ( pair<int, Example> ( 0, example ) );
+      }
+    }
+ 
+    
+    
+
+    delete ce;
+    imgnb++;
+    pb.update ( trainp->count() );
+  }
+  
+    
+  numberOfClasses = classesInUse.size();
+  std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;  
+  std::cerr << "classes in use: " << std::endl;
+  for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
+  {
+    std::cerr << *it << " : " <<  cn.text(*it) <<  " ";
+  }    
+  std::cerr << std::endl;
+
+  pb.hide();
+
+
+  //////////////////////
+  // train classifier //
+  //////////////////////
+  FeaturePool fp;
+
+  Feature *f = new SparseVectorFeature ( featdim );
+
+  f->explode ( fp );
+  delete f;
+
+  if ( classifier != NULL )
+  {
+    std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
+    classifier->train ( fp, examples );
+    std::cerr << "training finished" << std::endl;
+  }
+  else
+  {
+    LabeledSetVector lvec;
+    convertExamplesToLSet ( examples, lvec );
+    vclassifier->teach ( lvec );
+//     if ( usegmm )
+//       convertLSetToSparseExamples ( examples, lvec );
+//     else
+    std::cerr << "classifierString: " << classifierString << std::endl;
+    if (this->classifierString.compare("nn") == 0)
+    {
+      convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
+    }
+    else
+    {
+      convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
+    }
+    vclassifier->finishTeaching();
+  }  
+
+  fp.destroy();
+
+  if ( save_classifier )
+  {
+    if ( classifier != NULL )
+      classifier->save ( cache + "/classifier.data" );
+    else
+      vclassifier->save ( cache + "/veccl.data" );    
+  }
+
+  ////////////
+  //clean up//
+  ////////////
+  for ( int i = 0; i < ( int ) examples.size(); i++ )
+  {
+    examples[i].second.clean();
+  }
+  examples.clear();
+
+  cerr << "SemSeg training finished" << endl;
+}
+
+
+void SemSegNoveltyBinary::semanticseg ( CachedExample *ce, NICE::Image & segresult, NICE::MultiChannelImageT<double> & probabilities )
+{  
+  Timer timer;
+  timer.start();
+  
+  //segresult contains the GT labels when this method is called
+  // we simply store them in labels, to have easy access to the GT information later on
+  Image labels = segresult;
+  //just to be sure that we do not have a GT-biased result :)
+  segresult.set(0);
+
+  int featdim = -1;
+
+  std::string currentFile = Globals::getCurrentImgFN();
+
+
+  int xsize, ysize;
+  ce->getImageSize ( xsize, ysize );
+
+  probabilities.reInit( xsize, ysize, 2);
+  probabilities.setAll ( 0.0 );
+   
+  NICE::ColorImage img;
+  try {
+    img = ColorImage ( currentFile );
+  } catch ( Exception ) {
+    cerr << "SemSegNoveltyBinary: error opening image file <" << currentFile << ">" << endl;
+    return;
+  }
+
+  MultiChannelImageT<double> feats;
+
+  // extract features
+  featExtract->getFeats ( img, feats );
+  featdim = feats.channels();
+  feats.addChannel(featdim);
+
+  for (int c = 0; c < featdim; c++)
+  {
+    ImageT<double> tmp = feats[c];
+    ImageT<double> tmp2 = feats[c+featdim];
+
+    NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
+  }
+  featdim += featdim;
+
+  // compute integral images
+  for ( int c = 0; c < featdim; c++ )
+  {
+    feats.calcIntegral ( c );
+  }
+  
+  timer.stop();
+  std::cout << "AL time for preparation: " << timer.getLastAbsolute() << std::endl;
+    
+  timer.start();
+  //classification results currently only need to be computed separately if we use the vclassifier, i.e., the nearest neighbor classifier used 
+  // for the "novel feature learning" approach
+  //in all other settings, such as active semantic segmentation in general, we do this within the novelty-computation methods
+  if ( classifier == NULL )
+  {
+    this->computeClassificationResults( feats, segresult, probabilities, xsize, ysize, featdim);
+  }
+//   timer.stop();
+//   
+//   std::cerr << "classification results computed" << std::endl;
+  
+  FloatImage noveltyImage ( xsize, ysize );
+  noveltyImage.set ( 0.0 );  
+  
+  switch (noveltyMethod)
+  {
+    case GPVARIANCE:
+    {
+         this->computeNoveltyByVariance( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;
+    }
+    case GPUNCERTAINTY:
+    {
+         this->computeNoveltyByGPUncertainty( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;         
+    }
+    case GPMINMEAN:
+    {
+         std::cerr << "compute novelty using the minimum mean" << std::endl;
+         this->computeNoveltyByGPMean( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;         
+    }
+    case GPMEANRATIO:
+    {
+         this->computeNoveltyByGPMeanRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;         
+    }
+    case GPWEIGHTALL:
+    {
+         this->computeNoveltyByGPWeightAll( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;         
+    }
+    case GPWEIGHTRATIO:
+    {
+         this->computeNoveltyByGPWeightRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;         
+    }    
+    case RANDOM:
+    {
+         this->computeNoveltyByRandom( noveltyImage, feats, segresult, probabilities, xsize, ysize,  featdim );
+         break;               
+    }
+    default:
+    {
+         //do nothing, keep the image constant to 0.0
+         break;
+    }
+         
+  }
+  
+  timer.stop();
+  std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;
+
+  if ( write_results || visualizeALimages )
+  {
+    ColorImage imgrgbTmp (xsize, ysize);
+    ICETools::convertToRGB ( noveltyImage, imgrgbTmp );  
+  
+    ColorImage imgClassificationTmp (xsize, ysize);
+    this->cn.labelToRGB( segresult, imgClassificationTmp );  
+    
+    if ( write_results )
+    {
+      std::stringstream out;
+      std::vector< std::string > list2;
+      StringTools::split ( currentFile, '/', list2 );
+      out << resultdir << "/" << list2.back();
+//       std::cerr << "writing to " << out.str() + "_run_" +  NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+"_unsmoothed.rawfloat" << std::endl;
+    
+      noveltyImage.writeRaw(out.str() + "_run_" +  NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+"_unsmoothed.rawfloat");
+      
+    }
+    
+    if (visualizeALimages)
+    {
+        showImage(imgrgbTmp, "Novelty Image without Region Segmentation");       
+        showImage(imgrgbTmp, "Classification Result without Region Segmentation");        
+    }    
+  }
+
+  
+    
+  timer.start();
+  
+  //determine regions
+  if(regionSeg != NULL)
+  {
+    NICE::Matrix mask;
+    int amountRegions = regionSeg->segRegions ( img, mask );
+    
+    //compute probs per region
+    std::vector<std::vector<double> > regionProb(amountRegions, std::vector<double>(probabilities.channels(),0.0));
+    std::vector<double> regionNoveltyMeasure (amountRegions, 0.0);
+
+    std::vector<int> regionCounter(amountRegions, 0);
+    std::vector<int> regionCounterNovelty(amountRegions, 0);
+    for ( int y = 0; y < ysize; y += trainWsize) //y++)
+    {
+      for (int x = 0; x < xsize; x += trainWsize) //x++)
+      {
+        int r = mask(x,y);
+        regionCounter[r]++;
+        for(int j = 0; j < probabilities.channels(); j++)
+        {
+          regionProb[r][j] += probabilities ( x, y, j );
+        }
+        
+        if ( forbidden_classesActiveLearning.find( labels(x,y) ) == forbidden_classesActiveLearning.end() )
+        {
+          //count the amount of "novelty" for the corresponding region
+          regionNoveltyMeasure[r] += noveltyImage(x,y);
+          regionCounterNovelty[r]++;
+        }
+      }
+    }
+       
+    //find best class per region
+    std::vector<int> bestClassPerRegion(amountRegions,0);
+    
+    double maxNoveltyScore = -numeric_limits<double>::max();
+    if (!mostNoveltyWithMaxScores)
+    {
+      maxNoveltyScore = numeric_limits<double>::max();
+    }   
+    
+    int maxUncertRegion = -1;
+    
+    //loop over all regions and compute averaged novelty scores
+    for(int r = 0; r < amountRegions; r++)
+    {
+      
+      //check for the most plausible class per region
+      double maxval = -numeric_limits<double>::max();
+      
+      //loop over all classes
+      for(int c = 0; c < probabilities.channels(); c++)
+      {
+        regionProb[r][c] /= regionCounter[r];
+        
+        if(  (maxval < regionProb[r][c]) ) //&& (regionProb[r][c] != 0.0) ) 
+        {        
+              maxval = regionProb[r][c];
+              bestClassPerRegion[r] = c;
+        }
+      }
+       
+      //if the region only contains invalid information (e.g., background), skip it
+      if (regionCounterNovelty[r] == 0)
+      {
+        continue;
+      }
+      
+      //normalize summed novelty scores to region size
+      regionNoveltyMeasure[r] /= regionCounterNovelty[r];
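+      // illustrative example (not part of the original code): if 12 sampled grid
+      // points fall into region r and their novelty values sum to 3.0, the region
+      // receives the averaged score 3.0 / 12 = 0.25, which makes regions of
+      // different size comparable.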
+    
+      //did we find a region that has a higher score than the most novel region known so far within this image?
+      if(   (  mostNoveltyWithMaxScores && (maxNoveltyScore < regionNoveltyMeasure[r]) )    // if we look for large novelty scores, e.g., variance
+        || ( !mostNoveltyWithMaxScores && (maxNoveltyScore > regionNoveltyMeasure[r]) ) )  // if we look for small novelty scores, e.g., min mean
+      {
+                   //did we already query a region of this image?                --   and it was this specific region
+        if ( (queriedRegions.find( currentFile ) != queriedRegions.end() ) && ( queriedRegions[currentFile].find(r) != queriedRegions[currentFile].end() ) )
+        {
+          continue;
+        }
+        else //only accept the region as novel if we never queried it before
+        {
+          maxNoveltyScore = regionNoveltyMeasure[r];
+          maxUncertRegion = r;        
+        }
+
+      }
+
+    }
+    
+    // after finding the most novel region for the current image, check whether this region is also the most novel with respect
+    // to all previously seen test images
+    // if so, store the corresponding features, since we want to "actively" query them to incorporate useful information
+    if(findMaximumUncert)
+    {
+      if(    (   mostNoveltyWithMaxScores && (maxNoveltyScore > globalMaxUncert) )
+          || (  !mostNoveltyWithMaxScores && (maxNoveltyScore < globalMaxUncert) ) )
+      {
+        //current most novel region of the image has "higher" novelty score than the previous most novel region of all test images worked on so far
+        // -> save new important features of this region
+        Examples examples;
+        for ( int y = 0; y < ysize; y += trainWsize )
+        {
+          for ( int x = 0; x < xsize; x += trainWsize)
+          {
+            if(mask(x,y) == maxUncertRegion)
+            {
+              int classnoTmp = labels(x,y);
+              if ( forbidden_classesActiveLearning.find(classnoTmp) != forbidden_classesActiveLearning.end() )
+                continue;
+              
+              Example example;
+              example.vec = NULL;
+              example.svec = new SparseVector ( featdim );
+              
+              for ( int f = 0; f < featdim; f++ )
+              {
+                double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+                if ( val > 1e-10 )
+                  ( *example.svec ) [f] = val;
+              }
+              example.svec->normalize();
+              if ( classnoTmp == positiveClass )
+                examples.push_back ( pair<int, Example> ( 1, example ) );
+              else
+                examples.push_back ( pair<int, Example> ( 0, example ) );
+            }
+          }
+        }
+        
+        if(examples.size() > 0)
+        {
+          std::cerr << "found " << examples.size() << " new examples in the queried region" << std::endl << std::endl;
+          newTrainExamples.clear();
+          newTrainExamples = examples;
+          globalMaxUncert = maxNoveltyScore;
+          //prepare for later visualization
+          visualizeRegion(img,mask,maxUncertRegion,maskedImg);
+        }
+        else
+        {
+          std::cerr << "the queried region has no valid information" << std::endl << std::endl;
+        }
+        
+        //save filename and region index
+        currentRegionToQuery.first = currentFile;
+        currentRegionToQuery.second = maxUncertRegion;
+      }
+    }
+
+    //write back best results per region
+    //i.e., write normalized novelty scores for every region into the novelty image
+    for ( int y = 0; y < ysize; y++)
+    {
+      for (int x = 0; x < xsize; x++)
+      {
+        int r = mask(x,y);
+        for(int j = 0; j < probabilities.channels(); j++)
+        {
+          probabilities ( x, y, j ) = regionProb[r][j];
+        }
+        if ( bestClassPerRegion[r] == 0 )
+          segresult(x,y) = positiveClass;
+        else //take the various class as negative
+          segresult(x,y) = 22; //bestClassPerRegion[r];
+        
+        // write novelty scores for every segment into the "final" image
+        noveltyImage(x,y) = regionNoveltyMeasure[r];
+      }
+    }
+    
+    //compute the classification results used for the AUC evaluation
+    for ( int y = 0; y < ysize; y++)
+    {
+      for (int x = 0; x < xsize; x++)
+      {
+        OBJREC::FullVector scoresTmp (2);
+        scoresTmp[1] = probabilities ( x, y, 0 ); //probabilities[0] == negative class == scores[1]
+        scoresTmp[0] = probabilities ( x, y, 1 ); //probabilities[1] == positive class == scores[0]
+        
+        int cno = scoresTmp[1] > 0 ? 1 : 0;
+
+        ClassificationResult cr ( cno/*doesn't matter*/, scoresTmp );
+        
+        if ( labels(x,y) == positiveClass )
+          cr.classno_groundtruth = 1;
+        else
+          cr.classno_groundtruth = 0;
+        
+        resultsOfSingleRun.push_back(cr);        
+      }      
+    }
+  } // if regionSeg != null
+  
+  timer.stop();
+  std::cout << "AL time for determination of novel regions: " << timer.getLastAbsolute() << std::endl;
+
+  timer.start();
+
+  ColorImage imgrgb ( xsize, ysize );
+
+  if ( write_results )
+  {
+    std::stringstream out;
+    std::vector< std::string > list2;
+    StringTools::split ( currentFile, '/', list2 );
+    out << resultdir << "/" << list2.back();
+    
+    noveltyImage.writeRaw(out.str() + "_run_" +  NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+".rawfloat");
+  }
+  
+  if (visualizeALimages)
+  {
+    ICETools::convertToRGB ( noveltyImage, imgrgb );
+    showImage(imgrgb, "Novelty Image");
+    
+    ColorImage tmp (xsize, ysize);
+    cn.labelToRGB(segresult,tmp);
+    showImage(tmp, "Cl result after region seg");    
+  }
+
+  timer.stop();
+  cout << "AL time for writing the raw novelty image: " << timer.getLastAbsolute() << endl;
+}
+
+inline void SemSegNoveltyBinary::computeClassificationResults( const NICE::MultiChannelImageT<double> & feats, 
+                                                   NICE::Image & segresult,
+                                                   NICE::MultiChannelImageT<double> & probabilities,
+                                                   const int & xsize,
+                                                   const int & ysize,
+                                                   const int & featdim
+                                                       )
+{
+  std::cerr << "featdim: " << featdim << std::endl;
+  
+  if ( classifier != NULL )
+  {  
+
+            
+    #pragma omp parallel for
+    for ( int y = 0; y < ysize; y += testWSize )
+    {
+      Example example;
+      example.vec = NULL;
+      example.svec = new SparseVector ( featdim );
+      for ( int x = 0; x < xsize; x += testWSize)
+      {
+        for ( int f = 0; f < featdim; f++ )
+        {
+          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+          if ( val > 1e-10 )
+            ( *example.svec ) [f] = val;
+        }
+        example.svec->normalize();
+
+        ClassificationResult cr = classifier->classify ( example );
+
+        int xs = std::max(0, x - testWSize/2);
+        int xe = std::min(xsize - 1, x + testWSize/2);
+        int ys = std::max(0, y - testWSize/2);
+        int ye = std::min(ysize - 1, y + testWSize/2);
+        for (int yl = ys; yl <= ye; yl++)
+        {
+          for (int xl = xs; xl <= xe; xl++)
+          {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == 1 )
+              segresult ( xl, yl ) = positiveClass;
+            else
+              segresult ( xl, yl ) = 22; //various  
+          }
+        }
+        
+        example.svec->clear();
+      }
+      delete example.svec;
+      example.svec = NULL;
+    }
+  }
+  else //vclassifier
+  {
+    std::cerr << "compute classification results with vclassifier" << std::endl;
+    #pragma omp parallel for
+    for ( int y = 0; y < ysize; y += testWSize )
+    {
+      for ( int x = 0; x < xsize; x += testWSize)
+      {
+        NICE::Vector v(featdim);
+        for ( int f = 0; f < featdim; f++ )
+        {
+          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+          v[f] = val;
+        }
+        v.normalizeL1();
+
+        ClassificationResult cr = vclassifier->classify ( v );
+
+        int xs = std::max(0, x - testWSize/2);
+        int xe = std::min(xsize - 1, x + testWSize/2);
+        int ys = std::max(0, y - testWSize/2);
+        int ye = std::min(ysize - 1, y + testWSize/2);
+        for (int yl = ys; yl <= ye; yl++)
+        {
+          for (int xl = xs; xl <= xe; xl++)
+          {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == 1 )
+              segresult ( xl, yl ) = positiveClass;
+            else
+              segresult ( xl, yl ) = 22; //various  
+          }
+        }
+      }
+    }    
+
+  }
+}
+
+// compute novelty images depending on the strategy chosen
+
+void SemSegNoveltyBinary::computeNoveltyByRandom(         NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim  )
+{
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+  
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      
+      double randVal = randDouble();
+
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == 1 )
+              segresult ( xl, yl ) = positiveClass;
+            else
+              segresult ( xl, yl ) = 22; //various  
+              
+          noveltyImage ( xl, yl ) = randVal; 
+        }
+      }     
+      
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+
+void SemSegNoveltyBinary::computeNoveltyByVariance(       NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim )
+{
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+      
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == 1 )
+              segresult ( xl, yl ) = positiveClass;
+            else
+              segresult ( xl, yl ) = 22; //various  
+              
+          noveltyImage ( xl, yl ) = cr.uncertainty; 
+        }
+      }          
+      
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+void SemSegNoveltyBinary::computeNoveltyByGPUncertainty(  NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim )
+{
+  
+  double gpNoise =  conf->gD("GPHIK", "noise", 0.01);
+  
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+           
+      double gpMeanVal = abs(cr.scores[0]);    //very specific to the binary setting  
+
+      double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
+      
+      //compute the heuristic GP-UNCERTAINTY, as proposed by Kapoor et al. in IJCV 2010
+      // GP-UNCERTAINTY : |mean| / sqrt(variance + gpNoise)
+      double gpUncertaintyVal = gpMeanVal*firstTerm; //firstTerm = 1.0 / sqrt(cr.uncertainty + gpNoise)
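+      // illustrative example (not part of the original code): with |mean| = 0.8,
+      // cr.uncertainty = 0.15 and gpNoise = 0.01, the score is 0.8 / sqrt(0.16) = 2.0;
+      // confident predictions far from the decision boundary obtain large scores,
+      // so small scores mark novel regions (mostNoveltyWithMaxScores == false).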
+
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {         
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == positiveClass )
+              segresult ( xl, yl ) = cr.classno;
+            else
+              segresult ( xl, yl ) = 22; //various        
+          noveltyImage ( xl, yl ) = gpUncertaintyVal;  
+        }
+      }   
+      
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+void SemSegNoveltyBinary::computeNoveltyByGPMean(  NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim )
+{
+  double gpNoise =  conf->gD("GPHIK", "noise", 0.01);  
+    
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+
+      double gpMeanVal = abs(cr.scores[0]);  //very specific to the binary setting  
+  
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == 1 )
+              segresult ( xl, yl ) = positiveClass;
+            else
+              segresult ( xl, yl ) = 22; //various  
+              
+          noveltyImage ( xl, yl ) = gpMeanVal; 
+        }
+      }     
+      
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+void SemSegNoveltyBinary::computeNoveltyByGPMeanRatio(  NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim )
+{
+  double gpNoise =  conf->gD("GPHIK", "noise", 0.01);  
+  
+  //NOTE: in binary settings, this is the same as 2*|mean|  
+  
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+     
+      //look at the difference in the absolute mean values for the most plausible class
+      // and the second most plausible class
+      double gpMeanRatioVal = 2*abs(cr.scores[0]);  //very specific to the binary setting  
+
+
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == positiveClass )
+              segresult ( xl, yl ) = cr.classno;
+            else
+              segresult ( xl, yl ) = 22; //various      
+          noveltyImage ( xl, yl ) = gpMeanRatioVal;
+        }
+      }    
+      
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+void SemSegNoveltyBinary::computeNoveltyByGPWeightAll(  NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim )
+{
+  double gpNoise =  conf->gD("GPHIK", "noise", 0.01);  
+  
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+      
+      double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
+      
+      double gpWeightAllVal ( 0.0 );
+
+      //binary scenario
+      gpWeightAllVal = std::min( abs(cr.scores[0]+1), abs(cr.scores[0]-1) );
+      gpWeightAllVal *= firstTerm;
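+      // illustrative example (not part of the original code): with cr.scores[0] = 0.3,
+      // the distance to the closer binary label (+1 or -1) is min(1.3, 0.7) = 0.7;
+      // scaled by firstTerm this is the predicted weight in the alpha vector after a
+      // model update (cf. the constructor comment), and large values count as novel.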
+
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == positiveClass )
+              segresult ( xl, yl ) = cr.classno;
+            else
+              segresult ( xl, yl ) = 22; //various         
+          noveltyImage ( xl, yl ) = gpWeightAllVal;
+        }
+      }
+   
+      
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+void SemSegNoveltyBinary::computeNoveltyByGPWeightRatio(  NICE::FloatImage & noveltyImage, 
+                                              const NICE::MultiChannelImageT<double> & feats,  
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                             const int & xsize, const int & ysize, const int & featdim )
+{
+  double gpNoise =  conf->gD("GPHIK", "noise", 0.01);  
+  
+  //NOTE: in binary settings, this is the same as 2*weightAll
+  
+#pragma omp parallel for
+  for ( int y = 0; y < ysize; y += testWSize )
+  {
+    Example example;
+    example.vec = NULL;
+    example.svec = new SparseVector ( featdim );
+    for ( int x = 0; x < xsize; x += testWSize)
+    {
+      for ( int f = 0; f < featdim; f++ )
+      {
+        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
+        if ( val > 1e-10 )
+          ( *example.svec ) [f] = val;
+      }
+      example.svec->normalize();
+
+      ClassificationResult cr = classifier->classify ( example );
+ 
+
+       double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
+
+       double gpWeightRatioVal ( 0.0 );
+
+      //binary scenario
+      gpWeightRatioVal = std::min( abs(cr.scores[0]+1), abs(cr.scores[0]-1) );
+      gpWeightRatioVal *= 2*firstTerm;
+
+      int xs = std::max(0, x - testWSize/2);
+      int xe = std::min(xsize - 1, x + testWSize/2);
+      int ys = std::max(0, y - testWSize/2);
+      int ye = std::min(ysize - 1, y + testWSize/2);
+      for (int yl = ys; yl <= ye; yl++)
+      {
+        for (int xl = xs; xl <= xe; xl++)
+        {
+            for ( int j = 0 ; j < cr.scores.size(); j++ )
+            { 
+              probabilities ( xl, yl, j ) = cr.scores[j];
+            }
+            
+            if ( cr.classno == positiveClass )
+              segresult ( xl, yl ) = cr.classno;
+            else
+              segresult ( xl, yl ) = 22; //various         
+          noveltyImage ( xl, yl ) = gpWeightRatioVal;  
+        }
+      }
+       
+      example.svec->clear();
+    }
+    delete example.svec;
+    example.svec = NULL;
+  }  
+}
+
+
+void SemSegNoveltyBinary::addNewExample(const NICE::Vector& newExample, const int & newClassNo)
+{
+  //accept the new class as valid information
+  if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
+  {
+    forbidden_classesTrain.erase(newClassNo);
+    numberOfClasses++;
+  }
+  if ( classesInUse.find ( newClassNo ) == classesInUse.end() )
+  {
+    classesInUse.insert( newClassNo );
+  }    
+  
+  
+  //then add it to the classifier used
+  if ( classifier != NULL )
+  { 
+    //TODO    
+  }
+  else //vclassifier
+  {
+    if (this->classifierString.compare("nn") == 0)    
+    {
+      vclassifier->teach ( newClassNo, newExample );
+    }
+  }
+}
+
+void SemSegNoveltyBinary::addNovelExamples()
+{
+
+  Timer timer;
+  
+  //show the image that contains the most novel region
+  if (visualizeALimages)
+    showImage(maskedImg, "Most novel region");  
+  
+  timer.start();
+    
+  std::stringstream out;
+  std::vector< std::string > list;
+  StringTools::split ( currentRegionToQuery.first, '/', list );  
+  out << resultdir << "/" << list.back();     
+    
+  maskedImg.writePPM ( out.str() + "_run_" +  NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+ "_query.ppm" );
+
+  
+  timer.stop();
+  std::cerr << "AL time for writing queried image: " << timer.getLast() << std::endl;
+
+  timer.start();
+  
+  //check which classes will be added using the features from the novel region
+  std::set<int> newClassNumbers;
+  newClassNumbers.clear(); //just to be sure  
+  for ( uint i = 0 ; i < newTrainExamples.size() ; i++ )
+  {
+    if (newClassNumbers.find(newTrainExamples[i].first /* classNumber*/) == newClassNumbers.end() )
+    {
+      newClassNumbers.insert(newTrainExamples[i].first );
+    }
+  }
+
+  //accept the new classes as valid information
+  for (std::set<int>::const_iterator clNoIt = newClassNumbers.begin(); clNoIt != newClassNumbers.end(); clNoIt++)
+  {
+    if ( forbidden_classesTrain.find ( *clNoIt ) != forbidden_classesTrain.end() )
+    {
+      forbidden_classesTrain.erase(*clNoIt);
+      numberOfClasses++;
+    }
+    if ( classesInUse.find ( *clNoIt ) == classesInUse.end() )
+    {
+      classesInUse.insert( *clNoIt );
+    }
+  }
+  
+  timer.stop();
+  std::cerr << "AL time for accepting possible new classes: " << timer.getLast() << std::endl;
+  
+  timer.start();
+  //then add the new features to the classifier used
+  if ( classifier != NULL )
+  { 
+    if (this->classifierString.compare("ClassifierGPHIK") == 0)    
+    {
+      classifier->addMultipleExamples ( this->newTrainExamples );
+    }    
+  }
+  else //vclassifier
+  {
+    //TODO
+  }
+  
+  timer.stop();
+  std::cerr << "AL time for actually updating the classifier: " << timer.getLast() << std::endl;
+  
+  std::cerr << "the current region to query is: " << currentRegionToQuery.first << " -- " << currentRegionToQuery.second << std::endl;
+  
+  //did we already query a region of this image?
+  if ( queriedRegions.find( currentRegionToQuery.first ) != queriedRegions.end() )
+  {
+    queriedRegions[ currentRegionToQuery.first ].insert(currentRegionToQuery.second);
+  }
+  else
+  {
+    std::set<int> tmpSet; tmpSet.insert(currentRegionToQuery.second);
+    queriedRegions.insert(std::pair<std::string,std::set<int> > (currentRegionToQuery.first, tmpSet ) );
+  }  
+  
+  std::cerr << "Write already queried regions: " << std::endl;
+  for (std::map<std::string,std::set<int> >::const_iterator it = queriedRegions.begin(); it != queriedRegions.end(); it++)
+  {
+    std::cerr << "image: " << it->first << " --   ";
+    for (std::set<int>::const_iterator itReg = it->second.begin(); itReg != it->second.end(); itReg++)
+    {
+      std::cerr << *itReg << " ";
+    } 
+    std::cerr << std::endl;
+  }
+  
+  //clear the latest results, since one iteration is over
+  globalMaxUncert = -numeric_limits<double>::max();
+  if (!mostNoveltyWithMaxScores)
+    globalMaxUncert = numeric_limits<double>::max();
+}
+
+const Examples * SemSegNoveltyBinary::getNovelExamples() const
+{
+  return &(this->newTrainExamples);
+}
+
+
+double SemSegNoveltyBinary::getAUCPerformance() const
+{
+  std::cerr << "evaluate AUC performance" << std::endl;
+  int noGTPositives ( 0 );
+  int noGTNegatives ( 0 );
+  
+  for (std::vector<OBJREC::ClassificationResult>::const_iterator it = resultsOfSingleRun.begin(); it != resultsOfSingleRun.end(); it++)
+  {
+    if (it->classno_groundtruth == 1)
+    {
+       noGTPositives++;
+    }
+    else
+      noGTNegatives++;
+  }
+  
+  std::cerr << "GT positives: " << noGTPositives << " -- GT negatives: " << noGTNegatives << std::endl;
+  
+  std::cerr << "ARR: " << resultsOfSingleRun.getAverageRecognitionRate() << std::endl;
+  
+  return resultsOfSingleRun.getBinaryClassPerformance( ClassificationResults::PERF_AUC ); 
+}
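
For reference, below is a minimal sketch (not part of this commit) of how the configuration entries read by the constructor above could be set up programmatically. The setter names sS / sI are assumed to parallel the sB(...) call used in the constructor, the include path follows the other core/basics headers, and the cache root is a hypothetical value; all other values are the defaults used in the gS/gI/gB calls above.

#include <core/basics/Config.h>   // include path assumed

NICE::Config buildExampleConfig()
{
  NICE::Config conf;
  // section read by SemSegNoveltyBinary
  conf.sS ( "SemSegNoveltyBinary", "classifier",        "ClassifierGPHIK" );
  conf.sS ( "SemSegNoveltyBinary", "noveltyMethod",     "gp-variance" );
  conf.sS ( "SemSegNoveltyBinary", "segmentation",      "none" );
  conf.sI ( "SemSegNoveltyBinary", "window_size",       10 );
  conf.sI ( "SemSegNoveltyBinary", "train_window_size", 10 );
  conf.sI ( "SemSegNoveltyBinary", "test_window_size",  10 );
  conf.sB ( "SemSegNoveltyBinary", "findMaximumUncert", true );
  conf.sB ( "SemSegNoveltyBinary", "visualizeALimages", false );
  // further sections queried in the constructor
  conf.sS ( "debug", "resultdir",     "result" );
  conf.sB ( "debug", "write_results", false );
  conf.sS ( "cache", "root", "/tmp/semseg_cache" );   // hypothetical path
  return conf;
}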

+ 245 - 0
semseg/SemSegNoveltyBinary.h

@@ -0,0 +1,245 @@
+/**
+ * @file SemSegNoveltyBinary.h
+ * @brief semantic segmentation using the method from Csurka08, extended by novelty detection and active querying of novel regions in a binary setting
+ * @author Björn Fröhlich, Alexander Freytag
+ * @date 04/24/2009
+ */
+#ifndef SemSegNoveltyBinaryINCLUDE
+#define SemSegNoveltyBinaryINCLUDE
+
+#include "SemanticSegmentation.h"
+
+#include "SemSegTools.h"
+#include "vislearning/classifier/classifierbase/FeaturePoolClassifier.h"
+#include "vislearning/classifier/genericClassifierSelection.h"
+#include "vislearning/features/localfeatures/LFColorWeijer.h"
+#include "vislearning/cbaselib/ClassificationResults.h"
+
+#include "segmentation/RegionSegmentationMethod.h"
+
+
+/** @brief pixelwise labeling systems */
+
+namespace OBJREC {
+
+class SemSegNoveltyBinary : public SemanticSegmentation
+{
+
+  protected:
+    //! boolean whether to reuse segmentation results for single images in different runs
+    bool reuseSegmentation;
+
+    //! boolean whether to read the initial classifier from a file. If not, training will be performed
+    bool read_classifier;
+    
+    //! boolean whether to save the final classifier or not
+    bool save_classifier;
+
+    //! root directory used for caching data (e.g., stored classifiers)
+    std::string cache;
+    
+    //! Classifier
+    FeaturePoolClassifier *classifier;
+    VecClassifier *vclassifier;
+    
+    //! feature extraction
+    LFColorWeijer *featExtract;
+    
+    //! Configuration File
+    const NICE::Config *conf;
+    
+    //! distance between features for training
+    int trainWsize;
+    
+    //! half of the window size for local features
+    int whs;
+    
+    //! rectangle size for classification, 1 means pixelwise
+    int testWSize;
+    
+    //! name of all classes
+    ClassNames cn;
+    
+    //! low level Segmentation method
+    RegionSegmentationMethod *regionSeg;
+    
+    //! set of forbidden/background classes for the initial training
+    std::set<int> forbidden_classesTrain;
+    //! set of forbidden/background classes for the whole process of learning over time
+    std::set<int> forbidden_classesActiveLearning;
+    //! store the class numbers currently used
+    std::set<int> classesInUse;
+    
+    //! only needed for binary scenarios, index of the positive class
+    int positiveClass;    
+    
+    //! the number of classes used for training (i.e., classesInUse.size() )
+    int numberOfClasses; 
+    
+    //! where to save the resulting images (uncertainty and classification results)
+    std::string resultdir;
+    
+    //! find the maximum uncertainty or not within the whole test set
+    bool findMaximumUncert;
+    
+    //! image with most uncertain region
+    NICE::ColorImage maskedImg;
+    
+    //! for debugging and visualization: show novelty images with and without region segmentation and the most novel region
+    bool visualizeALimages;
+    
+    //! maximum uncertainty over all images, i.e., the novelty score of the most "novel" region of all test images
+    double globalMaxUncert;
+    
+    //! determine whether a "novelty" method computes large scores for novel objects (e.g., variance), or small scores (e.g., min abs mean)
+    bool mostNoveltyWithMaxScores;
+    
+    //! current examples for most uncertain region
+    Examples newTrainExamples;
+    
+    //! contains filenames of images and indices of contained regions that were already queried, to prevent them from being queried again
+    std::map<std::string,std::set<int> > queriedRegions;
+        
+    std::pair<std::string, int> currentRegionToQuery;
+    
+    //! store the binary classification results from a single run to evaluate them with AUC later on
+    ClassificationResults resultsOfSingleRun;
+    
+    bool write_results;
+    
+    enum NoveltyMethod{
+      GPVARIANCE, // novel = large variance
+      GPUNCERTAINTY, //novel = small uncertainty (mean / var)
+      GPMINMEAN,  //novel = small mean
+      GPMEANRATIO,  //novel = small difference between mean of most plausible class and mean of second
+                   //        most plausible class (not useful in binary settings)
+      GPWEIGHTALL, // novel = large weight in alpha vector after updating the model (can be predicted exactly)
+      GPWEIGHTRATIO, // novel = small difference between weights for alpha vectors with assumptions of GT label to be the most 
+                    //         plausible against the second most plausible class
+      RANDOM        // query regions randomly
+    }; 
+    
+    //! specify how "novelty" shall be computed, e.g., using GP-variance, GP-uncertainty, or predicted weight entries
+    NoveltyMethod noveltyMethod;
+    std::string noveltyMethodString;
+    
+    //! just store the name of our classifier
+    std::string classifierString;
+    
+    inline void computeClassificationResults( const NICE::MultiChannelImageT<double> & feats, 
+                                                    NICE::Image & segresult,
+                                                    NICE::MultiChannelImageT<double> & probabilities,
+                                                    const int & xsize,
+                                                    const int & ysize,
+                                                    const int & featdim );
+
+   void computeNoveltyByRandom(         NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );    
+    
+   void computeNoveltyByVariance(       NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );
+   
+   void computeNoveltyByGPUncertainty ( NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );
+   
+   void computeNoveltyByGPMean        ( NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );  
+   void computeNoveltyByGPMeanRatio   ( NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );  
+   void computeNoveltyByGPWeightAll   ( NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );  
+   void computeNoveltyByGPWeightRatio ( NICE::FloatImage & noveltyImage, 
+                                  const NICE::MultiChannelImageT<double> & feats,  
+                                        NICE::Image & segresult,
+                                        NICE::MultiChannelImageT<double> & probabilities,
+                                  const int & xsize, const int & ysize, const int & featdim );     
+   
+  public:
+
+    /** constructor
+      *  @param conf a config file specifying all relevant settings
+      *  @param md a MultiDataset (contains images and other things)
+      */
+    SemSegNoveltyBinary ( const NICE::Config *conf, const MultiDataset *md );
+
+    /** simple destructor */
+    virtual ~SemSegNoveltyBinary();
+
+    /** The training step
+      *  @param md a MultiDataset (contains images and other things)
+      */
+    void train ( const MultiDataset *md );
+
+    /** The main procedure. Input: Image, Output: segmented image with pixelwise labels and the probabilities
+      * @param ce image data
+      * @param segresult result of the semantic segmentation with a label for each pixel
+      * @param probabilities multi-channel image with one channel for each class and corresponding probabilities for each pixel
+      */
+    void semanticseg ( CachedExample *ce,
+                       NICE::Image & segresult,
+                       NICE::MultiChannelImageT<double> & probabilities );
+    
+    
+    /**
+     * @brief visualize a specific region in the original image
+     *
+     * @param img input image
+     * @param regions map of the regions
+     * @param region visualize this region
+     * @param outimage result
+     * @return void
+     **/
+    void visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix &regions, int region, NICE::ColorImage &outimage);
+
+    /**
+     * @brief Add a new example to the known training data
+     *
+     * @param newExample (NICE::Vector) the feature vector of the new example
+     * @param newClassNo (int) the corresponding GT class number
+     * @return void
+     **/    
+    void addNewExample(const NICE::Vector & newExample, const int & newClassNo);
+    
+    /**
+     * @brief Add those examples, which belong to the most novel region seen so far
+     *
+     * @return void
+     **/    
+    virtual void addNovelExamples();    
+
+    /**
+     * @brief Get a pointer to the examples extracted from the most novel region seen so far
+     *
+     * @return Examples *
+     **/        
+    virtual const Examples * getNovelExamples() const; 
+    
+    /**
+     * @brief Compute AUC scores from the results of the images computed so far
+     *
+     * @return double
+     **/       
+    double getAUCPerformance() const;
+};
+
+} //namespace
+
+#endif
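
Finally, a minimal usage sketch (not part of this commit) of the public interface declared above. The constructor signatures of Config and MultiDataset as well as their include paths are assumptions based on their typical usage in this repository, and the config file name is a placeholder.

#include <iostream>

#include <core/basics/Config.h>                  // include paths assumed
#include <vislearning/cbaselib/MultiDataset.h>

#include "semseg/SemSegNoveltyBinary.h"

using namespace NICE;
using namespace OBJREC;

int main ( int argc, char **argv )
{
  // load settings; "semseg.conf" is a placeholder file name
  Config conf ( "semseg.conf" );
  MultiDataset md ( &conf );

  // training is performed inside the constructor (unless read_classifier is set)
  SemSegNoveltyBinary semseg ( &conf, &md );

  // for every test image one would fill a CachedExample, put the GT labels into
  // segresult, set the current filename via Globals, and then call
  //   semseg.semanticseg ( ce, segresult, probabilities );
  // which also accumulates the per-image novelty scores.

  // after a pass over the test set: add the examples of the most novel region
  // to the classifier and inspect what was queried
  semseg.addNovelExamples();
  const Examples *queried = semseg.getNovelExamples();
  std::cerr << "queried " << queried->size() << " examples, AUC so far: "
            << semseg.getAUCPerformance() << std::endl;

  return 0;
}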