// SemSegNoveltyBinary.cpp

#include <sstream>
#include <iostream>

#include "SemSegNoveltyBinary.h"

#include <core/image/FilterT.h>
#include <core/basics/numerictools.h>
#include <core/basics/StringTools.h>
#include <core/basics/Timer.h>

#include <gp-hik-exp/GPHIKClassifierNICE.h>

#include <vislearning/baselib/ICETools.h>
#include <vislearning/baselib/Globals.h>
#include <vislearning/features/fpfeatures/SparseVectorFeature.h>

#include "segmentation/GenericRegionSegmentationMethodSelection.h"

using namespace std;
using namespace NICE;
using namespace OBJREC;

SemSegNoveltyBinary::SemSegNoveltyBinary ( const Config *conf,
    const MultiDataset *md )
    : SemanticSegmentation ( conf, & ( md->getClassNames ( "train" ) ) )
{
  this->conf = conf;

  globalMaxUncert = -numeric_limits<double>::max();

  string section = "SemSegNoveltyBinary";

  featExtract = new LFColorWeijer ( conf );

  this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); // save and read segmentation results from files
  this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true );     // save the classifier to a file
  this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false );    // read the classifier from a file

  // write uncertainty results to the same folder as used for the segmentation results
  resultdir = conf->gS ( "debug", "resultdir", "result" );
  cache = conf->gS ( "cache", "root", "" );

  // work around for the const attribute
  Config confCopy = *conf;

  // just to make sure that we do NOT perform an optimization after every iteration step
  // this would just take a lot of time, which is not desired so far
  confCopy.sB ( "ClassifierGPHIK", "performOptimizationAfterIncrement", false );

  classifierString = conf->gS ( section, "classifier", "ClassifierGPHIK" );
  classifier = NULL;
  vclassifier = NULL;
  if ( classifierString.compare ( "ClassifierGPHIK" ) == 0 )
    classifier = new GPHIKClassifierNICE ( &confCopy, "ClassifierGPHIK" );
  else
    vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );

  findMaximumUncert = conf->gB ( section, "findMaximumUncert", true );
  whs = conf->gI ( section, "window_size", 10 );
  // distance to the next descriptor during training
  trainWsize = conf->gI ( section, "train_window_size", 10 );
  // distance to the next descriptor during testing
  testWSize = conf->gI ( section, "test_window_size", 10 );

  // select your segmentation method here
  string rsMethode = conf->gS ( section, "segmentation", "none" );

  if ( rsMethode == "none" )
  {
    regionSeg = NULL;
  }
  else
  {
    RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod ( conf, rsMethode );
    if ( reuseSegmentation )
      regionSeg = new RSCache ( conf, tmpRegionSeg );
    else
      regionSeg = tmpRegionSeg;
  }

  cn = md->getClassNames ( "train" );

  if ( read_classifier )
  {
    try
    {
      if ( classifier != NULL )
      {
        string classifierdst = "/classifier.data";
        fprintf ( stderr, "SemSegNoveltyBinary:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
        classifier->read ( cache + classifierdst );
      }
      else
      {
        string classifierdst = "/veccl.data";
        fprintf ( stderr, "SemSegNoveltyBinary:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
        vclassifier->read ( cache + classifierdst );
      }

      fprintf ( stderr, "SemSegNoveltyBinary:: successfully read\n" );
    }
    catch ( char *str )
    {
      cerr << "error reading data: " << str << endl;
    }
  }
  else
  {
    train ( md );
  }

  // define which measure of "novelty" we want to use
  noveltyMethodString = conf->gS ( section, "noveltyMethod", "gp-variance" );
  if ( noveltyMethodString.compare ( "gp-variance" ) == 0 ) // novel = large variance
  {
    this->noveltyMethod = GPVARIANCE;
    this->mostNoveltyWithMaxScores = true;
  }
  else if ( noveltyMethodString.compare ( "gp-uncertainty" ) == 0 ) // novel = large uncertainty (mean / var)
  {
    this->noveltyMethod = GPUNCERTAINTY;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if ( noveltyMethodString.compare ( "gp-mean" ) == 0 ) // novel = small mean
  {
    this->noveltyMethod = GPMINMEAN;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if ( noveltyMethodString.compare ( "gp-meanRatio" ) == 0 ) // novel = small difference between the mean of the most plausible class
                                                                  // and the mean of the second most plausible class (not useful in binary settings)
  {
    this->noveltyMethod = GPMEANRATIO;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if ( noveltyMethodString.compare ( "gp-weightAll" ) == 0 ) // novel = large weight in the alpha vector after updating the model (can be predicted exactly)
  {
    this->noveltyMethod = GPWEIGHTALL;
    this->mostNoveltyWithMaxScores = true;
  }
  else if ( noveltyMethodString.compare ( "gp-weightRatio" ) == 0 ) // novel = small difference between the weights of the alpha vectors,
                                                                    // assuming the GT label to be the most plausible class,
                                                                    // compared against the second most plausible class
  {
    this->noveltyMethod = GPWEIGHTRATIO;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if ( noveltyMethodString.compare ( "random" ) == 0 )
  {
    initRand();
    this->noveltyMethod = RANDOM;
  }
  else
  {
    this->noveltyMethod = GPVARIANCE;
    this->mostNoveltyWithMaxScores = true;
  }

  // we have not queried any region so far
  queriedRegions.clear();

  visualizeALimages = conf->gB ( section, "visualizeALimages", false );

  resultsOfSingleRun.clear();

  write_results = conf->gB ( "debug", "write_results", false );
}
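
// Illustrative config sketch (an assumption for documentation purposes, not shipped with this file):
// the section/key names below are exactly the ones queried in the constructor via gS/gB/gI/gD,
// the values are made-up examples; only the defaults given in the code are authoritative.
//
//   [SemSegNoveltyBinary]
//   classifier         = ClassifierGPHIK
//   noveltyMethod      = gp-variance      ; gp-uncertainty, gp-mean, gp-meanRatio, gp-weightAll, gp-weightRatio, random
//   segmentation       = none
//   window_size        = 10
//   train_window_size  = 10
//   test_window_size   = 10
//   findMaximumUncert  = true
//   visualizeALimages  = false
//
//   [FPCPixel]
//   reuseSegmentation = true
//   save_classifier   = true
//   read_classifier   = false
//
//   [GPHIK]
//   noise = 0.01
//
//   [debug]
//   resultdir     = result
//   write_results = false
//
//   [cache]
//   root = /tmp/semseg-cache              ; hypothetical path
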

SemSegNoveltyBinary::~SemSegNoveltyBinary()
{
  if ( newTrainExamples.size() > 0 )
  {
    // show the most uncertain region
    if ( visualizeALimages )
      showImage ( maskedImg );

    // incorporate the new information into the classifier
    if ( classifier != NULL )
    {
      classifier->addMultipleExamples ( newTrainExamples );
      // store the classifier, such that we can read it again in the next round (if we like that)
      classifier->save ( cache + "/classifier.data" );
    }
  }

  // clean-up
  if ( classifier != NULL )
    delete classifier;
  if ( vclassifier != NULL )
    delete vclassifier;
  if ( featExtract != NULL )
    delete featExtract;
}

void SemSegNoveltyBinary::visualizeRegion ( const NICE::ColorImage &img, const NICE::Matrix &regions, int region, NICE::ColorImage &outimage )
{
  std::vector<uchar> color;
  color.push_back ( 255 );
  color.push_back ( 0 );
  color.push_back ( 0 );

  int width = img.width();
  int height = img.height();

  outimage.resize ( width, height );

  for ( int y = 0; y < height; y++ )
  {
    for ( int x = 0; x < width; x++ )
    {
      if ( regions ( x, y ) == region )
      {
        for ( int c = 0; c < 3; c++ )
        {
          outimage ( x, y, c ) = color[c];
        }
      }
      else
      {
        for ( int c = 0; c < 3; c++ )
        {
          outimage ( x, y, c ) = img ( x, y, c );
        }
      }
    }
  }
}

void SemSegNoveltyBinary::train ( const MultiDataset *md )
{
  const LabeledSet train = * ( *md ) ["train"];
  const LabeledSet *trainp = &train;

  ////////////////////////
  // feature extraction //
  ////////////////////////

  // check the same thing for the training classes - this is very specific to our setup
  std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
  if ( forbidden_classesTrain_s == "" )
  {
    forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
  }
  cn.getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );

  // check whether we have a single positive class
  std::string positiveClass_s = conf->gS ( "SemSegNoveltyBinary", "positiveClass", "" );
  std::set<int> positiveClassNumberTmp;
  cn.getSelection ( positiveClass_s, positiveClassNumberTmp );

  std::cerr << "BINARY SETTING ENABLED! " << std::endl;
  switch ( positiveClassNumberTmp.size() )
  {
    case 0:
    {
      positiveClass = 0;
      std::cerr << "no positive class given, assume 0 as positive class" << std::endl;
      break;
    }
    case 1:
    {
      positiveClass = * ( positiveClassNumberTmp.begin() );
      std::cerr << "positive class will be number " << positiveClass << " with the name: " << positiveClass_s << std::endl;
      break;
    }
    default:
    {
      // we specified more than a single positive class. Right now, this is not what we are interested in,
      // but in theory we could also accept this and convert positiveClass into a set of ints of possible positive classes
      positiveClass = 0;
      std::cerr << "more than one positive class given, fall back to 0 as positive class" << std::endl;
      break;
    }
  }
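
  // Illustrative settings for the binary setup read above (key names taken from the gS calls, values are assumptions):
  //   [analysis]            donttrainTrain = unlabeled   ; classes excluded from training
  //   [SemSegNoveltyBinary] positiveClass  = car         ; hypothetical class name; its class number becomes 'positiveClass'
  // Every sampled pixel whose GT label equals positiveClass is pushed with label 1 below, everything else with label 0.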

  std::cerr << "============================" << std::endl << std::endl;

  ProgressBar pb ( "Local Feature Extraction" );
  pb.show();

  int imgnb = 0;

  Examples examples;
  examples.filename = "training";

  int featdim = -1;

  classesInUse.clear();

  LOOP_ALL_S ( *trainp )
  {
    //EACH_S(classno, currentFile);
    EACH_INFO ( classno, info );

    std::string currentFile = info.img();

    CachedExample *ce = new CachedExample ( currentFile );

    const LocalizationResult *locResult = info.localization();
    if ( locResult->size() <= 0 )
    {
      fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
                currentFile.c_str() );
      continue;
    }

    int xsize, ysize;
    ce->getImageSize ( xsize, ysize );

    Image labels ( xsize, ysize );
    labels.set ( 0 );
    locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );

    NICE::ColorImage img;
    try {
      img = ColorImage ( currentFile );
    } catch ( Exception ) {
      cerr << "SemSegNoveltyBinary: error opening image file <" << currentFile << ">" << endl;
      continue;
    }

    Globals::setCurrentImgFN ( currentFile );

    MultiChannelImageT<double> feats;

    // extract features
    featExtract->getFeats ( img, feats );
    featdim = feats.channels();
    feats.addChannel ( featdim );

    for ( int c = 0; c < featdim; c++ )
    {
      ImageT<double> tmp = feats[c];
      ImageT<double> tmp2 = feats[c+featdim];
      NICE::FilterT<double, double, double>::gradientStrength ( tmp, tmp2 );
    }
    featdim += featdim;

    // compute integral images
    for ( int c = 0; c < featdim; c++ )
    {
      feats.calcIntegral ( c );
    }
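
    // Sketch of what the integral images are used for (an assumption about the NICE MultiChannelImageT API,
    // based solely on how it is called in this file): after calcIntegral(c), the window lookup
    //   feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f )
    // used below returns the sum of channel f over the window around (x,y) in O(1), i.e. the classical
    //   I(x2,y2) - I(x1-1,y2) - I(x2,y1-1) + I(x1-1,y1-1)
    // lookup on the integral image I.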

    for ( int y = 0; y < ysize; y += trainWsize )
    {
      for ( int x = 0; x < xsize; x += trainWsize )
      {
        int classnoTmp = labels.getPixel ( x, y );

        if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
        {
          continue;
        }

        if ( classesInUse.find ( classnoTmp ) == classesInUse.end() )
        {
          classesInUse.insert ( classnoTmp );
        }

        Example example;
        example.vec = NULL;
        example.svec = new SparseVector ( featdim );

        for ( int f = 0; f < featdim; f++ )
        {
          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
          if ( val > 1e-10 )
            ( *example.svec ) [f] = val;
        }
        example.svec->normalize();

        example.position = imgnb;

        if ( classnoTmp == positiveClass )
          examples.push_back ( pair<int, Example> ( 1, example ) );
        else
          examples.push_back ( pair<int, Example> ( 0, example ) );
      }
    }

    delete ce;
    imgnb++;
    pb.update ( trainp->count() );
  }

  numberOfClasses = classesInUse.size();
  std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
  std::cerr << "classes in use: " << std::endl;
  for ( std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++ )
  {
    std::cerr << *it << " : " << cn.text ( *it ) << " ";
  }
  std::cerr << std::endl;

  pb.hide();

  //////////////////////
  // train classifier //
  //////////////////////
  FeaturePool fp;

  Feature *f = new SparseVectorFeature ( featdim );
  f->explode ( fp );
  delete f;

  if ( classifier != NULL )
  {
    std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
    classifier->train ( fp, examples );
    std::cerr << "training finished" << std::endl;
  }
  else
  {
    LabeledSetVector lvec;
    convertExamplesToLSet ( examples, lvec );
    vclassifier->teach ( lvec );
    // if ( usegmm )
    //   convertLSetToSparseExamples ( examples, lvec );
    // else
    std::cerr << "classifierString: " << classifierString << std::endl;
    if ( this->classifierString.compare ( "nn" ) == 0 )
    {
      convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct */ );
    }
    else
    {
      convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */ );
    }
    vclassifier->finishTeaching();
  }

  fp.destroy();

  if ( save_classifier )
  {
    if ( classifier != NULL )
      classifier->save ( cache + "/classifier.data" );
    else
      vclassifier->save ( cache + "/veccl.data" );
  }

  ////////////
  // clean up//
  ////////////
  for ( int i = 0; i < ( int ) examples.size(); i++ )
  {
    examples[i].second.clean();
  }
  examples.clear();

  cerr << "SemSeg training finished" << endl;
}

void SemSegNoveltyBinary::semanticseg ( CachedExample *ce, NICE::Image & segresult, NICE::MultiChannelImageT<double> & probabilities )
{
  Timer timer;
  timer.start();

  // segresult contains the GT labels when this method is called;
  // we simply store them in labels to have easy access to the GT information later on
  Image labels = segresult;
  // just to be sure that we do not have a GT-biased result :)
  segresult.set ( 0 );

  int featdim = -1;

  std::string currentFile = Globals::getCurrentImgFN();

  int xsize, ysize;
  ce->getImageSize ( xsize, ysize );

  probabilities.reInit ( xsize, ysize, 2 );
  probabilities.setAll ( 0.0 );

  NICE::ColorImage img;
  try {
    img = ColorImage ( currentFile );
  } catch ( Exception ) {
    cerr << "SemSegNoveltyBinary: error opening image file <" << currentFile << ">" << endl;
    return;
  }

  MultiChannelImageT<double> feats;

  // extract features
  featExtract->getFeats ( img, feats );
  featdim = feats.channels();
  feats.addChannel ( featdim );

  for ( int c = 0; c < featdim; c++ )
  {
    ImageT<double> tmp = feats[c];
    ImageT<double> tmp2 = feats[c+featdim];
    NICE::FilterT<double, double, double>::gradientStrength ( tmp, tmp2 );
  }
  featdim += featdim;

  // compute integral images
  for ( int c = 0; c < featdim; c++ )
  {
    feats.calcIntegral ( c );
  }

  timer.stop();
  std::cout << "AL time for preparation: " << timer.getLastAbsolute() << std::endl;

  timer.start();
  // classification results currently only need to be computed separately if we use the vclassifier, i.e., the nearest neighbor
  // used for the "novel feature learning" approach;
  // in all other settings, such as active semantic segmentation in general, we do this within the novelty-computation methods
  if ( classifier == NULL )
  {
    this->computeClassificationResults ( feats, segresult, probabilities, xsize, ysize, featdim );
  }
  // timer.stop();
  //
  // std::cerr << "classification results computed" << std::endl;

  FloatImage noveltyImage ( xsize, ysize );
  noveltyImage.set ( 0.0 );

  switch ( noveltyMethod )
  {
    case GPVARIANCE:
    {
      this->computeNoveltyByVariance ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPUNCERTAINTY:
    {
      this->computeNoveltyByGPUncertainty ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPMINMEAN:
    {
      std::cerr << "compute novelty using the minimum mean" << std::endl;
      this->computeNoveltyByGPMean ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPMEANRATIO:
    {
      this->computeNoveltyByGPMeanRatio ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPWEIGHTALL:
    {
      this->computeNoveltyByGPWeightAll ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPWEIGHTRATIO:
    {
      this->computeNoveltyByGPWeightRatio ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case RANDOM:
    {
      this->computeNoveltyByRandom ( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    default:
    {
      // do nothing, keep the image constant at 0.0
      break;
    }
  }

  timer.stop();
  std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;

  if ( write_results || visualizeALimages )
  {
    // use separate images for the novelty scores and the classification result,
    // so that the second visualization does not overwrite the first one
    ColorImage imgrgbTmp ( xsize, ysize );
    ICETools::convertToRGB ( noveltyImage, imgrgbTmp );

    ColorImage imgrgbClassTmp ( xsize, ysize );
    this->cn.labelToRGB ( segresult, imgrgbClassTmp );

    if ( write_results )
    {
      std::stringstream out;
      std::vector< std::string > list2;
      StringTools::split ( currentFile, '/', list2 );
      out << resultdir << "/" << list2.back();

      // std::cerr << "writing to " << out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString + "_unsmoothed.rawfloat" << std::endl;
      noveltyImage.writeRaw ( "run_" + NICE::intToString ( this->iterationCountSuffix ) + "_" + out.str() + "_" + noveltyMethodString + "_unsmoothed.rawfloat" );
    }

    if ( visualizeALimages )
    {
      showImage ( imgrgbTmp, "Novelty Image without Region Segmentation" );
      showImage ( imgrgbClassTmp, "Classification Result without Region Segmentation" );
    }
  }

  timer.start();

  // determine regions
  if ( regionSeg != NULL )
  {
    NICE::Matrix mask;
    int amountRegions = regionSeg->segRegions ( img, mask );

    // compute probs per region
    std::vector<std::vector<double> > regionProb ( amountRegions, std::vector<double> ( probabilities.channels(), 0.0 ) );
    std::vector<double> regionNoveltyMeasure ( amountRegions, 0.0 );
    std::vector<int> regionCounter ( amountRegions, 0 );
    std::vector<int> regionCounterNovelty ( amountRegions, 0 );
    for ( int y = 0; y < ysize; y += trainWsize ) //y++)
    {
      for ( int x = 0; x < xsize; x += trainWsize ) //x++)
      {
        int r = mask ( x, y );
        regionCounter[r]++;
        for ( int j = 0; j < probabilities.channels(); j++ )
        {
          regionProb[r][j] += probabilities ( x, y, j );
        }

        if ( forbidden_classesActiveLearning.find ( labels ( x, y ) ) == forbidden_classesActiveLearning.end() )
        {
          // count the amount of "novelty" for the corresponding region
          regionNoveltyMeasure[r] += noveltyImage ( x, y );
          regionCounterNovelty[r]++;
        }
      }
    }

    // find the best class per region
    std::vector<int> bestClassPerRegion ( amountRegions, 0 );

    double maxNoveltyScore = -numeric_limits<double>::max();
    if ( !mostNoveltyWithMaxScores )
    {
      maxNoveltyScore = numeric_limits<double>::max();
    }

    int maxUncertRegion = -1;

    // loop over all regions and compute averaged novelty scores
    for ( int r = 0; r < amountRegions; r++ )
    {
      // check for the most plausible class per region
      double maxval = -numeric_limits<double>::max();

      // loop over all classes
      for ( int c = 0; c < probabilities.channels(); c++ )
      {
        regionProb[r][c] /= regionCounter[r];

        if ( ( maxval < regionProb[r][c] ) ) //&& (regionProb[r][c] != 0.0) )
        {
          maxval = regionProb[r][c];
          bestClassPerRegion[r] = c;
        }
      }

      // if the region only contains invalid information (e.g., background), skip it
      if ( regionCounterNovelty[r] == 0 )
      {
        continue;
      }

      // normalize the summed novelty scores to the region size
      regionNoveltyMeasure[r] /= regionCounterNovelty[r];

      // did we find a region with a higher score than the most novel region known so far within this image?
      if ( ( mostNoveltyWithMaxScores && ( maxNoveltyScore < regionNoveltyMeasure[r] ) )      // if we look for large novelty scores, e.g., variance
           || ( !mostNoveltyWithMaxScores && ( maxNoveltyScore > regionNoveltyMeasure[r] ) ) ) // if we look for small novelty scores, e.g., min mean
      {
        // did we already query a region of this image? -- and it was this specific region
        if ( ( queriedRegions.find ( currentFile ) != queriedRegions.end() ) && ( queriedRegions[currentFile].find ( r ) != queriedRegions[currentFile].end() ) )
        {
          continue;
        }
        else // only accept the region as novel if we never queried it before
        {
          maxNoveltyScore = regionNoveltyMeasure[r];
          maxUncertRegion = r;
        }
      }
    }
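
    // Recap of the region score computed above (just restating the loops, no new logic):
    //   regionNoveltyMeasure[r] = ( sum of noveltyImage(x,y) over all sampled pixels of region r
    //                               whose GT label is not in forbidden_classesActiveLearning )
    //                             / regionCounterNovelty[r]
    // and the candidate region is the one with the largest (or smallest, depending on
    // mostNoveltyWithMaxScores) such average that has not been queried before.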

    // after finding the most novel region for the current image, check whether this region is also the most novel with respect
    // to all previously seen test images;
    // if so, store the corresponding features, since we want to "actively" query them to incorporate useful information
    if ( findMaximumUncert )
    {
      if ( ( mostNoveltyWithMaxScores && ( maxNoveltyScore > globalMaxUncert ) )
           || ( !mostNoveltyWithMaxScores && ( maxNoveltyScore < globalMaxUncert ) ) )
      {
        // the current most novel region of the image has a "higher" novelty score than the previous most novel region of all test images worked on so far
        // -> save the new important features of this region
        Examples examples;

        for ( int y = 0; y < ysize; y += trainWsize )
        {
          for ( int x = 0; x < xsize; x += trainWsize )
          {
            if ( mask ( x, y ) == maxUncertRegion )
            {
              int classnoTmp = labels ( x, y );
              if ( forbidden_classesActiveLearning.find ( classnoTmp ) != forbidden_classesActiveLearning.end() )
                continue;

              Example example;
              example.vec = NULL;
              example.svec = new SparseVector ( featdim );

              for ( int f = 0; f < featdim; f++ )
              {
                double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
                if ( val > 1e-10 )
                  ( *example.svec ) [f] = val;
              }
              example.svec->normalize();

              if ( classnoTmp == positiveClass )
                examples.push_back ( pair<int, Example> ( 1, example ) );
              else
                examples.push_back ( pair<int, Example> ( 0, example ) );
            }
          }
        }

        if ( examples.size() > 0 )
        {
          std::cerr << "found " << examples.size() << " new examples in the queried region" << std::endl << std::endl;
          newTrainExamples.clear();
          newTrainExamples = examples;
          globalMaxUncert = maxNoveltyScore;
          // prepare for later visualization
          visualizeRegion ( img, mask, maxUncertRegion, maskedImg );
        }
        else
        {
          std::cerr << "the queried region has no valid information" << std::endl << std::endl;
        }

        // save the filename and the region index
        currentRegionToQuery.first = currentFile;
        currentRegionToQuery.second = maxUncertRegion;
      }
    }

    // write back the best results per region,
    // i.e., write the normalized novelty scores for every region into the novelty image
    for ( int y = 0; y < ysize; y++ )
    {
      for ( int x = 0; x < xsize; x++ )
      {
        int r = mask ( x, y );
        for ( int j = 0; j < probabilities.channels(); j++ )
        {
          probabilities ( x, y, j ) = regionProb[r][j];
        }

        if ( bestClassPerRegion[r] == 0 )
          segresult ( x, y ) = positiveClass;
        else // take the "various" class as negative
          segresult ( x, y ) = 22; //bestClassPerRegion[r];

        // write novelty scores for every segment into the "final" image
        noveltyImage ( x, y ) = regionNoveltyMeasure[r];
      }
    }

    // compute the classification results used later for the AUC evaluation
    for ( int y = 0; y < ysize; y++ )
    {
      for ( int x = 0; x < xsize; x++ )
      {
        OBJREC::FullVector scoresTmp ( 2 );

        scoresTmp[1] = probabilities ( x, y, 0 ); //probabilities[0] == negative class == scores[1]
        scoresTmp[0] = probabilities ( x, y, 1 ); //probabilities[1] == positive class == scores[0]

        int cno = scoresTmp[1] > 0 ? 1 : 0;

        ClassificationResult cr ( cno /*doesn't matter*/, scoresTmp );

        if ( labels ( x, y ) == positiveClass )
          cr.classno_groundtruth = 1;
        else
          cr.classno_groundtruth = 0;

        resultsOfSingleRun.push_back ( cr );
      }
    }
  } // if regionSeg != NULL

  timer.stop();
  std::cout << "AL time for determination of novel regions: " << timer.getLastAbsolute() << std::endl;

  timer.start();

  ColorImage imgrgb ( xsize, ysize );

  if ( write_results )
  {
    std::stringstream out;
    std::vector< std::string > list2;
    StringTools::split ( currentFile, '/', list2 );
    out << resultdir << "/" << list2.back();

    noveltyImage.writeRaw ( out.str() + "_run_" + NICE::intToString ( this->iterationCountSuffix ) + "_" + noveltyMethodString + ".rawfloat" );
  }

  if ( visualizeALimages )
  {
    ICETools::convertToRGB ( noveltyImage, imgrgb );
    showImage ( imgrgb, "Novelty Image" );
    ColorImage tmp ( xsize, ysize );
    cn.labelToRGB ( segresult, tmp );
    showImage ( tmp, "Cl result after region seg" );
  }

  timer.stop();
  cout << "AL time for writing the raw novelty image: " << timer.getLastAbsolute() << endl;
}

inline void SemSegNoveltyBinary::computeClassificationResults ( const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize,
    const int & ysize,
    const int & featdim
                                                               )
{
  std::cerr << "featdim: " << featdim << std::endl;

  if ( classifier != NULL )
  {
#pragma omp parallel for
    for ( int y = 0; y < ysize; y += testWSize )
    {
      Example example;
      example.vec = NULL;
      example.svec = new SparseVector ( featdim );
      for ( int x = 0; x < xsize; x += testWSize )
      {
        for ( int f = 0; f < featdim; f++ )
        {
          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
          if ( val > 1e-10 )
            ( *example.svec ) [f] = val;
        }
        example.svec->normalize();

        ClassificationResult cr = classifier->classify ( example );

        int xs = std::max ( 0, x - testWSize / 2 );
        int xe = std::min ( xsize - 1, x + testWSize / 2 );
        int ys = std::max ( 0, y - testWSize / 2 );
        int ye = std::min ( ysize - 1, y + testWSize / 2 );
        for ( int yl = ys; yl <= ye; yl++ )
        {
          for ( int xl = xs; xl <= xe; xl++ )
          {
            for ( int j = 0 ; j < cr.scores.size(); j++ )
            {
              probabilities ( xl, yl, j ) = cr.scores[j];
            }

            if ( cr.classno == 1 )
              segresult ( xl, yl ) = positiveClass;
            else
              segresult ( xl, yl ) = 22; //various
          }
        }
        example.svec->clear();
      }
      delete example.svec;
      example.svec = NULL;
    }
  }
  else //vclassifier
  {
    std::cerr << "compute classification results with vclassifier" << std::endl;
#pragma omp parallel for
    for ( int y = 0; y < ysize; y += testWSize )
    {
      for ( int x = 0; x < xsize; x += testWSize )
      {
        NICE::Vector v ( featdim );
        for ( int f = 0; f < featdim; f++ )
        {
          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
          v[f] = val;
        }
        v.normalizeL1();

        ClassificationResult cr = vclassifier->classify ( v );

        int xs = std::max ( 0, x - testWSize / 2 );
        int xe = std::min ( xsize - 1, x + testWSize / 2 );
        int ys = std::max ( 0, y - testWSize / 2 );
        int ye = std::min ( ysize - 1, y + testWSize / 2 );
        for ( int yl = ys; yl <= ye; yl++ )
        {
          for ( int xl = xs; xl <= xe; xl++ )
          {
            for ( int j = 0 ; j < cr.scores.size(); j++ )
            {
              probabilities ( xl, yl, j ) = cr.scores[j];
            }

            if ( cr.classno == 1 )
              segresult ( xl, yl ) = positiveClass;
            else
              segresult ( xl, yl ) = 22; //various
          }
        }
      }
    }
  }
}

// compute novelty images depending on the strategy chosen
void SemSegNoveltyBinary::computeNoveltyByRandom ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );

      double randVal = randDouble();

      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1 )
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == 1 )
            segresult ( xl, yl ) = positiveClass;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = randVal;
        }
      }
      // reset the sparse feature vector for the next position, as done in the other novelty methods
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByVariance ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );
      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1 )
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == 1 )
            segresult ( xl, yl ) = positiveClass;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = cr.uncertainty;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByGPUncertainty ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD ( "GPHIK", "noise", 0.01 );

#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double gpMeanVal = abs ( cr.scores[0] ); // very specific to the binary setting

      double firstTerm ( 1.0 / sqrt ( cr.uncertainty + gpNoise ) );

      // compute the heuristic GP-UNCERTAINTY, as proposed by Kapoor et al. in IJCV 2010
      // GP-UNCERTAINTY : |mean| / sqrt(variance + gpNoise)
      double gpUncertaintyVal = gpMeanVal * firstTerm; // firstTerm = 1.0 / sqrt(cr.uncertainty + gpNoise)

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );
      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1 )
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = gpUncertaintyVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByGPMean ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD ( "GPHIK", "noise", 0.01 );

#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double gpMeanVal = abs ( cr.scores[0] ); // very specific to the binary setting

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );
      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == 1 )
            segresult ( xl, yl ) = positiveClass;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = gpMeanVal;
        }
      }
      // reset the sparse feature vector for the next position, as done in the other novelty methods
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByGPMeanRatio ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD ( "GPHIK", "noise", 0.01 );

  // NOTE: in binary settings, this is the same as 2*|mean|
#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      // look at the difference in the absolute mean values for the most plausible class
      // and the second most plausible class
      double gpMeanRatioVal = 2 * abs ( cr.scores[0] ); // very specific to the binary setting

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );
      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1 )
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = gpMeanRatioVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByGPWeightAll ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD ( "GPHIK", "noise", 0.01 );

#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double firstTerm ( 1.0 / sqrt ( cr.uncertainty + gpNoise ) );

      double gpWeightAllVal ( 0.0 );
      // binary scenario
      gpWeightAllVal = std::min ( abs ( cr.scores[0] + 1 ), abs ( cr.scores[0] - 1 ) );
      gpWeightAllVal *= firstTerm;
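
      // Interpretation sketch (an assumption, not verified against the GPHIK implementation):
      // min( |f(x)+1|, |f(x)-1| ) is the distance of the predictive mean f(x) = cr.scores[0]
      // to the closer of the two binary labels {-1,+1}; scaled by 1/sqrt(uncertainty + noise),
      // this approximates the magnitude of the alpha weight the example would obtain if it were
      // added to the model with that label.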

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );
      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1 )
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = gpWeightAllVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByGPWeightRatio ( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::Image & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD ( "GPHIK", "noise", 0.01 );

  // NOTE: in binary settings, this is the same as 2*weightAll
#pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize )
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double firstTerm ( 1.0 / sqrt ( cr.uncertainty + gpNoise ) );

      double gpWeightRatioVal ( 0.0 );
      // binary scenario
      gpWeightRatioVal = std::min ( abs ( cr.scores[0] + 1 ), abs ( cr.scores[0] - 1 ) );
      gpWeightRatioVal *= 2 * firstTerm;

      int xs = std::max ( 0, x - testWSize / 2 );
      int xe = std::min ( xsize - 1, x + testWSize / 2 );
      int ys = std::max ( 0, y - testWSize / 2 );
      int ye = std::min ( ysize - 1, y + testWSize / 2 );
      for ( int yl = ys; yl <= ye; yl++ )
      {
        for ( int xl = xs; xl <= xe; xl++ )
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1 )
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }

          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various

          noveltyImage ( xl, yl ) = gpWeightRatioVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::addNewExample ( const NICE::Vector& newExample, const int & newClassNo )
{
  // accept the new class as valid information
  if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
  {
    forbidden_classesTrain.erase ( newClassNo );
    numberOfClasses++;
  }
  if ( classesInUse.find ( newClassNo ) == classesInUse.end() )
  {
    classesInUse.insert ( newClassNo );
  }

  // then add it to the classifier used
  if ( classifier != NULL )
  {
    //TODO
  }
  else //vclassifier
  {
    if ( this->classifierString.compare ( "nn" ) == 0 )
    {
      vclassifier->teach ( newClassNo, newExample );
    }
  }
}

void SemSegNoveltyBinary::addNovelExamples()
{
  Timer timer;

  // show the image that contains the most novel region
  if ( visualizeALimages )
    showImage ( maskedImg, "Most novel region" );

  timer.start();

  std::stringstream out;
  std::vector< std::string > list;
  StringTools::split ( currentRegionToQuery.first, '/', list );
  out << resultdir << "/" << list.back();

  maskedImg.writePPM ( out.str() + "_run_" + NICE::intToString ( this->iterationCountSuffix ) + "_" + noveltyMethodString + "_query.ppm" );

  timer.stop();
  std::cerr << "AL time for writing queried image: " << timer.getLast() << std::endl;

  timer.start();

  // check which classes will be added using the features from the novel region
  std::set<int> newClassNumbers;
  newClassNumbers.clear(); //just to be sure
  for ( uint i = 0 ; i < newTrainExamples.size() ; i++ )
  {
    if ( newClassNumbers.find ( newTrainExamples[i].first /* classNumber*/ ) == newClassNumbers.end() )
    {
      newClassNumbers.insert ( newTrainExamples[i].first );
    }
  }

  // accept the new classes as valid information
  for ( std::set<int>::const_iterator clNoIt = newClassNumbers.begin(); clNoIt != newClassNumbers.end(); clNoIt++ )
  {
    if ( forbidden_classesTrain.find ( *clNoIt ) != forbidden_classesTrain.end() )
    {
      forbidden_classesTrain.erase ( *clNoIt );
      numberOfClasses++;
    }
    if ( classesInUse.find ( *clNoIt ) == classesInUse.end() )
    {
      classesInUse.insert ( *clNoIt );
    }
  }

  timer.stop();
  std::cerr << "AL time for accepting possible new classes: " << timer.getLast() << std::endl;

  timer.start();

  // then add the new features to the classifier used
  if ( classifier != NULL )
  {
    if ( this->classifierString.compare ( "ClassifierGPHIK" ) == 0 )
    {
      classifier->addMultipleExamples ( this->newTrainExamples );
    }
  }
  else //vclassifier
  {
    //TODO
  }

  timer.stop();
  std::cerr << "AL time for actually updating the classifier: " << timer.getLast() << std::endl;

  std::cerr << "the current region to query is: " << currentRegionToQuery.first << " -- " << currentRegionToQuery.second << std::endl;

  // did we already query a region of this image?
  if ( queriedRegions.find ( currentRegionToQuery.first ) != queriedRegions.end() )
  {
    queriedRegions[ currentRegionToQuery.first ].insert ( currentRegionToQuery.second );
  }
  else
  {
    std::set<int> tmpSet;
    tmpSet.insert ( currentRegionToQuery.second );
    queriedRegions.insert ( std::pair<std::string, std::set<int> > ( currentRegionToQuery.first, tmpSet ) );
  }

  std::cerr << "Write already queried regions: " << std::endl;
  for ( std::map<std::string, std::set<int> >::const_iterator it = queriedRegions.begin(); it != queriedRegions.end(); it++ )
  {
    std::cerr << "image: " << it->first << " -- ";
    for ( std::set<int>::const_iterator itReg = it->second.begin(); itReg != it->second.end(); itReg++ )
    {
      std::cerr << *itReg << " ";
    }
    std::cerr << std::endl;
  }

  // clear the latest results, since one iteration is over
  globalMaxUncert = -numeric_limits<double>::max();
  if ( !mostNoveltyWithMaxScores )
    globalMaxUncert = numeric_limits<double>::max();
}

const Examples * SemSegNoveltyBinary::getNovelExamples() const
{
  return & ( this->newTrainExamples );
}

double SemSegNoveltyBinary::getAUCPerformance() const
{
  std::cerr << "evaluate AUC performance" << std::endl;

  int noGTPositives ( 0 );
  int noGTNegatives ( 0 );

  for ( std::vector<OBJREC::ClassificationResult>::const_iterator it = resultsOfSingleRun.begin(); it != resultsOfSingleRun.end(); it++ )
  {
    if ( it->classno_groundtruth == 1 )
    {
      noGTPositives++;
    }
    else
      noGTNegatives++;
  }
  std::cerr << "GT positives: " << noGTPositives << " -- GT negatives: " << noGTNegatives << std::endl;
  std::cerr << "ARR: " << resultsOfSingleRun.getAverageRecognitionRate() << std::endl;

  return resultsOfSingleRun.getBinaryClassPerformance ( ClassificationResults::PERF_AUC );
}
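
// Usage sketch (an assumption about the surrounding driver code, which is not part of this file):
//
//   Config conf ( "semseg.conf" );                      // hypothetical config file, cf. the sketch above the destructor
//   MultiDataset md ( &conf );
//   SemSegNoveltyBinary semseg ( &conf, &md );          // trains the classifier or reads it from the cache
//
//   // for every test image: segresult initially holds the GT labels (see semanticseg)
//   semseg.semanticseg ( ce, segresult, probabilities );
//
//   double auc = semseg.getAUCPerformance();            // binary AUC over all pixels of this run
//   semseg.addNovelExamples();                          // query the globally most novel region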