// SemSegNoveltyBinary.cpp

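/**
 * Semantic segmentation with a binary (positive class vs. rest) classifier that also
 * computes a per-pixel novelty score; the most novel, not yet queried region of the test
 * images is remembered so that it can be actively queried and added to the classifier.
 *
 * Configuration keys read by the constructor -- a rough, illustrative sketch of a
 * NICE::Config file, with the defaults used below (the exact config syntax may differ):
 *
 *   [SemSegNoveltyBinary]
 *   classifier        = ClassifierGPHIK
 *   noveltyMethod     = gp-variance
 *   positiveClass     =
 *   findMaximumUncert = true
 *   window_size       = 10
 *   train_window_size = 10
 *   test_window_size  = 10
 *   segmentation      = none
 *   visualizeALimages = false
 *
 *   [FPCPixel]
 *   reuseSegmentation = true
 *   save_classifier   = true
 *   read_classifier   = false
 *
 *   [debug]
 *   resultdir         = result
 *   write_results     = false
 *
 *   [cache]
 *   root              =
 *
 * Valid values for noveltyMethod: gp-variance, gp-uncertainty, gp-mean, gp-meanRatio,
 * gp-weightAll, gp-weightRatio, random (see the constructor for what each one means).
 */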
#include <sstream>
#include <iostream>

#include "SemSegNoveltyBinary.h"

#include <core/image/FilterT.h>
#include <core/basics/numerictools.h>
#include <core/basics/StringTools.h>
#include <core/basics/Timer.h>

#include <vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h>
#include <vislearning/baselib/ICETools.h>
#include <vislearning/baselib/Globals.h>
#include <vislearning/features/fpfeatures/SparseVectorFeature.h>

#include "segmentation/GenericRegionSegmentationMethodSelection.h"

using namespace std;
using namespace NICE;
using namespace OBJREC;

SemSegNoveltyBinary::SemSegNoveltyBinary ( const Config *conf,
    const MultiDataset *md )
    : SemanticSegmentation ( conf, & ( md->getClassNames ( "train" ) ) )
{
  this->conf = conf;
  globalMaxUncert = -numeric_limits<double>::max();

  string section = "SemSegNoveltyBinary";
  featExtract = new LocalFeatureColorWeijer ( conf );

  this->reuseSegmentation = conf->gB ( "FPCPixel", "reuseSegmentation", true ); //save and read segmentation results from files
  this->save_classifier = conf->gB ( "FPCPixel", "save_classifier", true ); //save the classifier to a file
  this->read_classifier = conf->gB ( "FPCPixel", "read_classifier", false ); //read the classifier from a file

  //write uncertainty results in the same folder as done for the segmentation results
  resultdir = conf->gS("debug", "resultdir", "result");
  cache = conf->gS ( "cache", "root", "" );

  //stupid work around of the const attribute
  Config confCopy = *conf;
  //just to make sure, that we do NOT perform an optimization after every iteration step
  //this would just take a lot of time, which is not desired so far
  confCopy.sB("ClassifierGPHIK","performOptimizationAfterIncrement",false);

  classifierString = conf->gS ( section, "classifier", "ClassifierGPHIK" );
  classifier = NULL;
  vclassifier = NULL;
  if ( classifierString.compare("ClassifierGPHIK") == 0)
    classifier = new FPCGPHIK ( &confCopy, "ClassifierGPHIK" );
  else
    vclassifier = GenericClassifierSelection::selectVecClassifier ( conf, classifierString );

  findMaximumUncert = conf->gB(section, "findMaximumUncert", true);
  whs = conf->gI ( section, "window_size", 10 );
  //distance to next descriptor during training
  trainWsize = conf->gI ( section, "train_window_size", 10 );
  //distance to next descriptor during testing
  testWSize = conf->gI (section, "test_window_size", 10);

  // select your segmentation method here
  string rsMethode = conf->gS ( section, "segmentation", "none" );
  if(rsMethode == "none")
  {
    regionSeg = NULL;
  }
  else
  {
    RegionSegmentationMethod *tmpRegionSeg = GenericRegionSegmentationMethodSelection::selectRegionSegmentationMethod(conf, rsMethode);
    if ( reuseSegmentation )
      regionSeg = new RSCache ( conf, tmpRegionSeg );
    else
      regionSeg = tmpRegionSeg;
  }

  cn = md->getClassNames ( "train" );

  if ( read_classifier )
  {
    try
    {
      if ( classifier != NULL )
      {
        string classifierdst = "/classifier.data";
        fprintf ( stderr, "SemSegNoveltyBinary:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
        classifier->read ( cache + classifierdst );
      }
      else
      {
        string classifierdst = "/veccl.data";
        fprintf ( stderr, "SemSegNoveltyBinary:: Reading classifier data from %s\n", ( cache + classifierdst ).c_str() );
        vclassifier->read ( cache + classifierdst );
      }
      fprintf ( stderr, "SemSegNoveltyBinary:: successfully read\n" );
    }
    catch ( char *str )
    {
      cerr << "error reading data: " << str << endl;
    }
  }
  else
  {
    train ( md );
  }

  //define which measure for "novelty" we want to use
  noveltyMethodString = conf->gS( section, "noveltyMethod", "gp-variance");
  if (noveltyMethodString.compare("gp-variance") == 0) // novel = large variance
  {
    this->noveltyMethod = GPVARIANCE;
    this->mostNoveltyWithMaxScores = true;
  }
  else if (noveltyMethodString.compare("gp-uncertainty") == 0) //novel = large uncertainty (mean / var)
  {
    this->noveltyMethod = GPUNCERTAINTY;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if (noveltyMethodString.compare("gp-mean") == 0) //novel = small mean
  {
    this->noveltyMethod = GPMINMEAN;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if (noveltyMethodString.compare("gp-meanRatio") == 0) //novel = small difference between mean of most plausible class and mean of snd
                                                             // most plausible class (not useful in binary settings)
  {
    this->noveltyMethod = GPMEANRATIO;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if (noveltyMethodString.compare("gp-weightAll") == 0) // novel = large weight in alpha vector after updating the model (can be predicted exactly)
  {
    this->noveltyMethod = GPWEIGHTALL;
    this->mostNoveltyWithMaxScores = true;
  }
  else if (noveltyMethodString.compare("gp-weightRatio") == 0) // novel = small difference between weights for alpha vectors
                                                               // with assumptions of GT label to be the most
                                                               // plausible against the second most plausible class
  {
    this->noveltyMethod = GPWEIGHTRATIO;
    this->mostNoveltyWithMaxScores = false;
    globalMaxUncert = numeric_limits<double>::max();
  }
  else if (noveltyMethodString.compare("random") == 0)
  {
    initRand();
    this->noveltyMethod = RANDOM;
  }
  else
  {
    this->noveltyMethod = GPVARIANCE;
    this->mostNoveltyWithMaxScores = true;
  }
  //we haven't queried any region so far
  queriedRegions.clear();

  visualizeALimages = conf->gB(section, "visualizeALimages", false);

  resultsOfSingleRun.clear();

  write_results = conf->gB( "debug", "write_results", false );
}
SemSegNoveltyBinary::~SemSegNoveltyBinary()
{
  if(newTrainExamples.size() > 0)
  {
    // show most uncertain region
    if (visualizeALimages)
      showImage(maskedImg);

    //incorporate new information into the classifier
    if (classifier != NULL)
    {
      classifier->addMultipleExamples(newTrainExamples);
      //store the classifier, such that we can read it again in the next round (if we like that)
      classifier->save ( cache + "/classifier.data" );
    }
  }

  // clean-up
  if ( classifier != NULL )
    delete classifier;
  if ( vclassifier != NULL )
    delete vclassifier;
  if ( featExtract != NULL )
    delete featExtract;
}
void SemSegNoveltyBinary::visualizeRegion(const NICE::ColorImage &img, const NICE::Matrix &regions, int region, NICE::ColorImage &outimage)
{
  std::vector<uchar> color;
  color.push_back(255);
  color.push_back(0);
  color.push_back(0);

  int width = img.width();
  int height = img.height();
  outimage.resize(width,height);

  for(int y = 0; y < height; y++)
  {
    for(int x = 0; x < width; x++)
    {
      if(regions(x,y) == region)
      {
        for(int c = 0; c < 3; c++)
        {
          outimage(x,y,c) = color[c];
        }
      }
      else
      {
        for(int c = 0; c < 3; c++)
        {
          outimage(x,y,c) = img(x,y,c);
        }
      }
    }
  }
}
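// Training pipeline implemented below: for every training image, color features from
// LocalFeatureColorWeijer are extracted, augmented with their gradient magnitudes, and
// converted to integral images. On a regular grid with stride trainWsize, a sparse
// descriptor is built from the window sums of size (2*whs+1)x(2*whs+1) around each grid
// point, normalized, and labeled 1 if the ground-truth class equals positiveClass and 0
// otherwise. The collected examples train either the FPCGPHIK classifier or a generic
// vector classifier, depending on the "classifier" setting.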
void SemSegNoveltyBinary::train ( const MultiDataset *md )
{
  const LabeledSet train = * ( *md ) ["train"];
  const LabeledSet *trainp = &train;

  ////////////////////////
  // feature extraction //
  ////////////////////////

  //check the same thing for the training classes - this is very specific to our setup
  std::string forbidden_classesTrain_s = conf->gS ( "analysis", "donttrainTrain", "" );
  if ( forbidden_classesTrain_s == "" )
  {
    forbidden_classesTrain_s = conf->gS ( "analysis", "forbidden_classesTrain", "" );
  }
  cn.getSelection ( forbidden_classesTrain_s, forbidden_classesTrain );

  //check whether we have a single positive class
  std::string positiveClass_s = conf->gS ( "SemSegNoveltyBinary", "positiveClass", "" );
  std::set<int> positiveClassNumberTmp;
  cn.getSelection ( positiveClass_s, positiveClassNumberTmp );

  std::cerr << "BINARY SETTING ENABLED! " << std::endl;
  switch ( positiveClassNumberTmp.size() )
  {
    case 0:
    {
      positiveClass = 0;
      std::cerr << "no positive class given, assume 0 as positive class" << std::endl;
      break;
    }
    case 1:
    {
      positiveClass = *(positiveClassNumberTmp.begin());
      std::cerr << "positive class will be number " << positiveClass << " with the name: " << positiveClass_s << std::endl;
      break;
    }
    default:
    {
      //we specified more than a single positive class. right now, this is not what we are interested in, but
      //in theory we could also accept this and convert positiveClass into a set of ints of possible positive classes
      positiveClass = 0;
      std::cerr << "no positive class given, assume 0 as positive class" << std::endl;
      break;
    }
  }
  std::cerr << "============================" << std::endl << std::endl;

  ProgressBar pb ( "Local Feature Extraction" );
  pb.show();

  int imgnb = 0;

  Examples examples;
  examples.filename = "training";

  int featdim = -1;
  classesInUse.clear();

  LOOP_ALL_S ( *trainp )
  {
    //EACH_S(classno, currentFile);
    EACH_INFO ( classno, info );
    std::string currentFile = info.img();

    CachedExample *ce = new CachedExample ( currentFile );

    const LocalizationResult *locResult = info.localization();
    if ( locResult->size() <= 0 )
    {
      fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
                currentFile.c_str() );
      continue;
    }

    int xsize, ysize;
    ce->getImageSize ( xsize, ysize );

    Image labels ( xsize, ysize );
    labels.set ( 0 );
    locResult->calcLabeledImage ( labels, ( *classNames ).getBackgroundClass() );

    NICE::ColorImage img;
    try {
      img = ColorImage ( currentFile );
    } catch ( Exception ) {
      cerr << "SemSegNoveltyBinary: error opening image file <" << currentFile << ">" << endl;
      continue;
    }

    Globals::setCurrentImgFN ( currentFile );

    MultiChannelImageT<double> feats;

    // extract features
    featExtract->getFeats ( img, feats );
    featdim = feats.channels();
    feats.addChannel(featdim);
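    // the first featdim channels hold the color features from LocalFeatureColorWeijer;
    // below we append another featdim channels containing their gradient magnitudes,
    // so the final descriptor dimension is twice the raw feature dimension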
    for (int c = 0; c < featdim; c++)
    {
      ImageT<double> tmp = feats[c];
      ImageT<double> tmp2 = feats[c+featdim];
      NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
    }
    featdim += featdim;

    // compute integral images
    for ( int c = 0; c < featdim; c++ )
    {
      feats.calcIntegral ( c );
    }

    for ( int y = 0; y < ysize; y += trainWsize)
    {
      for ( int x = 0; x < xsize; x += trainWsize )
      {
        int classnoTmp = labels.getPixel ( x, y );

        if ( forbidden_classesTrain.find ( classnoTmp ) != forbidden_classesTrain.end() )
        {
          continue;
        }

        if (classesInUse.find(classnoTmp) == classesInUse.end())
        {
          classesInUse.insert(classnoTmp);
        }
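        // build a sparse descriptor for the current grid position: each entry is the
        // sum of one feature channel over the (2*whs+1)x(2*whs+1) window around (x,y),
        // read in constant time from the integral images computed above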
        Example example;
        example.vec = NULL;
        example.svec = new SparseVector ( featdim );
        for ( int f = 0; f < featdim; f++ )
        {
          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
          if ( val > 1e-10 )
            ( *example.svec ) [f] = val;
        }
        example.svec->normalize();

        example.position = imgnb;
        if ( classnoTmp == positiveClass )
          examples.push_back ( pair<int, Example> ( 1, example ) );
        else
          examples.push_back ( pair<int, Example> ( 0, example ) );
      }
    }

    delete ce;
    imgnb++;
    pb.update ( trainp->count() );
  }

  numberOfClasses = classesInUse.size();
  std::cerr << "numberOfClasses: " << numberOfClasses << std::endl;
  std::cerr << "classes in use: " << std::endl;
  for (std::set<int>::const_iterator it = classesInUse.begin(); it != classesInUse.end(); it++)
  {
    std::cerr << *it << " : " << cn.text(*it) << " ";
  }
  std::cerr << std::endl;

  pb.hide();

  //////////////////////
  // train classifier //
  //////////////////////
  FeaturePool fp;

  Feature *f = new SparseVectorFeature ( featdim );
  f->explode ( fp );
  delete f;

  if ( classifier != NULL )
  {
    std::cerr << "train FP-classifier with " << examples.size() << " examples" << std::endl;
    classifier->train ( fp, examples );
    std::cerr << "training finished" << std::endl;
  }
  else
  {
    LabeledSetVector lvec;
    convertExamplesToLSet ( examples, lvec );
    vclassifier->teach ( lvec );
    // if ( usegmm )
    //   convertLSetToSparseExamples ( examples, lvec );
    // else
    std::cerr << "classifierString: " << classifierString << std::endl;
    if (this->classifierString.compare("nn") == 0)
    {
      convertLSetToExamples ( examples, lvec, true /* only remove pointers to the data in the LSet-struct*/);
    }
    else
    {
      convertLSetToExamples ( examples, lvec, false /* remove all training examples of the LSet-struct */);
    }
    vclassifier->finishTeaching();
  }

  fp.destroy();

  if ( save_classifier )
  {
    if ( classifier != NULL )
      classifier->save ( cache + "/classifier.data" );
    else
      vclassifier->save ( cache + "/veccl.data" );
  }

  ////////////
  //clean up//
  ////////////
  for ( int i = 0; i < ( int ) examples.size(); i++ )
  {
    examples[i].second.clean();
  }
  examples.clear();

  cerr << "SemSeg training finished" << endl;
}
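// semanticseg is called once per test image. The incoming segresult holds the ground-truth
// labels (kept in a local copy for evaluation and for masking forbidden classes), features
// are computed exactly as in training, and the configured novelty measure fills a per-pixel
// novelty image. If a region segmentation is configured, class probabilities and novelty
// scores are averaged per region and the most novel, not yet queried region is remembered
// as a candidate query for active learning.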
void SemSegNoveltyBinary::semanticseg (
    CachedExample *ce,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities )
{
  Timer timer;
  timer.start();
  //segresult contains the GT labels when this method is called
  //we simply store them in labels to have easy access to the GT information later on
  ImageT<int> labels = segresult;
  //just to be sure that we do not have a GT-biased result :)
  segresult.set(0);

  int featdim = -1;
  std::string currentFile = Globals::getCurrentImgFN();

  int xsize, ysize;
  ce->getImageSize ( xsize, ysize );

  probabilities.reInit( xsize, ysize, 2);
  probabilities.setAll ( 0.0 );

  NICE::ColorImage img;
  try {
    img = ColorImage ( currentFile );
  } catch ( Exception ) {
    cerr << "SemSegNoveltyBinary: error opening image file <" << currentFile << ">" << endl;
    return;
  }

  MultiChannelImageT<double> feats;

  // extract features
  featExtract->getFeats ( img, feats );
  featdim = feats.channels();
  feats.addChannel(featdim);

  for (int c = 0; c < featdim; c++)
  {
    ImageT<double> tmp = feats[c];
    ImageT<double> tmp2 = feats[c+featdim];
    NICE::FilterT<double, double, double>::gradientStrength (tmp, tmp2);
  }
  featdim += featdim;

  // compute integral images
  for ( int c = 0; c < featdim; c++ )
  {
    feats.calcIntegral ( c );
  }
  timer.stop();
  std::cout << "AL time for preparation: " << timer.getLastAbsolute() << std::endl;

  timer.start();
  //classification results currently only needed to be computed separately if we use the vclassifier, i.e., the nearest neighbor used
  // for the "novel feature learning" approach
  //in all other settings, such as active sem seg in general, we do this within the novelty-computation-methods
  if ( classifier == NULL )
  {
    this->computeClassificationResults( feats, segresult, probabilities, xsize, ysize, featdim);
  }
  // timer.stop();
  //
  // std::cerr << "classification results computed" << std::endl;

  FloatImage noveltyImage ( xsize, ysize );
  noveltyImage.set ( 0.0 );
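  // each computeNoveltyBy* method below fills the novelty image and, as a side effect,
  // also writes per-pixel class probabilities and the (binary) segmentation result for
  // the grid points it visits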
  switch (noveltyMethod)
  {
    case GPVARIANCE:
    {
      this->computeNoveltyByVariance( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPUNCERTAINTY:
    {
      this->computeNoveltyByGPUncertainty( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPMINMEAN:
    {
      std::cerr << "compute novelty using the minimum mean" << std::endl;
      this->computeNoveltyByGPMean( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPMEANRATIO:
    {
      this->computeNoveltyByGPMeanRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPWEIGHTALL:
    {
      this->computeNoveltyByGPWeightAll( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case GPWEIGHTRATIO:
    {
      this->computeNoveltyByGPWeightRatio( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    case RANDOM:
    {
      this->computeNoveltyByRandom( noveltyImage, feats, segresult, probabilities, xsize, ysize, featdim );
      break;
    }
    default:
    {
      //do nothing, keep the image constant to 0.0
      break;
    }
  }
  timer.stop();
  std::cout << "AL time for novelty score computation: " << timer.getLastAbsolute() << std::endl;
  if ( write_results || visualizeALimages )
  {
    //convert the novelty scores and the classification result into separate RGB images,
    //so that the two visualizations below do not overwrite each other
    ColorImage noveltyRGB (xsize, ysize);
    ICETools::convertToRGB ( noveltyImage, noveltyRGB );
    ColorImage classificationRGB (xsize, ysize);
    this->cn.labelToRGB( segresult, classificationRGB );

    if ( write_results )
    {
      std::stringstream out;
      std::vector< std::string > list2;
      StringTools::split ( currentFile, '/', list2 );
      out << resultdir << "/" << list2.back();
      // std::cerr << "writing to " << out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+"_unsmoothed.rawfloat" << std::endl;
      noveltyImage.writeRaw("run_" + NICE::intToString(this->iterationCountSuffix) + "_" + out.str() + "_" + noveltyMethodString+"_unsmoothed.rawfloat");
    }

    if (visualizeALimages)
    {
      showImage(noveltyRGB, "Novelty Image without Region Segmentation");
      showImage(classificationRGB, "Classification Result without Region Segmentation");
    }
  }
  timer.start();

  //determine regions
  if(regionSeg != NULL)
  {
    NICE::Matrix mask;
    int amountRegions = regionSeg->segRegions ( img, mask );
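    //aggregate the pixel-wise results per region: class probabilities and novelty scores
    //are summed over the visited grid positions of each region and later normalized by the
    //number of contributing positions; positions whose GT label is listed in
    //forbidden_classesActiveLearning are excluded from the novelty average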
    //compute probs per region
    std::vector<std::vector<double> > regionProb(amountRegions, std::vector<double>(probabilities.channels(),0.0));
    std::vector<double> regionNoveltyMeasure (amountRegions, 0.0);
    std::vector<int> regionCounter(amountRegions, 0);
    std::vector<int> regionCounterNovelty(amountRegions, 0);
    for ( int y = 0; y < ysize; y += trainWsize) //y++)
    {
      for (int x = 0; x < xsize; x += trainWsize) //x++)
      {
        int r = mask(x,y);
        regionCounter[r]++;
        for(int j = 0; j < probabilities.channels(); j++)
        {
          regionProb[r][j] += probabilities ( x, y, j );
        }

        if ( forbidden_classesActiveLearning.find( labels(x,y) ) == forbidden_classesActiveLearning.end() )
        {
          //count the amount of "novelty" for the corresponding region
          regionNoveltyMeasure[r] += noveltyImage(x,y);
          regionCounterNovelty[r]++;
        }
      }
    }

    //find best class per region
    std::vector<int> bestClassPerRegion(amountRegions,0);

    double maxNoveltyScore = -numeric_limits<double>::max();
    if (!mostNoveltyWithMaxScores)
    {
      maxNoveltyScore = numeric_limits<double>::max();
    }

    int maxUncertRegion = -1;

    //loop over all regions and compute averaged novelty scores
    for(int r = 0; r < amountRegions; r++)
    {
      //check for the most plausible class per region
      double maxval = -numeric_limits<double>::max();

      //loop over all classes
      for(int c = 0; c < probabilities.channels(); c++)
      {
        regionProb[r][c] /= regionCounter[r];

        if( (maxval < regionProb[r][c]) ) //&& (regionProb[r][c] != 0.0) )
        {
          maxval = regionProb[r][c];
          bestClassPerRegion[r] = c;
        }
      }
      //if the region only contains invalid information (e.g., background), skip it
      if (regionCounterNovelty[r] == 0)
      {
        continue;
      }

      //normalize summed novelty scores to region size
      regionNoveltyMeasure[r] /= regionCounterNovelty[r];

      //did we find a region with a higher score than the most novel region known so far within this image?
      if( ( mostNoveltyWithMaxScores && (maxNoveltyScore < regionNoveltyMeasure[r]) ) // if we look for large novelty scores, e.g., variance
          || ( !mostNoveltyWithMaxScores && (maxNoveltyScore > regionNoveltyMeasure[r]) ) ) // if we look for small novelty scores, e.g., min mean
      {
        //did we already query a region of this image? -- and it was this specific region
        if ( (queriedRegions.find( currentFile ) != queriedRegions.end() ) && ( queriedRegions[currentFile].find(r) != queriedRegions[currentFile].end() ) )
        {
          continue;
        }
        else //only accept the region as novel if we never queried it before
        {
          maxNoveltyScore = regionNoveltyMeasure[r];
          maxUncertRegion = r;
        }
      }
    }

    // after finding the most novel region for the current image, check whether this region is also the most novel with respect
    // to all previously seen test images
    // if so, store the corresponding features, since we want to "actively" query them to incorporate useful information
    if(findMaximumUncert)
    {
      if( ( mostNoveltyWithMaxScores && (maxNoveltyScore > globalMaxUncert) )
          || ( !mostNoveltyWithMaxScores && (maxNoveltyScore < globalMaxUncert) ) )
      {
        //the current most novel region of this image has a "higher" novelty score than the previous most novel region of all test images worked on so far
        // -> save new important features of this region
        Examples examples;
        for ( int y = 0; y < ysize; y += trainWsize )
        {
          for ( int x = 0; x < xsize; x += trainWsize)
          {
            if(mask(x,y) == maxUncertRegion)
            {
              int classnoTmp = labels(x,y);
              if ( forbidden_classesActiveLearning.find(classnoTmp) != forbidden_classesActiveLearning.end() )
                continue;

              Example example;
              example.vec = NULL;
              example.svec = new SparseVector ( featdim );

              for ( int f = 0; f < featdim; f++ )
              {
                double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
                if ( val > 1e-10 )
                  ( *example.svec ) [f] = val;
              }
              example.svec->normalize();

              if ( classnoTmp == positiveClass )
                examples.push_back ( pair<int, Example> ( 1, example ) );
              else
                examples.push_back ( pair<int, Example> ( 0, example ) );
            }
          }
        }

        if(examples.size() > 0)
        {
          std::cerr << "found " << examples.size() << " new examples in the queried region" << std::endl << std::endl;
          newTrainExamples.clear();
          newTrainExamples = examples;
          globalMaxUncert = maxNoveltyScore;
          //prepare for later visualization
          visualizeRegion(img,mask,maxUncertRegion,maskedImg);
        }
        else
        {
          std::cerr << "the queried region has no valid information" << std::endl << std::endl;
        }

        //save filename and region index
        currentRegionToQuery.first = currentFile;
        currentRegionToQuery.second = maxUncertRegion;
      }
    }

    //write back best results per region
    //i.e., write normalized novelty scores for every region into the novelty image
    for ( int y = 0; y < ysize; y++)
    {
      for (int x = 0; x < xsize; x++)
      {
        int r = mask(x,y);
        for(int j = 0; j < probabilities.channels(); j++)
        {
          probabilities ( x, y, j ) = regionProb[r][j];
        }
        if ( bestClassPerRegion[r] == 0 )
          segresult(x,y) = positiveClass;
        else //treat everything else as the 'various' (negative) class
          segresult(x,y) = 22; //bestClassPerRegion[r];
        // write novelty scores for every segment into the "final" image
        noveltyImage(x,y) = regionNoveltyMeasure[r];
      }
    }

    //compute these nice Classification results
    for ( int y = 0; y < ysize; y++)
    {
      for (int x = 0; x < xsize; x++)
      {
        OBJREC::FullVector scoresTmp (2);
        scoresTmp[1] = probabilities ( x, y, 0 ); //probabilities[0] == negative class == scores[1]
        scoresTmp[0] = probabilities ( x, y, 1 ); //probabilities[1] == positive class == scores[0]

        int cno = scoresTmp[1] > 0 ? 1 : 0;
        ClassificationResult cr ( cno/*doesn't matter*/, scoresTmp );

        if ( labels(x,y) == positiveClass )
          cr.classno_groundtruth = 1;
        else
          cr.classno_groundtruth = 0;

        resultsOfSingleRun.push_back(cr);
      }
    }
  } // if regionSeg != null

  timer.stop();
  std::cout << "AL time for determination of novel regions: " << timer.getLastAbsolute() << std::endl;

  timer.start();
  ColorImage imgrgb ( xsize, ysize );

  if ( write_results )
  {
    std::stringstream out;
    std::vector< std::string > list2;
    StringTools::split ( currentFile, '/', list2 );
    out << resultdir << "/" << list2.back();
    noveltyImage.writeRaw(out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+".rawfloat");
  }

  if (visualizeALimages)
  {
    ICETools::convertToRGB ( noveltyImage, imgrgb );
    showImage(imgrgb, "Novelty Image");
    ColorImage tmp (xsize, ysize);
    cn.labelToRGB(segresult,tmp);
    showImage(tmp, "Cl result after region seg");
  }

  timer.stop();
  cout << "AL time for writing the raw novelty image: " << timer.getLastAbsolute() << endl;
}
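// All compute* methods below share the same access pattern: a sparse window descriptor is
// built every testWSize pixels (exactly as in training), classified once, and the resulting
// scores / novelty value are copied to every pixel of the surrounding testWSize block, so
// that the whole image gets covered without classifying each pixel individually.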
inline void SemSegNoveltyBinary::computeClassificationResults( const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize,
    const int & ysize,
    const int & featdim
    )
{
  std::cerr << "featdim: " << featdim << std::endl;

  if ( classifier != NULL )
  {
    #pragma omp parallel for
    for ( int y = 0; y < ysize; y += testWSize )
    {
      Example example;
      example.vec = NULL;
      example.svec = new SparseVector ( featdim );
      for ( int x = 0; x < xsize; x += testWSize)
      {
        for ( int f = 0; f < featdim; f++ )
        {
          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
          if ( val > 1e-10 )
            ( *example.svec ) [f] = val;
        }
        example.svec->normalize();

        ClassificationResult cr = classifier->classify ( example );

        int xs = std::max(0, x - testWSize/2);
        int xe = std::min(xsize - 1, x + testWSize/2);
        int ys = std::max(0, y - testWSize/2);
        int ye = std::min(ysize - 1, y + testWSize/2);
        for (int yl = ys; yl <= ye; yl++)
        {
          for (int xl = xs; xl <= xe; xl++)
          {
            for ( int j = 0 ; j < cr.scores.size(); j++ )
            {
              probabilities ( xl, yl, j ) = cr.scores[j];
            }
            if ( cr.classno == 1 )
              segresult ( xl, yl ) = positiveClass;
            else
              segresult ( xl, yl ) = 22; //various
          }
        }
        example.svec->clear();
      }
      delete example.svec;
      example.svec = NULL;
    }
  }
  else //vclassifier
  {
    std::cerr << "compute classification results with vclassifier" << std::endl;
    #pragma omp parallel for
    for ( int y = 0; y < ysize; y += testWSize )
    {
      for ( int x = 0; x < xsize; x += testWSize)
      {
        NICE::Vector v(featdim);
        for ( int f = 0; f < featdim; f++ )
        {
          double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
          v[f] = val;
        }
        v.normalizeL1();

        ClassificationResult cr = vclassifier->classify ( v );

        int xs = std::max(0, x - testWSize/2);
        int xe = std::min(xsize - 1, x + testWSize/2);
        int ys = std::max(0, y - testWSize/2);
        int ye = std::min(ysize - 1, y + testWSize/2);
        for (int yl = ys; yl <= ye; yl++)
        {
          for (int xl = xs; xl <= xe; xl++)
          {
            for ( int j = 0 ; j < cr.scores.size(); j++ )
            {
              probabilities ( xl, yl, j ) = cr.scores[j];
            }
            if ( cr.classno == 1 )
              segresult ( xl, yl ) = positiveClass;
            else
              segresult ( xl, yl ) = 22; //various
          }
        }
      }
    }
  }
}
// compute novelty images depending on the strategy chosen
void SemSegNoveltyBinary::computeNoveltyByRandom( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);

      double randVal = randDouble();
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1)
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == 1 )
            segresult ( xl, yl ) = positiveClass;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = randVal;
        }
      }
      //free the sparse features of this grid position and the row-wise example container
      //(mirrors the clean-up done in the other computeNoveltyBy* methods)
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}
void SemSegNoveltyBinary::computeNoveltyByVariance( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1)
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == 1 )
            segresult ( xl, yl ) = positiveClass;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = cr.uncertainty;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}
void SemSegNoveltyBinary::computeNoveltyByGPUncertainty( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD("GPHIK", "noise", 0.01);

  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double gpMeanVal = abs(cr.scores[0]); //very specific to the binary setting
      double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));

      //compute the heuristic GP-UNCERTAINTY, as proposed by Kapoor et al. in IJCV 2010
      // GP-UNCERTAINTY : |mean| / sqrt(variance + gpNoise)
      double gpUncertaintyVal = gpMeanVal*firstTerm; //firstTerm = 1.0 / sqrt(cr.uncertainty+gpNoise)
      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1)
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = gpUncertaintyVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}
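// For the binary GP classifier, cr.scores[0] holds the predictive mean, which can be read
// as a signed confidence for the positive class. gp-mean therefore uses |mean| as the
// novelty score and, since mostNoveltyWithMaxScores is false for this method, treats
// small values (examples close to the decision boundary) as most novel.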
void SemSegNoveltyBinary::computeNoveltyByGPMean( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD("GPHIK", "noise", 0.01);

  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double gpMeanVal = abs(cr.scores[0]); //very specific to the binary setting

      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == 1 )
            segresult ( xl, yl ) = positiveClass;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = gpMeanVal;
        }
      }
      example.svec->clear();
    }
    //free the row-wise example container (mirrors the other computeNoveltyBy* methods)
    delete example.svec;
    example.svec = NULL;
  }
}
void SemSegNoveltyBinary::computeNoveltyByGPMeanRatio( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
  //NOTE in binary settings, this is the same as 2*mean
  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );
      //look at the difference in the absolute mean values for the most plausible class
      // and the second most plausible class
      double gpMeanRatioVal = 2*abs(cr.scores[0]); //very specific to the binary setting
      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1)
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = gpMeanRatioVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}
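// gp-weightAll estimates how strongly the model would react if the current example were
// added to the training set: min(|score+1|, |score-1|) is the residual towards the closer
// of the two binary labels, scaled by 1/sqrt(variance + noise). According to the
// constructor comments, this predicts the weight the example would obtain in the alpha
// vector; larger values are treated as more novel. gp-weightRatio below uses twice this
// value and inverts the ordering (small values count as novel).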
void SemSegNoveltyBinary::computeNoveltyByGPWeightAll( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD("GPHIK", "noise", 0.01);

  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
      double gpWeightAllVal ( 0.0 );
      //binary scenario
      gpWeightAllVal = std::min( abs(cr.scores[0]+1), abs(cr.scores[0]-1) );
      gpWeightAllVal *= firstTerm;

      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1)
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = gpWeightAllVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}

void SemSegNoveltyBinary::computeNoveltyByGPWeightRatio( NICE::FloatImage & noveltyImage,
    const NICE::MultiChannelImageT<double> & feats,
    NICE::ImageT<int> & segresult,
    NICE::MultiChannelImageT<double> & probabilities,
    const int & xsize, const int & ysize, const int & featdim )
{
  double gpNoise = conf->gD("GPHIK", "noise", 0.01);
  //NOTE in binary settings, this is the same as 2*weightAll
  #pragma omp parallel for
  for ( int y = 0; y < ysize; y += testWSize )
  {
    Example example;
    example.vec = NULL;
    example.svec = new SparseVector ( featdim );
    for ( int x = 0; x < xsize; x += testWSize)
    {
      for ( int f = 0; f < featdim; f++ )
      {
        double val = feats.getIntegralValue ( x - whs, y - whs, x + whs, y + whs, f );
        if ( val > 1e-10 )
          ( *example.svec ) [f] = val;
      }
      example.svec->normalize();

      ClassificationResult cr = classifier->classify ( example );

      double firstTerm (1.0 / sqrt(cr.uncertainty+gpNoise));
      double gpWeightRatioVal ( 0.0 );
      //binary scenario
      gpWeightRatioVal = std::min( abs(cr.scores[0]+1), abs(cr.scores[0]-1) );
      gpWeightRatioVal *= 2*firstTerm;

      int xs = std::max(0, x - testWSize/2);
      int xe = std::min(xsize - 1, x + testWSize/2);
      int ys = std::max(0, y - testWSize/2);
      int ye = std::min(ysize - 1, y + testWSize/2);
      for (int yl = ys; yl <= ye; yl++)
      {
        for (int xl = xs; xl <= xe; xl++)
        {
          for ( int j = 0 ; j < cr.scores.size(); j++ )
          {
            if ( cr.scores[j] == 1)
              probabilities ( xl, yl, j ) = cr.scores[j];
            else
              probabilities ( xl, yl, 0 ) = cr.scores[j];
          }
          if ( cr.classno == positiveClass )
            segresult ( xl, yl ) = cr.classno;
          else
            segresult ( xl, yl ) = 22; //various
          noveltyImage ( xl, yl ) = gpWeightRatioVal;
        }
      }
      example.svec->clear();
    }
    delete example.svec;
    example.svec = NULL;
  }
}
void SemSegNoveltyBinary::addNewExample(const NICE::Vector& newExample, const int & newClassNo)
{
  //accept the new class as valid information
  if ( forbidden_classesTrain.find ( newClassNo ) != forbidden_classesTrain.end() )
  {
    forbidden_classesTrain.erase(newClassNo);
    numberOfClasses++;
  }
  if ( classesInUse.find ( newClassNo ) == classesInUse.end() )
  {
    classesInUse.insert( newClassNo );
  }

  //then add it to the classifier used
  if ( classifier != NULL )
  {
    //TODO
  }
  else //vclassifier
  {
    if (this->classifierString.compare("nn") == 0)
    {
      vclassifier->teach ( newClassNo, newExample );
    }
  }
}
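// addNovelExamples performs the actual active-learning update after a test run: it writes
// out the queried image region for inspection, accepts previously forbidden classes that
// appear among the queried examples, adds the stored examples to the (GP-HIK) classifier,
// remembers the queried region so it is never selected again, and finally resets
// globalMaxUncert for the next iteration.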
void SemSegNoveltyBinary::addNovelExamples()
{
  Timer timer;

  //show the image that contains the most novel region
  if (visualizeALimages)
    showImage(maskedImg, "Most novel region");

  timer.start();

  std::stringstream out;
  std::vector< std::string > list;
  StringTools::split ( currentRegionToQuery.first, '/', list );
  out << resultdir << "/" << list.back();

  maskedImg.writePPM ( out.str() + "_run_" + NICE::intToString(this->iterationCountSuffix) + "_" + noveltyMethodString+ "_query.ppm" );

  timer.stop();
  std::cerr << "AL time for writing queried image: " << timer.getLast() << std::endl;

  timer.start();
  //check which classes will be added using the features from the novel region
  std::set<int> newClassNumbers;
  newClassNumbers.clear(); //just to be sure
  for ( uint i = 0 ; i < newTrainExamples.size() ; i++ )
  {
    if (newClassNumbers.find(newTrainExamples[i].first /* classNumber*/) == newClassNumbers.end() )
    {
      newClassNumbers.insert(newTrainExamples[i].first );
    }
  }

  //accept the new classes as valid information
  for (std::set<int>::const_iterator clNoIt = newClassNumbers.begin(); clNoIt != newClassNumbers.end(); clNoIt++)
  {
    if ( forbidden_classesTrain.find ( *clNoIt ) != forbidden_classesTrain.end() )
    {
      forbidden_classesTrain.erase(*clNoIt);
      numberOfClasses++;
    }
    if ( classesInUse.find ( *clNoIt ) == classesInUse.end() )
    {
      classesInUse.insert( *clNoIt );
    }
  }
  timer.stop();
  std::cerr << "AL time for accepting possible new classes: " << timer.getLast() << std::endl;

  timer.start();
  //then add the new features to the classifier used
  if ( classifier != NULL )
  {
    if (this->classifierString.compare("ClassifierGPHIK") == 0)
    {
      classifier->addMultipleExamples ( this->newTrainExamples );
    }
  }
  else //vclassifier
  {
    //TODO
  }
  timer.stop();
  std::cerr << "AL time for actually updating the classifier: " << timer.getLast() << std::endl;

  std::cerr << "the current region to query is: " << currentRegionToQuery.first << " -- " << currentRegionToQuery.second << std::endl;

  //did we already query a region of this image?
  if ( queriedRegions.find( currentRegionToQuery.first ) != queriedRegions.end() )
  {
    queriedRegions[ currentRegionToQuery.first ].insert(currentRegionToQuery.second);
  }
  else
  {
    std::set<int> tmpSet; tmpSet.insert(currentRegionToQuery.second);
    queriedRegions.insert(std::pair<std::string,std::set<int> > (currentRegionToQuery.first, tmpSet ) );
  }

  std::cerr << "Write already queried regions: " << std::endl;
  for (std::map<std::string,std::set<int> >::const_iterator it = queriedRegions.begin(); it != queriedRegions.end(); it++)
  {
    std::cerr << "image: " << it->first << " -- ";
    for (std::set<int>::const_iterator itReg = it->second.begin(); itReg != it->second.end(); itReg++)
    {
      std::cerr << *itReg << " ";
    }
    std::cerr << std::endl;
  }

  //clear the latest results, since one iteration is over
  globalMaxUncert = -numeric_limits<double>::max();
  if (!mostNoveltyWithMaxScores)
    globalMaxUncert = numeric_limits<double>::max();
}

const Examples * SemSegNoveltyBinary::getNovelExamples() const
{
  return &(this->newTrainExamples);
}
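// getAUCPerformance evaluates the pixel-wise binary classification results collected in
// resultsOfSingleRun during semanticseg (only filled when a region segmentation is used)
// and returns the binary classification performance measured as area under the ROC curve.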
double SemSegNoveltyBinary::getAUCPerformance() const
{
  std::cerr << "evaluate AUC performance" << std::endl;
  int noGTPositives ( 0 );
  int noGTNegatives ( 0 );
  for (std::vector<OBJREC::ClassificationResult>::const_iterator it = resultsOfSingleRun.begin(); it != resultsOfSingleRun.end(); it++)
  {
    if (it->classno_groundtruth == 1)
    {
      noGTPositives++;
    }
    else
      noGTNegatives++;
  }
  std::cerr << "GT positives: " << noGTPositives << " -- GT negatives: " << noGTNegatives << std::endl;
  std::cerr << "ARR: " << resultsOfSingleRun.getAverageRecognitionRate() << std::endl;

  return resultsOfSingleRun.getBinaryClassPerformance( ClassificationResults::PERF_AUC );
}