- #include "SemSegContextTree.h"
- #include "vislearning/baselib/Globals.h"
- #include "vislearning/baselib/ProgressBar.h"
- #include "core/basics/StringTools.h"
- #include "vislearning/cbaselib/CachedExample.h"
- #include "vislearning/cbaselib/PascalResults.h"
- #include "objrec/segmentation/RSMeanShift.h"
- #include "objrec/segmentation/RSGraphBased.h"
- #include "core/basics/numerictools.h"
- #include "core/basics/Timer.h"
- #include <omp.h>
- #include <iostream>
- #define BOUND(x,min,max) (((x)<(min))?(min):((x)>(max)?(max):(x)))
- #undef LOCALFEATS
- //#define LOCALFEATS
- using namespace OBJREC;
- using namespace std;
- using namespace NICE;
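- // Feature value accessors: either read raw image channels directly or look up
- // the class distribution of the tree node a pixel currently sits in (context).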
- class MCImageAccess:public ValueAccess
- {
- public:
- virtual double getVal(const Features &feats, const int &x, const int &y, const int &channel)
- {
- return feats.feats->get(x,y,channel);
- }
-
- virtual string writeInfos()
- {
- return "raw";
- }
- };
- class ClassificationResultAccess:public ValueAccess
- {
- public:
- virtual double getVal(const Features &feats, const int &x, const int &y, const int &channel)
- {
- return (*feats.tree)[feats.cfeats->get(x,y,feats.cTree)].dist[channel];
- }
-
- virtual string writeInfos()
- {
- return "context";
- }
- };
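- // Point-pair operations: combine one or two (offset, channel) lookups made
- // through the selected ValueAccess into a single scalar split feature.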
- class Minus:public Operation
- {
- public:
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- double v1 = values->getVal(feats, BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),channel1);
- double v2 = values->getVal(feats, BOUND(x+x2,0,xsize-1),BOUND(y+y2,0,ysize-1),channel2);
- return v1-v2;
- }
-
- virtual Operation* clone()
- {
- return new Minus();
- }
-
- virtual string writeInfos()
- {
- return "Minus"+values->writeInfos();
- }
- };
- class MinusAbs:public Operation
- {
- public:
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- double v1 = values->getVal(feats, BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),channel1);
- double v2 = values->getVal(feats, BOUND(x+x2,0,xsize-1),BOUND(y+y2,0,ysize-1),channel2);
- return fabs(v1-v2);
- }
-
- virtual Operation* clone()
- {
- return new MinusAbs();
- }
-
- virtual string writeInfos()
- {
- return "MinusAbs"+values->writeInfos();
- }
- };
- class Addition:public Operation
- {
- public:
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- double v1 = values->getVal(feats, BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),channel1);
- double v2 = values->getVal(feats, BOUND(x+x2,0,xsize-1),BOUND(y+y2,0,ysize-1),channel2);
- return v1+v2;
- }
-
- virtual Operation* clone()
- {
- return new Addition();
- }
-
- virtual string writeInfos()
- {
- return "Addition"+values->writeInfos();
- }
- };
- class Only1:public Operation
- {
- public:
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- double v1 = values->getVal(feats, BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),channel1);
- return v1;
- }
-
- virtual Operation* clone()
- {
- return new Only1();
- }
-
- virtual string writeInfos()
- {
- return "Only1"+values->writeInfos();
- }
- };
- // uses the mean of the classification probabilities in the window given by (x1,y1) and (x2,y2)
- class IntegralOps:public Operation
- {
- public:
- virtual void set(int _x1, int _y1, int _x2, int _y2, int _channel1, int _channel2, ValueAccess *_values)
- {
- x1 = min(_x1,_x2);
- y1 = min(_y1,_y2);
- x2 = max(_x1,_x2);
- y2 = max(_y1,_y2);
- channel1 = _channel1;
- channel2 = _channel2;
- values = _values;
- }
-
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- return computeMean(*feats.integralImg,BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),BOUND(x+x2,0,xsize-1),BOUND(y+y2,0,ysize-1),channel1);
- }
-
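- // mean over the rectangle spanned by (uLx,uLy) and (lRx,lRy), read from the
- // integral image: sum = I(lRx,lRy) + I(uLx,uLy) - I(lRx,uLy) - I(uLx,lRy)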
- inline double computeMean(const NICE::MultiChannelImageT<double> &intImg, const int &uLx, const int &uLy, const int &lRx, const int &lRy, const int &chan)
- {
- double val1 = intImg.get(uLx,uLy, chan);
- double val2 = intImg.get(lRx,uLy, chan);
- double val3 = intImg.get(uLx,lRy, chan);
- double val4 = intImg.get(lRx,lRy, chan);
- double area = (lRx-uLx)*(lRy-uLy);
- // windows clamped at the image border can collapse to zero area
- if(area == 0.0)
- return 0.0;
- return (val1+val4-val2-val3)/area;
- }
-
- virtual Operation* clone()
- {
- return new IntegralOps();
- }
-
- virtual string writeInfos()
- {
- return "IntegralOps";
- }
- };
- //uses the mean of the integral image over a window of half-size (x1,y1) centered on the current pixel
- class IntegralCenteredOps:public IntegralOps
- {
- public:
- virtual void set(int _x1, int _y1, int _x2, int _y2, int _channel1, int _channel2)
- {
- x1 = abs(_x1);
- y1 = abs(_y1);
- x2 = abs(_x2);
- y2 = abs(_y2);
- channel1 = _channel1;
- channel2 = _channel2;
- }
-
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- return computeMean(*feats.integralImg,BOUND(x-x1,0,xsize-1),BOUND(y-y1,0,ysize-1),BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),channel1);
- }
-
- virtual Operation* clone()
- {
- return new IntegralCenteredOps();
- }
-
- virtual string writeInfos()
- {
- return "IntegralCenteredOps";
- }
- };
- //uses the difference of the means of two centered windows, where (x1,y1) is the half-width/height of window 1 and (x2,y2) that of window 2
- class BiIntegralCenteredOps:public IntegralCenteredOps
- {
- public:
- virtual void set(int _x1, int _y1, int _x2, int _y2, int _channel1, int _channel2)
- {
- x1 = min(abs(_x1),abs(_x2));
- y1 = min(abs(_y1),abs(_y2));
- x2 = max(abs(_x1),abs(_x2));
- y2 = max(abs(_y1),abs(_y2));
- channel1 = _channel1;
- channel2 = _channel2;
- }
-
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
- return computeMean(*feats.integralImg,BOUND(x-x1,0,xsize-1),BOUND(y-y1,0,ysize-1),BOUND(x+x1,0,xsize-1),BOUND(y+y1,0,ysize-1),channel1) - computeMean(*feats.integralImg,BOUND(x-x2,0,xsize-1),BOUND(y-y2,0,ysize-1),BOUND(x+x2,0,xsize-1),BOUND(y+y2,0,ysize-1),channel1);
- }
-
- virtual Operation* clone()
- {
- return new BiIntegralCenteredOps();
- }
-
- virtual string writeInfos()
- {
- return "BiIntegralCenteredOps";
- }
- };
- /** horizontal Haar features
- * ++
- * --
- */
- class HaarHorizontal:public IntegralCenteredOps
- {
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
-
- int tlx = BOUND(x-x1,0,xsize-1);
- int tly = BOUND(y-y1,0,ysize-1);
- int lrx = BOUND(x+x1,0,xsize-1);
- int lry = BOUND(y+y1,0,ysize-1);
-
- return computeMean(*feats.integralImg,tlx,tly,lrx, y,channel1)-computeMean(*feats.integralImg,tlx,y,lrx, lry,channel1);
- }
-
- virtual string writeInfos()
- {
- return "HaarHorizontal";
- }
- };
- /** vertical Haar features
- * +-
- * +-
- */
- class HaarVertical:public IntegralCenteredOps
- {
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
-
- int tlx = BOUND(x-x1,0,xsize-1);
- int tly = BOUND(y-y1,0,ysize-1);
- int lrx = BOUND(x+x1,0,xsize-1);
- int lry = BOUND(y+y1,0,ysize-1);
-
- return computeMean(*feats.integralImg,tlx,tly,x, lry,channel1)-computeMean(*feats.integralImg,x,tly,lrx, lry,channel1);
- }
-
- virtual string writeInfos()
- {
- return "HaarVertical";
- }
- };
- /** diagonal Haar features
- * +-
- * -+
- */
- class HaarDiag:public IntegralCenteredOps
- {
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
-
- int tlx = BOUND(x-x1,0,xsize-1);
- int tly = BOUND(y-y1,0,ysize-1);
- int lrx = BOUND(x+x1,0,xsize-1);
- int lry = BOUND(y+y1,0,ysize-1);
-
- return computeMean(*feats.integralImg,tlx,tly,x, y,channel1)+computeMean(*feats.integralImg,x,y,lrx, lry,channel1) - computeMean(*feats.integralImg,tlx,y,x, lry,channel1)-computeMean(*feats.integralImg,x,tly,lrx, y,channel1);
- }
-
- virtual string writeInfos()
- {
- return "HaarDiag";
- }
- };
- /** horizontal Haar features
- * +++
- * ---
- * +++
- */
- class Haar3Horiz:public BiIntegralCenteredOps
- {
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
-
- int tlx = BOUND(x-x2,0,xsize-1);
- int tly = BOUND(y-y2,0,ysize-1);
- int mtly = BOUND(y-y1,0,ysize-1);
- int mlry = BOUND(y+y1,0,ysize-1);
- int lrx = BOUND(x+x2,0,xsize-1);
- int lry = BOUND(y+y2,0,ysize-1);
-
- return computeMean(*feats.integralImg,tlx,tly,lrx, mtly,channel1) -computeMean(*feats.integralImg,tlx,mtly,lrx, mlry,channel1) + computeMean(*feats.integralImg,tlx,mlry,lrx, lry,channel1);
- }
-
- virtual string writeInfos()
- {
- return "Haar3Horiz";
- }
- };
- /** vertical Haar features
- * +-+
- * +-+
- * +-+
- */
- class Haar3Vert:public BiIntegralCenteredOps
- {
- virtual double getVal(const Features &feats, const int &x, const int &y)
- {
- int xsize, ysize;
- getXY(feats, xsize, ysize);
-
- int tlx = BOUND(x-x2,0,xsize-1);
- int tly = BOUND(y-y2,0,ysize-1);
- int mtlx = BOUND(x-x1,0,xsize-1);
- int mlrx = BOUND(x+x1,0,xsize-1);
- int lrx = BOUND(x+x2,0,xsize-1);
- int lry = BOUND(y+y2,0,ysize-1);
-
- return computeMean(*feats.integralImg,tlx,tly,mtlx, lry,channel1) -computeMean(*feats.integralImg,mtlx,tly,mlrx, lry,channel1) + computeMean(*feats.integralImg,mlrx,tly,lrx, lry,channel1);
- }
-
- virtual string writeInfos()
- {
- return "Haar3Vert";
- }
- };
- SemSegContextTree::SemSegContextTree( const Config *conf, const MultiDataset *md )
- : SemanticSegmentation ( conf, &(md->getClassNames("train")) )
- {
- this->conf = conf;
- string section = "SSContextTree";
- lfcw = new LFColorWeijer(conf);
-
- grid = conf->gI(section, "grid", 10 );
-
- maxSamples = conf->gI(section, "max_samples", 2000);
-
- minFeats = conf->gI(section, "min_feats", 50 );
-
- maxDepth = conf->gI(section, "max_depth", 10 );
-
- windowSize = conf->gI(section, "window_size", 16);
-
- featsPerSplit = conf->gI(section, "feats_per_split", 200);
-
- useShannonEntropy = conf->gB(section, "use_shannon_entropy", true);
-
- nbTrees = conf->gI(section, "amount_trees", 1);
-
- string segmentationtype = conf->gS(section, "segmentation_type", "meanshift");
-
- useGaussian = conf->gB(section, "use_gaussian", true);
- if(useGaussian)
- throw("there something wrong with using gaussian! first fix it!");
-
- pixelWiseLabeling = false;
-
- if(segmentationtype == "meanshift")
- segmentation = new RSMeanShift(conf);
- else if (segmentationtype == "none")
- {
- segmentation = NULL;
- pixelWiseLabeling = true;
- }
- else if (segmentationtype == "felzenszwalb")
- segmentation = new RSGraphBased(conf);
- else
- throw("no valid segmenation_type\n please choose between none, meanshift and felzenszwalb\n");
-
-
- ftypes = conf->gI(section, "features", 2);
-
- ops.push_back(new Minus());
- ops.push_back(new MinusAbs());
- ops.push_back(new Addition());
- ops.push_back(new Only1());
-
- cops.push_back(new BiIntegralCenteredOps());
- cops.push_back(new IntegralCenteredOps());
- cops.push_back(new IntegralOps());
- cops.push_back(new HaarHorizontal());
- cops.push_back(new HaarVertical());
- cops.push_back(new HaarDiag());
- cops.push_back(new Haar3Horiz());
- cops.push_back(new Haar3Vert());
-
- calcVal.push_back(new MCImageAccess());
- calcVal.push_back(new ClassificationResultAccess());
-
- classnames = md->getClassNames ( "train" );
-
- ///////////////////////////////////
- // Train Segmentation Context Trees
- ///////////////////////////////////
- train ( md );
- }
- SemSegContextTree::~SemSegContextTree()
- {
- }
- double SemSegContextTree::getBestSplit(std::vector<NICE::MultiChannelImageT<double> > &feats, std::vector<NICE::MultiChannelImageT<int> > &currentfeats, std::vector<NICE::MultiChannelImageT<double> > &integralImgs, const std::vector<NICE::MatrixT<int> > &labels, int node, Operation *&splitop, double &splitval, const int &tree)
- {
- int imgCount = 0, featdim = 0;
- try
- {
- imgCount = (int)feats.size();
- featdim = feats[0].channels();
- }
- catch(Exception)
- {
- cerr << "no features computed?" << endl;
- }
-
- double bestig = -numeric_limits< double >::max();
- splitop = NULL;
- splitval = -1.0;
-
- set<vector<int> >selFeats;
- map<int,int> e;
- int featcounter = 0;
-
- for(int iCounter = 0; iCounter < imgCount; iCounter++)
- {
- int xsize = (int)currentfeats[iCounter].width();
- int ysize = (int)currentfeats[iCounter].height();
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- if(currentfeats[iCounter].get(x,y,tree) == node)
- {
- featcounter++;
- }
- }
- }
- }
-
- if(featcounter < minFeats)
- {
- cout << "only " << featcounter << " feats in current node -> it's a leaf" << endl;
- return 0.0;
- }
-
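- // class-balanced subsampling: fraction[i] is the acceptance probability for
- // class i, chosen so that roughly maxSamples pixels are drawn in total and
- // each class contributes about equally (a[i] is the relative class frequency)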
- vector<double> fraction(a.size(),0.0);
- for(uint i = 0; i < fraction.size(); i++)
- {
- if ( forbidden_classes.find ( labelmapback[i] ) != forbidden_classes.end() )
- fraction[i] = 0;
- else
- fraction[i] = ((double)maxSamples)/((double)featcounter*a[i]*a.size());
- //cout << "fraction["<<i<<"]: "<< fraction[i] << " a[" << i << "]: " << a[i] << endl;
- }
- //cout << "a.size(): " << a.size() << endl;
- //getchar();
- featcounter = 0;
-
- for(int iCounter = 0; iCounter < imgCount; iCounter++)
- {
- int xsize = (int)currentfeats[iCounter].width();
- int ysize = (int)currentfeats[iCounter].height();
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- if(currentfeats[iCounter].get(x,y,tree) == node)
- {
- int cn = labels[iCounter](x,y);
- double randD = (double)rand()/(double)RAND_MAX;
- if(randD < fraction[labelmap[cn]])
- {
- vector<int> tmp(3,0);
- tmp[0] = iCounter;
- tmp[1] = x;
- tmp[2] = y;
- featcounter++;
- selFeats.insert(tmp);
- e[cn]++;
- }
- }
- }
- }
- }
- //cout << "size: " << selFeats.size() << endl;
- //getchar();
-
- map<int,int>::iterator mapit;
- double globent = 0.0;
- for ( mapit=e.begin() ; mapit != e.end(); mapit++ )
- {
- //cout << "class: " << mapit->first << ": " << mapit->second << endl;
- double p = (double)(*mapit).second/(double)featcounter;
- globent += p*log2(p);
- }
- globent = -globent;
-
- if(globent < 0.5)
- {
- cout << "globent to small: " << globent << endl;
- return 0.0;
- }
-
- int classes = (int)forest[tree][0].dist.size();
- featsel.clear();
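- // draw featsPerSplit random split candidates: ft == 0 uses raw-feature
- // operations only, ft == 1 draws context operations (integral-image based or
- // on the node class distributions) over a larger window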
- for(int i = 0; i < featsPerSplit; i++)
- {
- int x1, x2, y1, y2;
- int ft = (int)((double)rand()/(double)RAND_MAX*(double)ftypes);
-
- int tmpws = windowSize;
-
- if(integralImgs[0].width() == 0)
- ft = 0;
-
- if(ft > 0)
- {
- tmpws *= 2;
- }
-
- if(useGaussian)
- {
- double sigma = (double)tmpws/2.0;
- x1 = randGaussDouble(sigma)*(double)tmpws;
- x2 = randGaussDouble(sigma)*(double)tmpws;
- y1 = randGaussDouble(sigma)*(double)tmpws;
- y2 = randGaussDouble(sigma)*(double)tmpws;
- }
- else
- {
- x1 = (int)((double)rand()/(double)RAND_MAX*(double)tmpws)-tmpws/2;
- x2 = (int)((double)rand()/(double)RAND_MAX*(double)tmpws)-tmpws/2;
- y1 = (int)((double)rand()/(double)RAND_MAX*(double)tmpws)-tmpws/2;
- y2 = (int)((double)rand()/(double)RAND_MAX*(double)tmpws)-tmpws/2;
- }
-
- if(ft == 0)
- {
- int f1 = (int)((double)rand()/(double)RAND_MAX*(double)featdim);
- int f2 = (int)((double)rand()/(double)RAND_MAX*(double)featdim);
- int o = (int)((double)rand()/(double)RAND_MAX*(double)ops.size());
- Operation *op = ops[o]->clone();
- op->set(x1,y1,x2,y2,f1,f2, calcVal[ft]);
- featsel.push_back(op);
- }
- else if(ft == 1)
- {
- int chans = integralImgs[0].channels();
- int opssize = (int)ops.size();
- int f1 = (int)((double)rand()/(double)RAND_MAX*(double)chans);
- int f2 = (int)((double)rand()/(double)RAND_MAX*(double)chans);
- int o = (int)((double)rand()/(double)RAND_MAX*((double)cops.size())+(double)opssize);
- Operation *op;
- if(o < opssize)
- {
- op = ops[o]->clone();
- op->set(x1,y1,x2,y2,f1,f2, calcVal[ft]);
- }
- else
- {
- o -= opssize;
- op = cops[o]->clone();
- op->set(x1,y1,x2,y2,f1,f2, calcVal[ft]);
- }
- featsel.push_back(op);
- }
- }
-
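- // evaluate every candidate on the sampled pixels; each observed value is
- // tried as a threshold and the split with the highest information gain
- // (optionally normalized by the Shannon entropy of the split) is kept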
- #pragma omp parallel for private(mapit)
- for(int f = 0; f < featsPerSplit; f++)
- {
- double l_bestig = -numeric_limits< double >::max();
- double l_splitval = -1.0;
- set<vector<int> >::iterator it;
- vector<double> vals;
-
- for ( it=selFeats.begin() ; it != selFeats.end(); it++ )
- {
- Features feat;
- feat.feats = &feats[(*it)[0]];
- feat.cfeats = &currentfeats[(*it)[0]];
- feat.cTree = tree;
- feat.tree = &forest[tree];
- feat.integralImg = &integralImgs[(*it)[0]];
- vals.push_back(featsel[f]->getVal(feat, (*it)[1], (*it)[2]));
- }
-
- int counter = 0;
- for ( it=selFeats.begin() ; it != selFeats.end(); it++ , counter++)
- {
- set<vector<int> >::iterator it2;
- double val = vals[counter];
-
- map<int,int> eL, eR;
- int counterL = 0, counterR = 0;
- int counter2 = 0;
- for ( it2=selFeats.begin() ; it2 != selFeats.end(); it2++, counter2++ )
- {
- int cn = labels[(*it2)[0]]((*it2)[1], (*it2)[2]);
- //cout << "vals[counter2] " << vals[counter2] << " val: " << val << endl;
- if(vals[counter2] < val)
- {
- //left entropy:
- eL[cn] = eL[cn]+1;
- counterL++;
- }
- else
- {
- //right entropy:
- eR[cn] = eR[cn]+1;
- counterR++;
- }
- }
-
- double leftent = 0.0;
- for ( mapit=eL.begin() ; mapit != eL.end(); mapit++ )
- {
- double p = (double)(*mapit).second/(double)counterL;
- leftent -= p*log2(p);
- }
-
- double rightent = 0.0;
- for ( mapit=eR.begin() ; mapit != eR.end(); mapit++ )
- {
- double p = (double)(*mapit).second/(double)counterR;
- rightent -= p*log2(p);
- }
- //cout << "rightent: " << rightent << " leftent: " << leftent << endl;
-
- double pl = (double)counterL/(double)(counterL+counterR);
- double ig = globent - (1.0-pl) * rightent - pl*leftent;
- //double ig = globent - rightent - leftent;
-
- if(useShannonEntropy)
- {
- double esplit = - ( pl*log(pl) + (1-pl)*log(1-pl) );
- ig = 2*ig / ( globent + esplit );
- }
-
- if(ig > l_bestig)
- {
- l_bestig = ig;
- l_splitval = val;
- }
- }
-
- #pragma omp critical
- {
- //cout << "globent: " << globent << " bestig " << bestig << " splitfeat: " << splitfeat << " splitval: " << splitval << endl;
- //cout << "globent: " << globent << " l_bestig " << l_bestig << " f: " << p << " l_splitval: " << l_splitval << endl;
- //cout << "p: " << featsubset[f] << endl;
- if(l_bestig > bestig)
- {
- bestig = l_bestig;
- splitop = featsel[f];
- splitval = l_splitval;
- }
- }
- }
- //splitop->writeInfos();
- //cout<< "ig: " << bestig << endl;
-
- /*for(int i = 0; i < featsPerSplit; i++)
- {
- if(featsel[i] != splitop)
- delete featsel[i];
- }*/
- #ifdef DEBUG
- cout << "globent: " << globent << " bestig " << bestig << " splitval: " << splitval << endl;
- #endif
- return bestig;
- }
- inline double SemSegContextTree::getMeanProb(const int &x,const int &y,const int &channel, const MultiChannelImageT<int> &currentfeats)
- {
- double val = 0.0;
- for(int tree = 0; tree < nbTrees; tree++)
- {
- val += forest[tree][currentfeats.get(x,y,tree)].dist[channel];
- }
-
- return val / (double)nbTrees;
- }
- void SemSegContextTree::computeIntegralImage(const NICE::MultiChannelImageT<int> &currentfeats, const NICE::MultiChannelImageT<int> &lfeats, NICE::MultiChannelImageT<double> &integralImage)
- {
- int xsize = currentfeats.width();
- int ysize = currentfeats.height();
-
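- // channels 0..classes-1 of the integral image accumulate the mean class
- // probabilities of the forest; the remaining channels accumulate the raw
- // local features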
- int channels = (int)forest[0][0].dist.size();
- #pragma omp parallel for
- for(int c = 0; c < channels; c++)
- {
- integralImage.set(0,0,getMeanProb(0,0,c, currentfeats), c);
-
- //first column
- for(int y = 1; y < ysize; y++)
- {
- integralImage.set(0,y,getMeanProb(0,y,c, currentfeats)+integralImage.get(0,y-1,c), c);
- }
-
- //first row
- for(int x = 1; x < xsize; x++)
- {
- integralImage.set(x,0,getMeanProb(x,0,c, currentfeats)+integralImage.get(x-1,0,c), c);
- }
-
- //rest
- for(int y = 1; y < ysize; y++)
- {
- for(int x = 1; x < xsize; x++)
- {
- double val = getMeanProb(x,y,c,currentfeats)+integralImage.get(x,y-1,c)+integralImage.get(x-1,y,c)-integralImage.get(x-1,y-1,c);
- integralImage.set(x, y, val, c);
- }
- }
- }
-
- int channels2 = (int)lfeats.channels();
- if(lfeats.get(xsize-1,ysize-1,0) == 0)
- {
- #pragma omp parallel for
- for(int c = channels, c1 = 0; c1 < channels2; c++, c1++)
- {
- integralImage.set(0,0,lfeats.get(0,0,c1), c);
-
- //first column
- for(int y = 1; y < ysize; y++)
- {
- integralImage.set(0,y,lfeats.get(0,y,c1)+integralImage.get(0,y-1,c), c);
- }
-
- //first row
- for(int x = 1; x < xsize; x++)
- {
- integralImage.set(x,0,lfeats.get(x,0,c1)+integralImage.get(x-1,0,c), c);
- }
-
- //rest
- for(int y = 1; y < ysize; y++)
- {
- for(int x = 1; x < xsize; x++)
- {
- double val = lfeats.get(x,y,c1)+integralImage.get(x,y-1,c)+integralImage.get(x-1,y,c)-integralImage.get(x-1,y-1,c);
- integralImage.set(x, y, val, c);
- }
- }
- }
- }
- }
- void SemSegContextTree::train ( const MultiDataset *md )
- {
- const LabeledSet train = * ( *md ) ["train"];
- const LabeledSet *trainp = &train;
-
- ProgressBar pb ( "compute feats" );
- pb.show();
-
- //TODO: memory hog! would a sparse representation pay off?
- vector<MultiChannelImageT<double> > allfeats;
- vector<MultiChannelImageT<int> > currentfeats;
- vector<MatrixT<int> > labels;
- std::string forbidden_classes_s = conf->gS ( "analysis", "donttrain", "" );
- if ( forbidden_classes_s == "" )
- {
- forbidden_classes_s = conf->gS ( "analysis", "forbidden_classes", "" );
- }
-
- classnames.getSelection ( forbidden_classes_s, forbidden_classes );
-
- int imgcounter = 0;
-
- LOOP_ALL_S ( *trainp )
- {
- EACH_INFO ( classno,info );
- NICE::ColorImage img;
- std::string currentFile = info.img();
- CachedExample *ce = new CachedExample ( currentFile );
- const LocalizationResult *locResult = info.localization();
- if ( locResult->size() <= 0 )
- {
- fprintf ( stderr, "WARNING: NO ground truth polygons found for %s !\n",
- currentFile.c_str() );
- continue;
- }
- fprintf ( stderr, "SemSegCsurka: Collecting pixel examples from localization info: %s\n", currentFile.c_str() );
- int xsize, ysize;
- ce->getImageSize ( xsize, ysize );
-
- MatrixT<int> tmpMat(xsize,ysize);
-
- currentfeats.push_back(MultiChannelImageT<int>(xsize,ysize,nbTrees));
- currentfeats[imgcounter].setAll(0);
- labels.push_back(tmpMat);
- try {
- img = ColorImage(currentFile);
- } catch (Exception) {
- cerr << "SemSeg: error opening image file <" << currentFile << ">" << endl;
- continue;
- }
- Globals::setCurrentImgFN ( currentFile );
- //TODO: resize image?!
- MultiChannelImageT<double> feats;
- allfeats.push_back(feats);
- #ifdef LOCALFEATS
- lfcw->getFeats(img, allfeats[imgcounter]);
- #else
- allfeats[imgcounter].reInit(xsize, ysize, 3, true);
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- for(int r = 0; r < 3; r++)
- {
- allfeats[imgcounter].set(x,y,img.getPixel(x,y,r),r);
- }
- }
- }
- #endif
-
- // getting groundtruth
- NICE::Image pixelLabels (xsize, ysize);
- pixelLabels.set(0);
- locResult->calcLabeledImage ( pixelLabels, ( *classNames ).getBackgroundClass() );
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- classno = pixelLabels.getPixel(x, y);
- labels[imgcounter](x,y) = classno;
- if ( forbidden_classes.find ( classno ) != forbidden_classes.end() )
- continue;
- labelcounter[classno]++;
-
- }
- }
-
- imgcounter++;
- pb.update ( trainp->count());
- delete ce;
- }
- pb.hide();
-
- map<int,int>::iterator mapit;
- int classes = 0;
- for(mapit = labelcounter.begin(); mapit != labelcounter.end(); mapit++)
- {
- labelmap[mapit->first] = classes;
-
- labelmapback[classes] = mapit->first;
- classes++;
- }
-
- //balancing
- int featcounter = 0;
- a = vector<double>(classes,0.0);
- for(int iCounter = 0; iCounter < imgcounter; iCounter++)
- {
- int xsize = (int)currentfeats[iCounter].width();
- int ysize = (int)currentfeats[iCounter].height();
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- featcounter++;
- int cn = labels[iCounter](x,y);
- a[labelmap[cn]] ++;
- }
- }
- }
-
- for(int i = 0; i < (int)a.size(); i++)
- {
- a[i] /= (double)featcounter;
- }
-
- #ifdef DEBUG
- for(int i = 0; i < (int)a.size(); i++)
- {
- cout << "a["<<i<<"]: " << a[i] << endl;
- }
- cout << "a.size: " << a.size() << endl;
- #endif
-
- int depth = 0;
- for(int t = 0; t < nbTrees; t++)
- {
- vector<TreeNode> tree;
- tree.push_back(TreeNode());
- tree[0].dist = vector<double>(classes,0.0);
- tree[0].depth = depth;
- forest.push_back(tree);
- }
-
- vector<int> startnode(nbTrees,0);
- bool allleaf = false;
- //int baseFeatSize = allfeats[0].size();
-
- vector<MultiChannelImageT<double> > integralImgs(imgcounter,MultiChannelImageT<double>());
-
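- // grow all trees breadth-first: per depth, find the best split for every
- // open node, route the training pixels to the new children and recompute
- // the context integral images afterwards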
- while(!allleaf && depth < maxDepth)
- {
- allleaf = true;
- vector<MultiChannelImageT<int> > lastfeats = currentfeats;
-
- #if 1
- Timer timer;
- timer.start();
- #endif
-
- for(int tree = 0; tree < nbTrees; tree++)
- {
- int t = (int) forest[tree].size();
- int s = startnode[tree];
- startnode[tree] = t;
- //TODO: maybe parallelize this as well, provided the next loop (which carries more weight) would still be parallelized
- //#pragma omp parallel for
- for(int i = s; i < t; i++)
- {
- if(!forest[tree][i].isleaf && forest[tree][i].left < 0)
- {
- Operation *splitfeat = NULL;
- double splitval;
- double bestig = getBestSplit(allfeats, lastfeats, integralImgs, labels, i, splitfeat, splitval, tree);
-
- forest[tree][i].feat = splitfeat;
- forest[tree][i].decision = splitval;
-
- if(splitfeat != NULL)
- {
- allleaf = false;
- int left = forest[tree].size();
- forest[tree].push_back(TreeNode());
- forest[tree].push_back(TreeNode());
- int right = left+1;
- forest[tree][i].left = left;
- forest[tree][i].right = right;
- forest[tree][left].dist = vector<double>(classes, 0.0);
- forest[tree][right].dist = vector<double>(classes, 0.0);
- forest[tree][left].depth = depth+1;
- forest[tree][right].depth = depth+1;
-
- #pragma omp parallel for
- for(int iCounter = 0; iCounter < imgcounter; iCounter++)
- {
- int xsize = currentfeats[iCounter].width();
- int ysize = currentfeats[iCounter].height();
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- if(currentfeats[iCounter].get(x, y, tree) == i)
- {
- Features feat;
- feat.feats = &allfeats[iCounter];
- feat.cfeats = &lastfeats[iCounter];
- feat.cTree = tree;
- feat.tree = &forest[tree];
- feat.integralImg = &integralImgs[iCounter];
- double val = splitfeat->getVal(feat,x,y);
- int branch = (val < splitval) ? left : right;
- currentfeats[iCounter].set(x,y,branch,tree);
- // the images are processed in parallel above, so the shared class
- // histograms of the new children must be updated under a critical section
- #pragma omp critical
- {
- forest[tree][branch].dist[labelmap[labels[iCounter](x,y)]]++;
- }
- }
- }
- }
- }
-
- double lcounter = 0.0, rcounter = 0.0;
- for(uint d = 0; d < forest[tree][left].dist.size(); d++)
- {
- if ( forbidden_classes.find ( labelmapback[d] ) != forbidden_classes.end() )
- {
- forest[tree][left].dist[d] = 0;
- forest[tree][right].dist[d] = 0;
- }
- else
- {
- forest[tree][left].dist[d]/=a[d];
- lcounter +=forest[tree][left].dist[d];
- forest[tree][right].dist[d]/=a[d];
- rcounter +=forest[tree][right].dist[d];
- }
- }
-
- assert(lcounter > 0 && rcounter > 0);
- for(uint d = 0; d < forest[tree][left].dist.size(); d++)
- {
- forest[tree][left].dist[d]/=lcounter;
- forest[tree][right].dist[d]/=rcounter;
- }
- }
- else
- {
- forest[tree][i].isleaf = true;
- }
- }
- }
- }
- //TODO: recompute the features!
-
- //compute integral image
- int channels = classes+allfeats[0].channels();
-
- if(integralImgs[0].width() == 0)
- {
- for(int i = 0; i < imgcounter; i++)
- {
- int xsize = allfeats[i].width();
- int ysize = allfeats[i].height();
- integralImgs[i].reInit(xsize, ysize, channels);
- }
- }
-
- for(int i = 0; i < imgcounter; i++)
- {
- computeIntegralImage(currentfeats[i],allfeats[i], integralImgs[i]);
- }
- #if 1
- timer.stop();
- cout << "time for depth " << depth << ": " << timer.getLast() << endl;
- #endif
- depth++;
- #ifdef DEBUG
- cout << "depth: " << depth << endl;
- #endif
- }
-
-
- #ifdef DEBUG
- for(int tree = 0; tree < nbTrees; tree++)
- {
- int t = (int) forest[tree].size();
- for(int i = 0; i < t; i++)
- {
- printf("tree[%i]: left: %i, right: %i", i, forest[tree][i].left, forest[tree][i].right);
- if(!forest[tree][i].isleaf && forest[tree][i].left != -1)
- cout << ", feat: " << forest[tree][i].feat->writeInfos() << " ";
- for(int d = 0; d < (int)forest[tree][i].dist.size(); d++)
- {
- cout << " " << forest[tree][i].dist[d];
- }
- cout << endl;
- }
- }
- #endif
- }
- void SemSegContextTree::semanticseg ( CachedExample *ce, NICE::Image & segresult,NICE::MultiChannelImageT<double> & probabilities )
- {
- int xsize;
- int ysize;
- ce->getImageSize ( xsize, ysize );
-
- int numClasses = classNames->numClasses();
-
- fprintf (stderr, "ContextTree classification !\n");
- probabilities.reInit ( xsize, ysize, numClasses, true );
- probabilities.setAll ( 0 );
- NICE::ColorImage img;
- std::string currentFile = Globals::getCurrentImgFN();
-
- try {
- img = ColorImage(currentFile);
- } catch (Exception) {
- cerr << "SemSeg: error opening image file <" << currentFile << ">" << endl;
- return;
- }
-
- //TODO: resize image?!
-
- MultiChannelImageT<double> feats;
- #ifdef LOCALFEATS
- lfcw->getFeats(img, feats);
- #else
- feats.reInit (xsize, ysize, 3, true);
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- for(int r = 0; r < 3; r++)
- {
- feats.set(x,y,img.getPixel(x,y,r),r);
- }
- }
- }
- #endif
-
- bool allleaf = false;
-
- MultiChannelImageT<double> integralImg;
-
- MultiChannelImageT<int> currentfeats(xsize, ysize, nbTrees);
- currentfeats.setAll(0);
- int depth = 0;
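- // route every pixel one level further down each tree per iteration and
- // recompute the context integral image, until all pixels have reached leaves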
- while(!allleaf)
- {
- allleaf = true;
- //TODO: maybe parallelize this, provided the next loop (which carries more weight) would also still be parallelized
- //#pragma omp parallel for
- MultiChannelImageT<int> lastfeats = currentfeats;
- for(int tree = 0; tree < nbTrees; tree++)
- {
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++)
- {
- int t = currentfeats.get(x,y,tree);
- if(forest[tree][t].left > 0)
- {
- allleaf = false;
- Features feat;
- feat.feats = &feats;
- feat.cfeats = &lastfeats;
- feat.cTree = tree;
- feat.tree = &forest[tree];
- feat.integralImg = &integralImg;
-
- double val = forest[tree][t].feat->getVal(feat,x,y);
-
- if(val < forest[tree][t].decision)
- {
- currentfeats.set(x, y, forest[tree][t].left, tree);
- }
- else
- {
- currentfeats.set(x, y, forest[tree][t].right, tree);
- }
- }
- }
- }
-
- //compute integral image
- int channels = (int)labelmap.size()+feats.channels();
-
- if(integralImg.width() == 0)
- {
- int xsize = feats.width();
- int ysize = feats.height();
-
- integralImg.reInit(xsize, ysize, channels);
- }
- }
- computeIntegralImage(currentfeats,feats, integralImg);
-
- depth++;
- }
-
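- // assign labels: either per pixel (argmax of the averaged leaf distributions)
- // or per segmentation region (argmax of the distributions summed over the region)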
- if(pixelWiseLabeling)
- {
- //final labeling (per pixel):
- long int offset = 0;
- for(int x = 0; x < xsize; x++)
- {
- for(int y = 0; y < ysize; y++,offset++)
- {
- double maxvalue = - numeric_limits<double>::max(); //TODO: this only needs to be done per node, not per pixel
- int maxindex = 0;
- uint s = forest[0][0].dist.size();
- for(uint i = 0; i < s; i++)
- {
- probabilities.data[labelmapback[i]][offset] = getMeanProb(x,y,i,currentfeats);
- if(probabilities.data[labelmapback[i]][offset] > maxvalue)
- {
- maxvalue = probabilities.data[labelmapback[i]][offset];
- maxindex = labelmapback[i];
- }
- }
- segresult.setPixel(x,y,maxindex);
- }
- }
- }
- else
- {
- //final labeling using segmentation
- //TODO: segmentation
- Matrix regions;
- int regionNumber = segmentation->segRegions(img,regions);
- cout << "regions: " << regionNumber << endl;
- int dSize = (int)labelmap.size();
- vector<vector<double> > regionProbs(regionNumber, vector<double>(dSize,0.0));
- vector<int> bestlabels(regionNumber, 0);
-
- for(int y = 0; y < img.height(); y++)
- {
- for(int x = 0; x < img.width(); x++)
- {
- int cregion = regions(x,y);
- for(int d = 0; d < dSize; d++)
- {
- regionProbs[cregion][d]+=getMeanProb(x,y,d,currentfeats);
- }
- }
- }
-
- for(int r = 0; r < regionNumber; r++)
- {
- double maxval = regionProbs[r][0];
- for(int d = 1; d < dSize; d++)
- {
- if(maxval < regionProbs[r][d])
- {
- maxval = regionProbs[r][d];
- bestlabels[r] = d;
- }
- }
- bestlabels[r] = labelmapback[bestlabels[r]];
- }
-
- for(int y = 0; y < img.height(); y++)
- {
- for(int x = 0; x < img.width(); x++)
- {
- segresult.setPixel(x,y,bestlabels[regions(x,y)]);
- }
- }
- }
- }