/**
 * @file SemSegContextTree.h
 * @brief Context Trees -> combination of decision trees and context information
 * @author Björn Fröhlich
 * @date 29.11.2011
 */
#ifndef SemSegContextTreeINCLUDE
#define SemSegContextTreeINCLUDE

#include "SemanticSegmentation.h"

#include "core/vector/VVector.h"
#include "core/image/MultiChannelImage3DT.h"

#include "vislearning/features/localfeatures/LFColorWeijer.h"

#include "segmentation/RegionSegmentationMethod.h"

#include "semseg3d/semseg/operations/Operations.h"

#include "gp-hik-core/GPHIKClassifier.h"

namespace OBJREC {

/** Semantic segmentation with context trees (a random decision forest using local and context features) */
class SemSegContextTree : public SemanticSegmentation, public NICE::Persistent
{
  private:
    /** segmentation method */
    RegionSegmentationMethod *segmentation;

    /** forest -> each tree is saved as a vector of nodes */
    std::vector<std::vector<TreeNode> > forest;

    /** local features */
    LFColorWeijer *lfcw;

    /** number of feature types -> currently: local and context features = 2 */
    int ftypes;

    /** maximum number of samples per tree */
    int maxSamples;

    /** size of the neighbourhood window */
    int windowSize;

    /** multiplier for the window size if a context feature is used */
    int contextMultiplier;

    /** how many features should be considered for a split */
    int featsPerSplit;

    /** count of samples per label */
    std::map<int, int> labelcounter;

    /** map of labels */
    std::map<int, int> labelmap;

    /** inverse map of labels */
    std::map<int, int> labelmapback;

    /** scale factor for balancing each class */
    std::vector<double> a;

    /** the minimum number of features allowed in a leaf */
    int minFeats;

    /** maximal depth of a tree */
    int maxDepth;

    /** current depth during training */
    int depth;

    /** how many split tests */
    int randomTests;

    /** prototype operations for pairwise features */
    std::vector<std::vector<Operation*> > ops;

    /** use alternative calculation of the information gain */
    bool useShannonEntropy;

    /** class names */
    ClassNames classnames;

    /** train selection */
    std::set<int> forbidden_classes;

    /** config file */
    const NICE::Config *conf;

    /** use pixel-wise labeling or region labeling with an additional segmentation */
    bool pixelWiseLabeling;

    /** number of trees used for the forest */
    int nbTrees;

    /** use gradient image or not */
    bool useGradient;

    /** use color features of van de Weijer or not */
    bool useWeijer;

    /** use additional input layer or not */
    bool useAdditionalLayer;

    /** use regions as extra feature channel or not */
    bool useRegionFeature;

    /** use external image categorization to avoid some classes */
    bool useCategorization;

    /** categorization information for external categorization */
    std::string cndir;

    /** how to handle each channel
     * 0: simple gray-value features
     * 1: which pixel belongs to which region
     * 2: gray-value integral images
     * 3: context integral images
     * 4: simple context features
     */
    std::vector<int> channelType;

    /** list of channels per feature type */
    std::vector<std::vector<int> > channelsPerType;

    /** whether to use the geometric features of Hoiem (only offline computation with MATLAB supported) */
    bool useHoiemFeatures;

    /** save / load trained icf classifier */
    bool saveLoadData;

    /** file location of the trained icf classifier */
    std::string fileLocation;

    /** first iteration or not */
    bool firstiteration;

    /** which integral image channel belongs to which raw value channel */
    std::vector<std::pair<int, int> > integralMap;

    /** number of gray-value channels */
    int rawChannels;

    /** classifier for categorization */
    NICE::GPHIKClassifier *fasthik;

    /** unique numbers for nodes */
    int uniquenumber;

    /**
     * the actual training method
     * @param trainp pointer to the training data
     */
    void train ( const LabeledSet * trainp );

  public:
    /** simple constructor */
    SemSegContextTree ( const NICE::Config *conf, const MultiDataset *md );

    /** simple destructor */
    virtual ~SemSegContextTree();

    /**
     * classify each pixel of a single 3D image
     * @param imgData input data
     * @param segresult segmentation result
     * @param probabilities probabilities for each pixel
     * @param filelist filenames of the input data
     */
    void classify ( NICE::MultiChannelImage3DT<double> &imgData,
                    NICE::MultiChannelImageT<double> &segresult,
                    NICE::MultiChannelImage3DT<double> &probabilities,
                    const std::vector<std::string> &filelist );

    /**
     * the training method, checking for an already existing trained classifier on disk
     * @param md training data
     */
    void train ( const MultiDataset *md );

    /**
     * @brief computes the integral image of the given features
     *
     * @param nodeIndices matrix with the current node for each feature
     * @param integralImage output image (must be initialized)
     * @param firstChannel index of the first channel
     * @return void
     **/
    void computeIntegralImage ( const NICE::MultiChannelImage3DT<unsigned short int> &nodeIndices,
                                NICE::MultiChannelImage3DT<double> &integralImage,
                                int firstChannel );

    /**
     * @brief reads the images and does some simple conversions
     *
     * @param imgData output image containing the feature maps
     * @param filelist filenames of the input data
     * @param amountRegions output number of regions
     * @return void
     **/
    void addFeatureMaps ( NICE::MultiChannelImage3DT<double> &imgData,
                          const std::vector<std::string> &filelist,
                          int &amountRegions );

    /**
     * compute the best split for the current settings
     * @param feats features
     * @param nodeIndices matrix with the current node for each feature
     * @param labels labels for each feature
     * @param node current node
     * @param splitop output selected split operation
     * @param splitval output threshold for the selected feature
     * @param tree index of the current tree
     * @param regionProbs class probabilities for each region
     * @return double best information gain value
     */
    double getBestSplit ( std::vector<NICE::MultiChannelImage3DT<double> > &feats,
                          std::vector<NICE::MultiChannelImage3DT<unsigned short int> > &nodeIndices,
                          const std::vector<NICE::MultiChannelImageT<int> > &labels,
                          int node,
                          Operation *&splitop,
                          double &splitval,
                          const int &tree,
                          std::vector<std::vector<std::vector<double> > > &regionProbs );

    /**
     * @brief computes the mean probability of a given class over all trees
     * @param x x position
     * @param y y position
     * @param z z position
     * @param channel current class
     * @param nodeIndices matrix with the current node for each feature
     * @return double mean value
     **/
    inline double getMeanProb ( const int &x, const int &y, const int &z, const int &channel,
                                const NICE::MultiChannelImage3DT<unsigned short int> &nodeIndices );

    /**
     * @brief load all data from the given input stream
     *
     * @param is input stream
     * @param format has no influence
     * @return void
     **/
    virtual void restore ( std::istream & is, int format = 0 );

    /**
     * @brief save all data to the given output stream
     *
     * @param os output stream
     * @param format has no influence
     * @return void
     **/
    virtual void store ( std::ostream & os, int format = 0 ) const;

    /**
     * @brief clean up
     *
     * @return void
     **/
    virtual void clear () {}
};
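
/**
 * @brief Usage sketch (illustrative only, not part of the interface).
 *
 * The snippet below shows how a caller might train and apply this class. The
 * surrounding setup (reading the NICE::Config, building the MultiDataset,
 * filling imgData and filelist) is assumed and not defined in this header;
 * consult the corresponding .cpp and the framework's example programs for the
 * exact conventions.
 *
 * @code
 * OBJREC::SemSegContextTree semseg ( conf, md );   // conf: const NICE::Config*, md: const MultiDataset*
 * semseg.train ( md );                             // trains, or loads an already stored classifier
 *
 * NICE::MultiChannelImage3DT<double> imgData;       // input channels of one 3D volume
 * NICE::MultiChannelImageT<double> segresult;       // per-voxel segmentation result
 * NICE::MultiChannelImage3DT<double> probabilities; // per-class probabilities for each voxel
 * std::vector<std::string> filelist;                // filenames of the raw input data
 * semseg.classify ( imgData, segresult, probabilities, filelist );
 * @endcode
 */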

} // namespace

#endif