
Merge branch 'master' of dbv.inf-cv.uni-jena.de:nice/nice-vislearning

Erik Rodner committed 9 years ago
commit 6825e7a112
100 changed files with 5937 additions and 417 deletions
  1. + 3 - 14  .gitignore
  2. + 2 - 0  CMakeLists.txt
  3. + 1 - 1  baselib/ProgressBarQt.cpp
  4. + 1 - 1  baselib/cc.cpp
  5. + 4 - 0  baselib/progs/scaleKernelMatrix.cpp
  6. + 5 - 0  baselib/progs/testProgressBar.cpp
  7. + 17 - 1  cbaselib/ClassNames.cpp
  8. + 9 - 0  cbaselib/ClassNames.h
  9. + 23 - 7  cbaselib/ClassificationResult.cpp
  10. + 6 - 0  cbaselib/ClassificationResult.h
  11. + 53 - 1  cbaselib/Example.cpp
  12. + 22 - 0  cbaselib/Example.h
  13. + 1 - 1  cbaselib/FeaturePool.cpp
  14. + 24 - 15  cbaselib/LabeledFileList.cpp
  15. + 121 - 14  cbaselib/LocalizationResult.cpp
  16. + 3 - 1  cbaselib/LocalizationResult.h
  17. + 3 - 3  cbaselib/MultiDataset.cpp
  18. + 117 - 7  cbaselib/Polygon.cpp
  19. + 15 - 9  cbaselib/Polygon.h
  20. + 4 - 0  cbaselib/progs/calcCurves.cpp
  21. + 4 - 0  cbaselib/progs/createNormTrainingSet.cpp
  22. + 4 - 0  cbaselib/progs/splitLabeledSetVector.cpp
  23. + 4 - 0  cbaselib/progs/statisticsTrainingSet.cpp
  24. + 5 - 0  cbaselib/progs/testCachedExample.cpp
  25. + 4 - 0  cbaselib/progs/testLabeledSet.cpp
  26. + 1 - 1  classifier/GenericFPClassifierSelection.h
  27. + 10 - 6  classifier/classifierbase/VecClassifier.h
  28. + 1 - 1  classifier/classifiercombination/VCPreRandomForest.cpp
  29. + 1 - 1  classifier/classifierinterfaces/VCFeaturePool.cpp
  30. + 111 - 52  classifier/fpclassifier/gphik/FPCGPHIK.cpp
  31. + 34 - 4  classifier/fpclassifier/gphik/FPCGPHIK.h
  32. + 8 - 0  classifier/fpclassifier/gphik/Makefile
  33. + 103 - 0  classifier/fpclassifier/gphik/Makefile.inc
  34. + 0 - 0  classifier/fpclassifier/gphik/tests/Makefile.inc
  35. + 1 - 1  classifier/fpclassifier/gphik/tests/TestFPCGPHIK.cpp
  36. + 0 - 0  classifier/fpclassifier/gphik/tests/TestFPCGPHIK.h
  37. + 0 - 0  classifier/fpclassifier/gphik/tests/sparse20x30matrixM.mat
  38. + 0 - 0  classifier/fpclassifier/gphik/tests/sparse3x3matrixA.mat
  39. + 0 - 0  classifier/fpclassifier/gphik/tests/toyExample1.data
  40. + 0 - 0  classifier/fpclassifier/gphik/tests/toyExample2.data
  41. + 0 - 0  classifier/fpclassifier/gphik/tests/toyExampleLargeLargeScale.data
  42. + 0 - 0  classifier/fpclassifier/gphik/tests/toyExampleLargeScale.data
  43. + 1 - 1  classifier/fpclassifier/logisticregression/libdepend.inc
  44. + 574 - 0  classifier/fpclassifier/randomforest/DTBObliqueLS.cpp
  45. + 209 - 0  classifier/fpclassifier/randomforest/DTBObliqueLS.h
  46. + 1 - 1  classifier/fpclassifier/randomforest/DTBRandom.cpp
  47. + 27 - 17  classifier/fpclassifier/randomforest/FPCRandomForests.cpp
  48. + 3 - 3  classifier/fpclassifier/randomforest/FPCRandomForests.h
  49. + 1 - 1  classifier/fpclassifier/randomforest/libdepend.inc
  50. + 1 - 1  classifier/genericClassifierSelection.h
  51. + 7 - 7  classifier/kernelclassifier/LaplaceApproximation.cpp
  52. + 2 - 2  classifier/kernelclassifier/libdepend.inc
  53. + 11 - 5  classifier/kernelclassifier/tests/TestLaplace.cpp
  54. + 2 - 2  classifier/libdepend.inc
  55. + 5 - 0  classifier/progs/toyExampleUnsupervisedGP.cpp
  56. + 2 - 2  classifier/vclassifier/libdepend.inc
  57. + 5 - 1  featureLearning/progs/testFeatureLearning.cpp
  58. + 304 - 0  features/fpfeatures/ConvolutionFeature.cpp
  59. + 159 - 0  features/fpfeatures/ConvolutionFeature.h
  60. + 0 - 0  features/fpfeatures/VectorFeature.cpp
  61. + 0 - 0  features/fpfeatures/VectorFeature.h
  62. + 6 - 1  features/fpfeatures/createFeatures.cpp
  63. + 99 - 0  features/fpfeatures/tests/TestVectorFeature.cpp
  64. + 26 - 0  features/fpfeatures/tests/TestVectorFeature.h
  65. + 42 - 0  features/fpfeatures/tests/toyExample1.data
  66. + 20 - 1  features/localfeatures/LocalFeatureColorWeijer.cpp
  67. + 9 - 0  features/localfeatures/LocalFeatureColorWeijer.h
  68. + 2 - 2  features/localfeatures/libdepend.inc
  69. + 5 - 1  features/localfeatures/progs/computeLocalFeatures.cpp
  70. + 2 - 2  features/localfeatures/progs/sift-driver.cpp
  71. + 3 - 3  features/regionfeatures/libdepend.inc
  72. + 68 - 6  features/simplefeatures/Codebook.cpp
  73. + 4 - 2  features/simplefeatures/Codebook.h
  74. + 582 - 0  features/simplefeatures/CodebookRandomForest.cpp
  75. + 139 - 0  features/simplefeatures/CodebookRandomForest.h
  76. + 2 - 1  features/simplefeatures/libdepend.inc
  77. + 444 - 0  features/simplefeatures/matlab/CodebookRandomForestMex.cpp
  78. + 9 - 0  features/simplefeatures/matlab/Makefile
  79. + 21 - 0  features/simplefeatures/matlab/config.conf
  80. + 2 - 0  features/simplefeatures/matlab/libdepend.inc
  81. + 102 - 0  features/simplefeatures/matlab/testHelperDataConversionMex.cpp
  82. + 45 - 0  features/simplefeatures/matlab/unittestCodebookRandomForestMex.m
  83. + 19 - 0  features/simplefeatures/matlab/unittestHelperDataConversionMex.m
  84. + 2 - 0  features/simplefeatures/progs/libdepend.inc
  85. + 407 - 0  features/simplefeatures/progs/progCodebookRandomForest.cpp
  86. + 89 - 0  features/simplefeatures/tests/Makefile.inc
  87. + 173 - 0  features/simplefeatures/tests/TestCodebookRandomForest.cpp
  88. + 26 - 0  features/simplefeatures/tests/TestCodebookRandomForest.h
  89. + 21 - 0  features/simplefeatures/tests/config.conf
  90. + 12 - 0  features/simplefeatures/tests/libdepend.inc
  91. + 1502 - 0  features/simplefeatures/tests/toyExampleLargeLargeScale.data
  92. + 3 - 3  math/cluster/libdepend.inc
  93. + 5 - 0  math/cluster/progs/testKMeans.cpp
  94. + 0 - 84  math/distances/Kernel.cpp
  95. + 0 - 43  math/distances/Kernel.h
  96. + 0 - 37  math/distances/KernelExp.cpp
  97. + 0 - 45  math/distances/KernelExp.h
  98. + 1 - 1  math/distances/KernelStd.h
  99. + 1 - 1  math/ftransform/PCA.h
  100. + 5 - 0  math/pdf/PDFGaussian.h

+ 3 - 14
.gitignore

@@ -1,15 +1,4 @@
 # Lines starting with '#' are considered comments.
-# Ignore any file named paper_occ.pdf
-#paper_occ.pdf
-# Ignore (generated) html files,
-#*.html
-#Ignore tex-specific files
-#
-#Think about whether or not to include the resulting moc files
-#*.moc
-# Definitely do not look for temporary files
-*~
-# except foo.html which is maintained by hand.
-#!foo.html
-# Ignore objects and archives.
-#*.[oa]
+
+# ignore unit test result xml files
+*Tests_testresults.xml

+ 2 - 0
CMakeLists.txt

@@ -18,6 +18,8 @@ nice_build_library()
 
 nice_add_progs()
 
+nice_add_mexes()
+
 nice_add_unittests()
 
 #####

+ 1 - 1
baselib/ProgressBarQt.cpp

@@ -45,7 +45,7 @@ ProgressBarQt::ProgressBarQt ( const std::string & _name, bool _useGraphics )
 
     progressdialog = new QProgressDialog ( "Process at work ...", "Cancel",
                                            0, 100 );
-    layout = new QGridLayout ( dialogwindow, 1, 1 );
+    layout = new QGridLayout ( dialogwindow );
     layout->addWidget ( progressdialog, 0, 0 );
     dialogwindow->setLayout ( layout );
   }

+ 1 - 1
baselib/cc.cpp

@@ -270,7 +270,7 @@ double ColorConversion::ccmax(double val1, double val2, double val3)
 
      if      ( r == var_Max ) *h = del_B - del_G;
      else if ( g == var_Max ) *h = ( 1./3 ) + del_R - del_B;
-     else if ( g == var_Max ) *h = ( 2./3 ) + del_G - del_R;
+     else if ( b == var_Max ) *h = ( 2./3 ) + del_G - del_R;
 
      if ( *h < 0 ) *h += 1;
      if ( *h > 1 ) *h -= 1;
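
Note on the fix above: the second branch previously repeated the test "g == var_Max", so the blue-dominant case of this RGB-to-HSV hue computation was unreachable; the corrected branch tests "b == var_Max".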

+ 4 - 0
baselib/progs/scaleKernelMatrix.cpp

@@ -19,7 +19,11 @@ using namespace NICE;
 */
 int main (int argc, char **argv)
 {   
+#ifndef __clang__
+#ifndef __llvm__
     std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
 
     Config conf ( argc, argv );
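
Note on the guard above (it recurs in several programs in this commit): __gnu_cxx::__verbose_terminate_handler is a libstdc++ extension, so the check could also key on the standard library rather than on the compiler. A minimal sketch, not part of this commit:

    #ifdef __GLIBCXX__  // defined whenever libstdc++ headers are in use
        std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
    #endif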
     

+ 5 - 0
baselib/progs/testProgressBar.cpp

@@ -34,7 +34,12 @@ using namespace std;
 
 int main(int argc, char **argv)
 {
+#ifndef __clang__
+#ifndef __llvm__
 	std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
+
 	Config mainconf(argc, argv);
 
 	OBJREC::ProgressBar P;

+ 17 - 1
cbaselib/ClassNames.cpp

@@ -395,7 +395,12 @@ void ClassNames::getClassnoFromColor ( int & classno, int r, int g, int b ) cons
   boost::unordered_map<long, int>::const_iterator i = tbl_color_classno.find ( color );
 #else
 //  __gnu_cxx::hash_map<long, int>::const_iterator i = tbl_color_classno.find ( color );
+#ifdef __clang__
+  std::unordered_map<long, int>::const_iterator i = tbl_color_classno.find(color);
+#else
   std::tr1::unordered_map<long, int>::const_iterator i = tbl_color_classno.find ( color );  
+#endif
+
 #endif
 
   if ( i == tbl_color_classno.end() )
@@ -406,7 +411,7 @@ void ClassNames::getClassnoFromColor ( int & classno, int r, int g, int b ) cons
   }
 }
 
-void ClassNames::labelToRGB ( const NICE::Image & img, NICE::ColorImage & rgb ) const
+void ClassNames::labelToRGB ( const NICE::ImageT<int> & img, NICE::ColorImage & rgb ) const
 {
   int red, green, blue;
 
@@ -424,6 +429,17 @@ void ClassNames::labelToRGB ( const NICE::Image & img, NICE::ColorImage & rgb )
 
 }
 
+void ClassNames::labelToRGB ( const NICE::Image & img, NICE::ColorImage & rgb ) const
+{
+    NICE::ImageT<int> imgInt ( img.width(), img.height() );
+
+    for ( int y = 0 ; y < img.height(); y++ )
+      for ( int x = 0 ; x < img.width(); x++ )
+          imgInt.setPixelQuick( x, y, img.getPixelQuick(x,y) );
+
+    labelToRGB ( imgInt, rgb );
+}
+
 int ClassNames::getBackgroundClass () const
 {
   if ( existsClassCode ( "various" ) )

+ 9 - 0
cbaselib/ClassNames.h

@@ -23,8 +23,12 @@
 #endif
 #else
 //#include <ext/hash_map>
+#ifdef __clang__
+#include <unordered_map>
+#else
 #include <tr1/unordered_map>
 #endif
+#endif
 
 #include "core/basics/Config.h"
 #include "core/basics/Persistent.h"
@@ -46,7 +50,11 @@ class ClassNames : public NICE::Persistent
     boost::unordered_map<long, int> tbl_color_classno;
 #else
 //	__gnu_cxx::hash_map<long, int> tbl_color_classno;
+#ifdef __clang__
+    std::unordered_map<long, int> tbl_color_classno;
+#else
     std::tr1::unordered_map<long, int> tbl_color_classno;
+#endif
 #endif
     std::map<int, long> tbl_classno_color;
 
@@ -110,6 +118,7 @@ class ClassNames : public NICE::Persistent
     /** colorize a labeled image using color information given in the class
      *  information file */
     void labelToRGB ( const NICE::Image & img, NICE::ColorImage & rgb ) const;
+    void labelToRGB ( const NICE::ImageT<int> & img, NICE::ColorImage & rgb ) const;
 
     int getBackgroundClass () const;
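
A short usage sketch for the new integer-typed overload declared above (the function name here is hypothetical): an ImageT<int> can carry class numbers above 255, which the unsigned-char-based NICE::Image cannot represent.

    void colorizeLabels ( const OBJREC::ClassNames & classNames,
                          const NICE::ImageT<int> & labels,
                          NICE::ColorImage & rgb )
    {
        // look up the RGB color registered for each class number
        classNames.labelToRGB ( labels, rgb );
    }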
 

+ 23 - 7
cbaselib/ClassificationResult.cpp

@@ -39,14 +39,30 @@ ClassificationResult::ClassificationResult ( int _classno, double _score, int ma
   uncertainty = 0.0;
 }
 
-ClassificationResult::ClassificationResult ( int _classno, const FullVector & _scores )
+ClassificationResult::ClassificationResult ( int _classno, 
+                                             const FullVector & _scores 
+                                           )
 {
-  rejection_status = REJECTION_NONE;
-  classno = _classno;
-  scores = _scores;
-  classname = "unknown";
-  classno_groundtruth = -1;
-  uncertainty = 0.0;
+  this->rejection_status = REJECTION_NONE;
+  this->classno = _classno;
+  this->scores = _scores;
+  this->classname = "unknown";
+  this->classno_groundtruth = -1;
+  this->uncertainty = 0.0;
+}
+
+ClassificationResult::ClassificationResult ( int _classno, 
+                                             const SparseVector & _scores 
+                                           )
+{
+  throw("No conversion from SparseVector to FullVector available. Aborting!");
+  
+  this->rejection_status = REJECTION_NONE;
+  this->classno = _classno;  
+
+  this->classname = "unknown";
+  this->classno_groundtruth = -1;
+  this->uncertainty = 0.0;
 }
 
 ClassificationResult::~ClassificationResult()

+ 6 - 0
cbaselib/ClassificationResult.h

@@ -71,6 +71,12 @@ class ClassificationResult
     /** result of classification consists of most probable class @p classno
         and a score for each of the other classes */
     ClassificationResult ( int classno, const FullVector & scores );
+    
+    /** result of classification consists of most probable class @p classno
+        and a (sparse) score for each of the other classes */
+    ClassificationResult ( int _classno, 
+                           const NICE::SparseVector & _scores 
+                         );    
 
     /** simple destructor */
     virtual ~ClassificationResult();
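
Since the new SparseVector constructor deliberately throws (there is no SparseVector-to-FullVector conversion yet), a caller can densify the scores before constructing a result. A sketch, assuming the sparse scores are indexed by class number and FullVector supports indexed assignment:

    // hypothetical helper: expand sparse class scores into a FullVector
    FullVector densifyScores ( const NICE::SparseVector & sparseScores, int maxClassNo )
    {
        FullVector scores ( maxClassNo + 1 );
        for ( int c = 0; c <= maxClassNo; c++ )
            scores[c] = 0.0;
        for ( NICE::SparseVector::const_iterator it = sparseScores.begin();
              it != sparseScores.end(); it++ )
            scores[ it->first ] = it->second;
        return scores;
    }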

+ 53 - 1
cbaselib/Example.cpp

@@ -1,5 +1,9 @@
 #include "Example.h"
 
+#ifdef NICE_USELIB_OPENMP
+#include <omp.h>
+#endif
+
 using namespace OBJREC;
 using namespace std;
 using namespace NICE;
@@ -191,5 +195,53 @@ void Examples::clean ()
 		Example & example = i->second;
 		example.clean();
 	}
-	clear();
+    clear();
+}
+
+bool Examples::wrapExamplesAroundFeatureMatrix(const Matrix &p_MatFeaturesColumWiseSamples, const Vector &p_VecLabels, Examples &p_Examples)
+{
+    size_t t_iNumSamples  = p_MatFeaturesColumWiseSamples.cols();
+    size_t t_iNumFeatures = p_MatFeaturesColumWiseSamples.rows();
+
+    if(p_VecLabels.size() != t_iNumSamples) // for every column-wise sample there needs to be a label
+        return false;
+
+    p_Examples.reserve( t_iNumSamples );
+
+    const double *pDataPtr = p_MatFeaturesColumWiseSamples.getDataPointer();
+    for (size_t i = 0; i < t_iNumSamples; i++, pDataPtr+= t_iNumFeatures )
+    {
+        NICE::Vector *t_pVecTrainData = new NICE::Vector( pDataPtr , t_iNumFeatures);
+        double t_fWeight = 1.0f;
+        OBJREC::Example t_Example(t_pVecTrainData, t_fWeight);
+
+        p_Examples.push_back( std::pair<int, OBJREC::Example>( (int)p_VecLabels[i], t_Example ) );
+    }
+
+    return true;
+}
+
+bool Examples::wrapExamplesAroundFeatureMatrix(const Matrix &p_MatFeaturesColumWiseSamples, const VectorT<int> &p_VecLabels, Examples &p_Examples)
+{
+    size_t t_iNumSamples  = p_MatFeaturesColumWiseSamples.cols();
+    size_t t_iNumFeatures = p_MatFeaturesColumWiseSamples.rows();
+
+    if(p_VecLabels.size() != t_iNumSamples) // for every column-wise sample there needs to be a label
+        return false;
+    p_Examples.resize( t_iNumSamples );
+    const double *pDataPtr = p_MatFeaturesColumWiseSamples.getDataPointer();
+    
+#ifdef NICE_USELIB_OPENMP
+#pragma omp parallel for default(none) shared(p_VecLabels, p_Examples, t_iNumFeatures, t_iNumSamples, pDataPtr)
+#endif
+    for (size_t i = 0; i < t_iNumSamples; i++)
+    {
+	const double *pDataIteration = pDataPtr + (i * t_iNumFeatures);
+        NICE::Vector *t_pVecTrainData = new NICE::Vector( (double*) pDataIteration , t_iNumFeatures, VectorBase::external);
+        // OBJREC::Example t_Example(t_pVecTrainData, t_fWeight);
+
+	p_Examples[i] = std::pair<int, OBJREC::Example>( p_VecLabels[i], OBJREC::Example(t_pVecTrainData, 1.0) ) ;
+    }
+
+    return true;
 }

+ 22 - 0
cbaselib/Example.h

@@ -130,6 +130,28 @@ class Examples : public std::vector< std::pair<int, Example> >
     /** delete all data associated with all examples
         (sparse vector, vector, cached example, etc.) */
     void clean ();
+
+    /**
+     * @brief Create an Examples object from a given full matrix of features and a vector of sample labels.
+     *
+     * The Examples object consists of individual Example objects containing the label and a pointer to the provided raw feature data (stored in the variable Example::vec).
+     * Note: No feature data is copied - an Example only contains a pointer to the raw double data.
+     * A NICE::Vector is created as a wrapper around this raw double pointer, using it but not copying it.
+     * You need to delete these wrapper vectors once you are finished working with the Examples object, otherwise you create a memory leak.
+     * Calling Examples::clean() handles this.
+     *
+     * Note: the memory layout needs to be transposed into rows x columns, i.e. features x samples.
+     * Features must lie next to each other in memory, so that each feature vector can
+     * be addressed by a starting pointer and the number of feature dimensions to come.
+     *
+     * @param p_MatFeaturesColumWiseSamples matrix containing the features (dimension M) of N samples ( M x N matrix )
+     * @param p_VecLabels vector containing the labels for the N samples ( 1 x N )
+     * @param p_Examples created Examples object (vector of N Example objects, each containing a valid vec pointer to the [uncopied] feature data)
+     * @return true for successful Examples creation
+     * @author Johannes Ruehle
+     */
+    static bool wrapExamplesAroundFeatureMatrix(const NICE::Matrix &p_MatFeaturesColumWiseSamples, const NICE::Vector &p_VecLabels, Examples &p_Examples);
+    static bool wrapExamplesAroundFeatureMatrix(const NICE::Matrix &p_MatFeaturesColumWiseSamples, const NICE::VectorT<int> &p_VecLabels, Examples &p_Examples);
 };
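
A usage sketch for the wrapper above (dimensions made up): the feature matrix must outlive the Examples object, since each Example only points into it.

    // 128-dimensional features for 1000 samples, stored column-wise
    NICE::Matrix features ( 128, 1000 );
    NICE::Vector labels ( 1000, 0.0 );
    // ... fill features and labels ...

    OBJREC::Examples examples;
    if ( OBJREC::Examples::wrapExamplesAroundFeatureMatrix ( features, labels, examples ) )
    {
        // ... train a classifier on 'examples' ...
        examples.clean (); // frees the wrapper vectors, not the matrix data
    }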
 
 

+ 1 - 1
cbaselib/FeaturePool.cpp

@@ -9,7 +9,7 @@
 #include <stdlib.h>
 
 #include "FeaturePool.h"
-#include "VectorFeature.h"
+#include "vislearning/features/fpfeatures/VectorFeature.h"
 
 using namespace OBJREC;
 

+ 24 - 15
cbaselib/LabeledFileList.cpp

@@ -90,7 +90,8 @@ LocalizationResult *LabeledFileList::getLocalizationInfo ( const ClassNames & cl
 
     lr = new LocalizationResult ( &classnames, mask, classno );
 
-  } else if ( format == "imagergb" ) {
+  }
+  else if ( format == "imagergb" ) {
     NICE::ColorImage mask;
     try {
       mask.read ( lfile );
@@ -102,27 +103,35 @@ LocalizationResult *LabeledFileList::getLocalizationInfo ( const ClassNames & cl
     }
     lr = new LocalizationResult ( &classnames, mask );
 
-  } else if ( format == "polygon" ) {
-    lr = new LocalizationResult ( &classnames );
+  }
+  else if ( format == "polygon" ) {
+      lr = new LocalizationResult ( &classnames );
 
-    lr->read ( lfile, LocalizationResult::FILEFORMAT_POLYGON );
+      lr->read ( lfile, LocalizationResult::FILEFORMAT_POLYGON );
 
       if ( debug_dataset )
-	  fprintf (stderr, "LabeledFileList: object localization %d\n", (int)lr->size() );
-    }
-    else if ( format == "imagelabeler" ) {
+        fprintf (stderr, "LabeledFileList: object localization %d\n", (int)lr->size() );
+  }
+  else if ( format == "polygon_siftflow" ) {
+      lr = new LocalizationResult ( &classnames );
 
-        lr = new LocalizationResult ( &classnames );
-        lr->loadImageInfo(lfile);
+      lr->read ( lfile, LocalizationResult::FILEFORMAT_POLYGON_SIFTFLOW );
 
-    }
-    else {
-      fthrow(Exception, "Localization format not yet supported !!\n");
-    }
+      if ( debug_dataset )
+        fprintf (stderr, "LabeledFileList: object localization %d\n", (int)lr->size() );
+  }
+  else if ( format == "imagelabeler" ) {
+    lr = new LocalizationResult ( &classnames );
+    lr->loadImageInfo(lfile);
+
+  }
+  else {
+    fthrow(Exception, "Localization format not yet supported !!\n");
+  }
    
-    if ( debug_dataset )
+  if ( debug_dataset )
 	if ( lr != NULL )
-		    fprintf (stderr, "%s (%d objects)\n", lfile.c_str(), (int)lr->size() );
+      fprintf (stderr, "%s (%d objects)\n", lfile.c_str(), (int)lr->size() );
 
   return lr;
 }

+ 121 - 14
cbaselib/LocalizationResult.cpp

@@ -357,6 +357,91 @@ void LocalizationResult::restore (istream & is, int format)
 		}
 
 		//sortEmpricalDepth();
+    } else if ( format == FILEFORMAT_POLYGON_SIFTFLOW ) {
+        // parser for xml annotations of SIFTFlow dataset
+        if ( is.good() )
+        {
+            std::string tmp;
+            is >> tmp;      // annotation tag
+
+            bool b_endOfBlock = false;
+
+            while ( !b_endOfBlock )
+            {
+                is >> tmp;  // get current line
+
+                // reached end of file properly
+                if ( this->isEndTag ( tmp, "annotation") )
+                {
+                    b_endOfBlock = true;
+                    continue;
+                }
+
+                StringTools::normalize_string( tmp );
+                tmp = this->removeStartTag ( tmp );
+
+                // found new single localization result
+                if ( tmp.compare("object") == 0 )
+                {
+                    std::string classname;
+                    is >> classname;
+                    classname = classname.substr( 6, classname.length()-13 );  //remove tags
+                    int classno = cn->classnoFromText(classname);
+
+                    bool foundPolygonBlock = false;
+
+                    while ( !foundPolygonBlock )
+                    {
+                        is >> tmp;
+                        StringTools::normalize_string( tmp );
+                        tmp = this->removeStartTag ( tmp );
+
+                        if ( tmp.compare("polygon") == 0 )
+                            foundPolygonBlock = true;
+                    }
+                    is >> tmp;  // 'username' line
+
+                    NICE::Region newPolygon;
+                    bool endOfPolyBlock = false;
+
+                    while ( !endOfPolyBlock )
+                    {
+                        is >> tmp;      // <pt> or </polygon> ?
+                        if ( this->isEndTag ( tmp, "polygon" ) )
+                        {
+                            endOfPolyBlock = true;
+                            continue;
+                        }
+
+                        int x, y;
+
+                        is >> tmp;      // <x> ... </x>
+                        StringTools::normalize_string( tmp );
+                        tmp = tmp.substr( 3, tmp.length()-7 );  //remove tags
+                        x = atoi ( tmp.c_str() );
+
+                        is >> tmp;      // <y> ... </y>
+                        StringTools::normalize_string( tmp );
+                        tmp = tmp.substr( 3, tmp.length()-7 );  //remove tags
+                        y = atoi ( tmp.c_str() );
+
+                        newPolygon.add( x, y );
+
+                        is >> tmp;      // </pt>
+                    }
+                    if ( classno >= 0 ) {
+                        ClassificationResult *r = new ClassificationResult ( classno, 1.0, cn->getMaxClassno() );
+                        SingleLocalizationResult *sr = new SingleLocalizationResult ( r, newPolygon );
+                        push_back ( sr );
+                    }
+                }
+
+            }
+        }
+        else
+        {
+            fthrow(IOException, "LocalizationResult::restore: InStream not initialized !");
+        }
     }
     else {
 		fthrow(IOException, "LocalizationResult::restore: file format not yet supported !");
@@ -501,25 +586,47 @@ void LocalizationResult::sortEmpricalDepth()
     sort ( begin(), end(), depthCompare );
 }
 
-void LocalizationResult::calcLabeledImage ( NICE::Image & mark, int backgroundClassNo ) const
+void LocalizationResult::calcLabeledImage (
+        NICE::ImageT<int> & mark,
+        int backgroundClassNo ) const
 {
     mark.set(backgroundClassNo);
 
     fprintf (stderr, "LocalizationResult: calcLabeledImage %zd\n", size() );
     for ( int y = 0 ; y < mark.height(); y++ )
-	for ( int x = 0 ; x < mark.width(); x++ )
-	{
-	    for ( LocalizationResult::const_iterator k = begin(); k != end() ; k++ )
-	    {
-			SingleLocalizationResult *slr = *k;
-			const NICE::Region & r = slr->getRegion();
-
-			if ( r.inside(x,y) ) {
-				mark.setPixel(x,y,slr->r->classno);
-				break;
-			}
-	    }
-	}
+        for ( int x = 0 ; x < mark.width(); x++ )
+            for ( LocalizationResult::const_iterator k = begin(); k != end() ; k++ )
+            {
+                SingleLocalizationResult *slr = *k;
+                const NICE::Region & r = slr->getRegion();
+
+                if ( r.inside(x,y) ) {
+                    mark.setPixel(x,y,slr->r->classno);
+                    break;
+                }
+            }
+}
+
+void LocalizationResult::calcLabeledImage (
+        NICE::Image & mark,
+        int backgroundClassNo ) const
+{
+    NICE::ImageT<int> markInt;
+    markInt.resize ( mark.width(), mark.height() );
+    calcLabeledImage( markInt, backgroundClassNo );
+
+    for ( int y = 0; y < markInt.height(); y++ )
+        for ( int x = 0; x < markInt.width(); x++ )
+        {
+            int cLabel = markInt.getPixelQuick( x, y );
+
+            if ( cLabel > std::numeric_limits<unsigned char>::max() )
+                std::cerr << "LocalizationResult::calcLabeledImage: Too many classes! A labeled image with UCHAR is not sufficient!"
+                          << std::endl;
+
+            mark.setPixelQuick( x, y, (unsigned char) cLabel );
+        }
+
 }
 
 void LocalizationResult::getLabeledImageCache ( NICE::Image & mark ) const
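
For reference, the FILEFORMAT_POLYGON_SIFTFLOW parser above expects LabelMe-style annotations roughly of the following shape (reconstructed from the parsing code; every tag has to be a single whitespace-delimited token, since the reader uses stream extraction):

    <annotation>
    <object>
    <name>building</name>
    <polygon>
    <username>anonymous</username>
    <pt>
    <x>10</x>
    <y>20</y>
    </pt>
    <pt>
    <x>30</x>
    <y>40</y>
    </pt>
    </polygon>
    </object>
    </annotation>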

+ 3 - 1
cbaselib/LocalizationResult.h

@@ -106,7 +106,8 @@ class LocalizationResult : public std::vector<SingleLocalizationResult *>, publi
     enum {
 		FILEFORMAT_PASCAL2006_RESULT = 0,
 		FILEFORMAT_PASCAL2006_GROUNDTRUTH,
-        FILEFORMAT_POLYGON
+        FILEFORMAT_POLYGON,
+        FILEFORMAT_POLYGON_SIFTFLOW
     };
 
     LocalizationResult ( int xsize = -1, int ysize = -1 );
@@ -120,6 +121,7 @@ class LocalizationResult : public std::vector<SingleLocalizationResult *>, publi
     void sortDescendingConfidence();
     void getLabeledImageCache ( NICE::Image & mark ) const;
     void calcLabeledImage ( NICE::Image & mark, int backgroundClassNo ) const;
+    void calcLabeledImage ( NICE::ImageT<int> & mark, int backgroundClassNo ) const;
     void setMap ( const NICE::Image & labeledImage );
 
     void displayBoxes ( NICE::ColorImage & img, 

+ 3 - 3
cbaselib/MultiDataset.cpp

@@ -16,7 +16,6 @@
 #endif
 #endif
 
-
 #include "vislearning/cbaselib/ClassNames.h"
 
 #include "core/basics/StringTools.h"
@@ -192,9 +191,10 @@ MultiDataset::MultiDataset( const Config *conf , LabeledSetFactory *pSetFactory)
       // given config's directory
       if( t_DatasetFilename.isRelative() )
       {
-          dataset = t_ConfigFilename.extractPath().str() + dataset;
+          t_DatasetFilename.set( t_ConfigFilename.extractPath().str() + dataset );
       }
-      std::string sDatasetConfFilename =  dataset + "/dataset.conf";
+      t_DatasetFilename.convertToRealPath();
+      std::string sDatasetConfFilename =  t_DatasetFilename.str() + "/dataset.conf";
       Config dsconf ( sDatasetConfFilename.c_str() );
 
       dirs[*i] = dataset;

+ 117 - 7
cbaselib/Polygon.cpp

@@ -1,8 +1,8 @@
-/*!
+/**
  * \file Polygon.cpp
- * \brief
- * \author Gapchich Vladislav
- * \date 23/10/11
+ * \brief a polygon class
+ * \author Gapchich Vladislav, Sven Sickert
+ * \date 23/10/2011 (07/10/2015)
  */
 
 #include "vislearning/cbaselib/Polygon.h"
@@ -100,6 +100,116 @@ Polygon::id() const
 	return id_;
 }
 	
-/*
- * 
- */
+// check whether point is inside polygon or not
+bool
+Polygon::handleEdge ( const int px, const int py,
+                      const int x1, const int y1,
+                      const int x2, const int y2,
+                      int & lastdir, int & c )
+{
+    if (py == y1)
+    {
+        if (px == x1) return true;
+
+        if (y1 > y2)
+        {
+            if (lastdir == -1) // decreasing (cont.)
+                if (x1 < px) c++;
+        }
+
+        if (y1 < y2)
+        {
+            if (lastdir == 1) // increasing (cont.)
+                if (x1 < px) c++;
+        }
+
+        if (y1 == y2)
+        {
+            if ((x1 <= px) && (x2 >= px)) return true;
+        }
+    }
+
+    if ( (y1 > py && y2 < py) || (y1 < py && y2 > py) )
+    {
+        int xz = (int)( (py - y1) * (x2 - x1) / (y2 - y1) + x1 );
+
+        /* is the point lying on the polygon curve? */
+        if (xz == px) return true;
+
+        /* does the scanning line cut the polygon curve left of the point? */
+        if (xz < px) c++;
+    }
+
+    if (y2 > y1) lastdir = 1;
+
+    if (y2 < y1) lastdir = -1;
+
+    return false;
+}
+
+
+bool
+Polygon::insidePolygon ( const int &px, const int &py )
+{
+    int c = 0;
+    int lastdir = 0;
+
+    if ( points_.size() < 2 )
+    {
+        cerr << "Polygon::insidePolygon: Not a valid Polygon curve" << endl;
+        return false;
+    }
+
+    for ( PointsList::const_iterator i = points_.begin();
+          i != points_.end(); ++i )
+    {
+        PointsList::const_iterator j = i;
+        j++;
+
+        if ( j == points_.end() ) j = points_.begin();
+
+        CoordT<int> pi = *i;
+        CoordT<int> pj = *j;
+
+        if (pj.y > pi.y) lastdir = 1;
+
+        if (pj.y < pi.y) lastdir = -1;
+    }
+
+    if (lastdir == 0)
+    {
+        cerr << "Polygon::insidePolygon: Polygon is degenerate" << endl;
+        return false;
+    }
+
+    for ( PointsList::const_iterator i = points_.begin();
+          i != points_.end(); ++i )
+    {
+        PointsList::const_iterator j = i;
+        j++;
+
+        if ( j == points_.end() ) j = points_.begin();
+
+        CoordT<int> pi = *i;
+        CoordT<int> pj = *j;
+
+        if ( handleEdge( px, py, pi.x, pi.y, pj.x, pj.y, lastdir, c ) )
+            return false;
+      }
+
+    if (c & 1) return true;
+
+    return false;
+}
+
+bool
+Polygon::insidePolygon ( const CoordT< int > &aPoint )
+{
+    if (aPoint.x < 0 || aPoint.y < 0) {
+        cerr << "Polygon::insidePolygon(): point does not have valid coordinates"
+             << endl;
+        return false;
+    }
+
+    return insidePolygon ( aPoint.x, aPoint.y );
+}
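
A usage sketch for the new even-odd (crossing number) test, assuming the push() counterpart to the existing pop() for adding points:

    OBJREC::Polygon square;
    square.push ( NICE::CoordT<int> (  0,  0 ) );  // assumed point-adding method
    square.push ( NICE::CoordT<int> ( 10,  0 ) );
    square.push ( NICE::CoordT<int> ( 10, 10 ) );
    square.push ( NICE::CoordT<int> (  0, 10 ) );

    bool inside  = square.insidePolygon ( NICE::CoordT<int> (  5, 5 ) );  // true
    bool outside = square.insidePolygon ( NICE::CoordT<int> ( 20, 5 ) );  // false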

+ 15 - 9
cbaselib/Polygon.h

@@ -1,8 +1,8 @@
-/*!
- * \file Polygon.h
- * \brief
- * \author Gapchich Vladislav
- * \date 23/10/11
+/**
+ * \file Polygon.h
+ * \brief a polygon class
+ * \author Gapchich Vladislav, Sven Sickert
+ * \date 23/10/2011 (07/10/2015)
  */
 
 #ifndef __POLYGON_H__
@@ -33,12 +33,22 @@ class Polygon
     NICE::CoordT< int > pop();
     int id() const;
 
+    // check whether point is inside polygon or not
+    bool insidePolygon ( const NICE::CoordT< int > &aPoint );
+    bool insidePolygon ( const int &px, const int &py );
+
   private:
     PointsList points_;
 
     /// id interpreted as a class label
     int id_;
 
+    // helper function for point in polygon test
+    bool handleEdge ( const int px, const int py,
+                      const int x1, const int y1,
+                      const int x2, const int y2,
+                      int & lastdir, int & c );
+
   public:
     /// unique id that distinguishes this particular bounding box object from all others
     int unique_id_;
@@ -47,7 +57,3 @@ class Polygon
 } //namespace
 
 #endif /* __POLYGON_H__ */
-
-/*
- *
- */

+ 4 - 0
cbaselib/progs/calcCurves.cpp

@@ -49,7 +49,11 @@ void readResults ( const string & resultsfn, vector<pair<double, int> > & result
 */
 int main (int argc, char **argv)
 {   
+#ifndef __clang__
+#ifndef __llvm__
     std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
 
 	Config conf ( argc, argv );
 

+ 4 - 0
cbaselib/progs/createNormTrainingSet.cpp

@@ -47,7 +47,11 @@ using namespace std;
 */
 int main (int argc, char **argv)
 {   
+#ifndef __clang__
+#ifndef __llvm__
     std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
 
     char configfile [300];
     char objectclass_c [1024];

+ 4 - 0
cbaselib/progs/splitLabeledSetVector.cpp

@@ -88,7 +88,11 @@ void normalizeLabeledSetVector(const LabeledSetVector &teachSet,
  */
 int main(int argc, char **argv)
 {
+#ifndef __clang__
+#ifndef __llvm__
 	std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
 
 	Config conf(argc, argv);
 

+ 4 - 0
cbaselib/progs/statisticsTrainingSet.cpp

@@ -42,7 +42,11 @@ using namespace std;
 */
 int main (int argc, char **argv)
 {   
+#ifndef __clang__
+#ifndef __llvm__
     std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
 
     char configfile [300];
     char objectclass_c [1024];

+ 5 - 0
cbaselib/progs/testCachedExample.cpp

@@ -20,7 +20,12 @@ using namespace std;
 
 int main ( int argc, char **argv )
 {
+#ifndef __clang__
+#ifndef __llvm__
   std::set_terminate ( __gnu_cxx::__verbose_terminate_handler );
+#endif
+#endif
+
 
   char configfile [300];
 

+ 4 - 0
cbaselib/progs/testLabeledSet.cpp

@@ -26,7 +26,11 @@ using namespace std;
 */
 int main ( int argc, char **argv )
 {
+#ifndef __clang__
+#ifndef __llvm__
   std::set_terminate ( __gnu_cxx::__verbose_terminate_handler );
+#endif
+#endif
 
   char configfile [300];
 

+ 1 - 1
classifier/GenericFPClassifierSelection.h

@@ -12,7 +12,7 @@
 #include "vislearning/classifier/fpclassifier/randomforest/FPCRandomForests.h"
 #include "vislearning/classifier/fpclassifier/randomforest/FPCDecisionTree.h"
 #include "vislearning/classifier/fpclassifier/logisticregression/FPCSMLR.h"
-#include "vislearning/classifier/fpclassifier/FPCGPHIK.h"
+#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"
 
 
 

+ 10 - 6
classifier/classifierbase/VecClassifier.h

@@ -14,8 +14,12 @@
 #include "vislearning/cbaselib/LabeledSet.h"
 #include "vislearning/cbaselib/ClassificationResult.h"
 
-#define ROADWORKS fthrow(NICE::Exception, "clone(): not yet implemented!");
-#define ROADWORKSADD fthrow(NICE::Exception, "teach (int classno, const NICE::Vector & x ): not yet implemented!");
+#ifndef ROADWORKSVC
+#define ROADWORKSVC fthrow(NICE::Exception, "clone(): not yet implemented!");
+#endif
+#ifndef ROADWORKSADDVC
+#define ROADWORKSADDVC fthrow(NICE::Exception, "teach (int classno, const NICE::Vector & x ): not yet implemented!");
+#endif
 
 namespace OBJREC
 {
@@ -55,18 +59,18 @@ class VecClassifier : public NICE::Persistent
     /** clone this object */
     virtual VecClassifier *clone ( void ) const
     {
-      ROADWORKS;
+      ROADWORKSVC;
     };
     
     virtual void teach (int classno, const NICE::Vector & x )
     {
-      ROADWORKSADD;
+      ROADWORKSADDVC;
     };
 
 };
 
-#undef ROADWORKS
-
+#undef ROADWORKSVC
+#undef ROADWORKSADDVC
 
 } // namespace
 

+ 1 - 1
classifier/classifiercombination/VCPreRandomForest.cpp

@@ -10,7 +10,7 @@
 
 #include <iostream>
 
-#include <vislearning/cbaselib/VectorFeature.h>
+#include <vislearning/features/fpfeatures/VectorFeature.h>
 
 #include "core/image/ImageT.h"
 //#include "core/imagedisplay/ImageDisplay.h"

+ 1 - 1
classifier/classifierinterfaces/VCFeaturePool.cpp

@@ -8,7 +8,7 @@
 #include <iostream>
 
 #include "VCFeaturePool.h"
-#include "vislearning/cbaselib/VectorFeature.h"
+#include "vislearning/features/fpfeatures/VectorFeature.h"
 #include "core/image/ImageT.h"
 //#include "core/imagedisplay/ImageDisplay.h"
 

+ 111 - 52
classifier/fpclassifier/FPCGPHIK.cpp → classifier/fpclassifier/gphik/FPCGPHIK.cpp

@@ -20,60 +20,56 @@ using namespace NICE;
 using namespace OBJREC;
 
 
-FPCGPHIK::FPCGPHIK( const Config *conf, const string & confSection ) 
+void FPCGPHIK::init ( const NICE::Config *conf, const std::string & s_confSection )
 {
-  this->verbose = conf->gB(confSection, "verbose", false);
-  this->useSimpleBalancing = conf->gB(confSection, "use_simple_balancing", false);
-  this->minSamples = conf->gI(confSection, "min_samples", -1);
-  this->performOptimizationAfterIncrement = conf->gB(confSection, "performOptimizationAfterIncrement", true);
+  this->verbose = conf->gB( s_confSection, "verbose", false );
+  this->useSimpleBalancing = conf->gB( s_confSection, "use_simple_balancing", false );
+  this->minSamples = conf->gI( s_confSection, "min_samples", -1 );
+  this->performOptimizationAfterIncrement = conf->gB( s_confSection, "performOptimizationAfterIncrement", false );
   
-  classifier = new GPHIKClassifier(conf, confSection);
+  this->classifier = new GPHIKClassifier(conf, s_confSection);
 }
 
-FPCGPHIK::~FPCGPHIK()
+FPCGPHIK::FPCGPHIK( ) 
 {
-  if ( classifier != NULL )
-    delete classifier;
+  this->classifier = NULL;
 }
 
-ClassificationResult FPCGPHIK::classify ( Example & pe )
-{  
-
-  NICE::SparseVector *svec;// = pe.svec;  
-  
-  // was only a NICE::Vector given?
-  // Than we had to allocate a new NICE::SparseVector and converted the given NICE::Vector into it.
-  bool newvec = false;  
+FPCGPHIK::FPCGPHIK( const Config *conf, const string & confSection ) 
+{
+  this->classifier = NULL;
   
-  if ( pe.svec != NULL )
+  // if no config file was given, we either restore the classifier from an external file, or run ::init with
+  // an empty config (thereby using default values) when calling the train-method
+  if ( conf != NULL )
   {
-    svec = pe.svec;
+    this->init(conf, confSection);
   }
-  else
-  {
-    NICE::Vector x;
-
-    x = * ( pe.vec );
-
-    svec = new NICE::SparseVector ( x );
+}
 
-    svec->setDim ( x.size() );
+FPCGPHIK::~FPCGPHIK()
+{
+  if ( this->classifier != NULL )
+    delete this->classifier;
+  this->classifier = NULL;
+}
 
-    newvec = true;
-  }  
-    
-  ClassificationResult result ( this->classify( svec )  );
-    
-  if ( newvec )
-    delete svec;
+ClassificationResult FPCGPHIK::classify ( Example & pe )
+{
+  const SparseVector *svec = pe.svec;
 
-  return result;  
+  if ( svec == NULL )
+    fthrow(Exception, "FPCGPHIK requires example.svec (SparseVector stored in an Example struct)");
+  return this->classify( svec );
 }
 
 ClassificationResult FPCGPHIK::classify ( const NICE::SparseVector * example )
 {
+  if ( this->classifier == NULL )
+    fthrow(Exception, "Classifier not trained yet -- aborting!" );
+  
   NICE::SparseVector scores;
-  int result;
+  uint result;
   
   double uncertainty;
  
@@ -82,11 +78,12 @@ ClassificationResult FPCGPHIK::classify ( const NICE::SparseVector * example )
   if ( scores.size() == 0 ) {
     fthrow(Exception, "Zero scores, something is likely to be wrong here: svec.size() = " << example->size() );
   }
-  int classes = scores.getDim();
+  
+  uint classes = scores.getDim();
   FullVector fvscores(classes);
   
   NICE::SparseVector::const_iterator it;
-  for(int c = 0; c < classes; c++)
+  for( uint c = 0; c < classes; c++ )
   {
     it = scores.find(c);
     if ( it == scores.end() )
@@ -112,6 +109,13 @@ ClassificationResult FPCGPHIK::classify ( const NICE::SparseVector * example )
 /** training process */
 void FPCGPHIK::train ( FeaturePool & fp, Examples & examples )
 {
+  if ( this->classifier == NULL )
+  {
+    std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
+    NICE::Config tmpConfEmpty ;
+    this->init ( &tmpConfEmpty );
+  }  
+  
   // we completely ignore the feature pool :)
   //
   initRand(0);
@@ -175,12 +179,9 @@ void FPCGPHIK::train ( FeaturePool & fp, Examples & examples )
 }
 
 /** training process */
-void FPCGPHIK::train ( const std::vector< const SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
+void FPCGPHIK::train ( const std::vector< const SparseVector *> & examples, std::map< uint, NICE::Vector > & binLabels )
 {
-  
-  std::cerr << "call internal train method " << std::endl;
   classifier->train(examples, binLabels);
-  std::cerr << "training done" << std::endl;
 }
 
 void FPCGPHIK::clear ()
@@ -210,18 +211,62 @@ void FPCGPHIK::predictUncertainty( const NICE::SparseVector * example, double &
   classifier->predictUncertainty(example, uncertainty);
 }
 
-//---------------------------------------------------------------------
-//                           protected methods
-//---------------------------------------------------------------------
+///////////////////// INTERFACE PERSISTENT /////////////////////
+// interface specific methods for store and restore
+///////////////////// INTERFACE PERSISTENT ///////////////////// 
 void FPCGPHIK::restore ( std::istream & is, int format )
 {
   if (is.good())
-  {
-    classifier->restore(is, format);  
-    
+  {    
     std::string tmp;
-    is >> tmp; //"performOptimizationAfterIncrement: "
-    is >> this->performOptimizationAfterIncrement;
+    is >> tmp; //class name 
+    
+    if ( ! this->isStartTag( tmp, "FPCGPHIK" ) )
+    {
+      std::cerr << " WARNING - attempt to restore FPCGPHIK, but start flag " << tmp << " does not match! Aborting... " << std::endl;
+      throw;
+    } 
+    
+    is.precision (numeric_limits<double>::digits10 + 1);
+    
+    bool b_endOfBlock ( false ) ;
+    
+    while ( !b_endOfBlock )
+    {
+      is >> tmp; // start of block 
+      
+      if ( this->isEndTag( tmp, "FPCGPHIK" ) )
+      {
+        b_endOfBlock = true;
+        continue;
+      } 
+
+      tmp = this->removeStartTag( tmp );
+
+      if ( tmp.compare("classifier") == 0 )
+      {
+        if ( classifier == NULL )
+          classifier = new NICE::GPHIKClassifier();
+        
+        //then, load everything that we stored explicitely,
+        // including precomputed matrices, LUTs, eigenvalues, ... and all that stuff
+        classifier->restore(is, format);  
+          
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }
+      else if ( tmp.compare("performOptimizationAfterIncrement") == 0 )
+      {
+        is >> performOptimizationAfterIncrement;        
+        is >> tmp; // end of block 
+        tmp = this->removeEndTag ( tmp );
+      }      
+      else
+      {
+      std::cerr << "WARNING -- unexpected FPCGPHIK object -- " << tmp << " -- for restoration... aborting" << std::endl;
+      throw;
+      }
+    } // while-loop
   }
   else
   {
@@ -235,9 +280,19 @@ void FPCGPHIK::store ( std::ostream & os, int format ) const
   {
     os.precision (numeric_limits<double>::digits10 + 1);
     
+    // show starting point
+    os << this->createStartTag( "FPCGPHIK" ) << std::endl;        
+    
+    os << this->createStartTag( "classifier" ) << std::endl;
     classifier->store(os, format);
+    os << this->createEndTag( "classifier" ) << std::endl;
     
-    os << "performOptimizationAfterIncrement: " << performOptimizationAfterIncrement << std::endl;
+    os << this->createStartTag( "performOptimizationAfterIncrement" ) << std::endl;  
+    os << performOptimizationAfterIncrement << std::endl;
+    os << this->createEndTag( "performOptimizationAfterIncrement" ) << std::endl;     
+    
+    // done
+    os << this->createEndTag( "FPCGPHIK" ) << std::endl;
   }
   else
   {
@@ -245,6 +300,10 @@ void FPCGPHIK::store ( std::ostream & os, int format ) const
   }
 }
 
+///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+// interface specific methods for incremental extensions
+///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+
 void FPCGPHIK::addExample( const Example & pe, const double & label)
 {
   const SparseVector *svec = pe.svec;
@@ -258,7 +317,7 @@ void FPCGPHIK::addMultipleExamples( Examples & newExamples)
     return;
   
   // (multi-class) label vector
-  Vector y ( newExamples.size() );
+  NICE::Vector y ( newExamples.size() );
 
   // flat structure of our training data
   std::vector< const SparseVector * > sparseExamples;
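
With the store/restore logic above, a serialized FPCGPHIK is organized in named blocks; schematically (exact tag rendering assumed from createStartTag/createEndTag):

    <FPCGPHIK>
    <classifier>
    ... serialized GPHIKClassifier: precomputed matrices, LUTs, eigenvalues ...
    </classifier>
    <performOptimizationAfterIncrement>
    0
    </performOptimizationAfterIncrement>
    </FPCGPHIK>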

+ 34 - 4
classifier/fpclassifier/FPCGPHIK.h → classifier/fpclassifier/gphik/FPCGPHIK.h

@@ -33,6 +33,12 @@ class FPCGPHIK : public FeaturePoolClassifier
 
   protected:
     
+    /////////////////////////
+    /////////////////////////
+    // PROTECTED VARIABLES //
+    /////////////////////////
+    /////////////////////////    
+    
     NICE::GPHIKClassifier * classifier;
     
     /** verbose flag for useful output*/
@@ -44,10 +50,27 @@ class FPCGPHIK : public FeaturePoolClassifier
     
     /** When adding new examples, do we want to run a whole optimization of all involved hyperparameters? default: false */
     bool performOptimizationAfterIncrement;
+    
+    /////////////////////////
+    /////////////////////////
+    //  PROTECTED METHODS  //
+    /////////////////////////
+    /////////////////////////    
+    
+    /** 
+    * @brief Setup internal variables and objects used
+    * @author Alexander Freytag
+    * @param conf Config file to specify variable settings
+    * @param s_confSection name of the config section to read the settings from
+    */    
+    void init ( const NICE::Config *conf, const std::string & s_confSection = "GPHIKClassifier" );
 
   public:
 
     /** default constructor */
+    FPCGPHIK( );
+    
+    /** standard constructor specifying config settings */
     FPCGPHIK( const NICE::Config *conf, const std::string & confSection = "GPHIKClassifier" );
       
     /** simple destructor */
@@ -75,7 +98,7 @@ class FPCGPHIK : public FeaturePoolClassifier
      * @param examples examples to use given in a sparse data structure
     * @param binLabels corresponding binary labels with class no. There is no need here that every example has exactly one positive entry in this set (1,-1)
      */
-    void train ( const std::vector< const NICE::SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels );
+    void train ( const std::vector< const NICE::SparseVector *> & examples, std::map< uint, NICE::Vector > & binLabels );
     
     ///////////////////// INTERFACE PERSISTENT /////////////////////
     // interface specific methods for store and restore
@@ -86,20 +109,27 @@ class FPCGPHIK : public FeaturePoolClassifier
 
     virtual FeaturePoolClassifier *clone () const;
     
-    /** prediction of classification uncertainty */
+    /** 
+     * @brief prediction of classification uncertainty
+     * @date 19-06-2012 (dd-mm-yyyy)
+     * @author Alexander Freytag
+     * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
+     * @param uncertainty contains the resulting classification uncertainty
+     */
     void predictUncertainty( OBJREC::Example & pe, double & uncertainty );
+    
     /** 
      * @brief prediction of classification uncertainty
      * @date 19-06-2012 (dd-mm-yyyy)
      * @author Alexander Freytag
      * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
-     * @param uncertainties contains the resulting classification uncertainties (1 entry for standard setting, m entries for binary-balanced setting)
+     * @param uncertainty contains the resulting classification uncertainty
      */       
     void predictUncertainty( const NICE::SparseVector * example, double & uncertainty );
     
     ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
     // interface specific methods for incremental extensions
-    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////       
+    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////    
     
     void addExample( const OBJREC::Example & pe, const double & label);
     virtual void addMultipleExamples( OBJREC::Examples & newExamples);

+ 8 - 0
classifier/fpclassifier/gphik/Makefile

@@ -0,0 +1,8 @@
+#TARGETS_FROM:=$(notdir $(patsubst %/,%,$(shell pwd)))/$(TARGETS_FROM)
+#$(info recursively going up: $(TARGETS_FROM) ($(shell pwd)))
+
+all:
+
+%:
+	$(MAKE) TARGETS_FROM=$(notdir $(patsubst %/,%,$(shell pwd)))/$(TARGETS_FROM) -C .. $@
+

+ 103 - 0
classifier/fpclassifier/gphik/Makefile.inc

@@ -0,0 +1,103 @@
+# LIBRARY-DIRECTORY-MAKEFILE
+# conventions:
+# - all subdirectories containing a "Makefile.inc" are considered sublibraries
+#   exception: "progs/" and "tests/" subdirectories!
+# - all ".C", ".cpp" and ".c" files in the current directory are linked to a
+#   library
+# - the library depends on all sublibraries 
+# - the library name is created with $(LIBNAME), i.e. it will be somehow
+#   related to the directory name and with the extension .a
+#   (e.g. lib1/sublib -> lib1_sublib.a)
+# - the library will be added to the default build list ALL_LIBRARIES
+
+# --------------------------------
+# - remember the last subdirectory
+#
+# set the variable $(SUBDIR) correctly to the current subdirectory. this
+# variable can be used throughout the current makefile.inc. The many 
+# SUBDIR_before, _add, and everything are only required so that we can recover
+# the previous content of SUBDIR before exiting the makefile.inc
+
+SUBDIR_add:=$(dir $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)))
+SUBDIR_before:=$(SUBDIR)
+SUBDIR:=$(strip $(SUBDIR_add))
+SUBDIR_before_$(SUBDIR):=$(SUBDIR_before)
+ifeq "$(SUBDIR)" "./"
+SUBDIR:=
+endif
+
+# ------------------------
+# - include subdirectories
+#
+# note the variables $(SUBDIRS_OF_$(SUBDIR)) are required later on to recover
+# the dependencies automatically. if you handle dependencies on your own, you
+# can also dump the $(SUBDIRS_OF_$(SUBDIR)) variable, and include the
+# makefile.inc of the subdirectories on your own...
+
+SUBDIRS_OF_$(SUBDIR):=$(patsubst %/Makefile.inc,%,$(wildcard $(SUBDIR)*/Makefile.inc))
+include $(SUBDIRS_OF_$(SUBDIR):%=%/Makefile.inc)
+
+# ----------------------------
+# - include local dependencies
+#
+# you can specify libraries needed by the individual objects or by the whole
+# directory. the object specific additional libraries are only considered
+# when compiling the specific object files
+# TODO: update documentation...
+
+-include $(SUBDIR)libdepend.inc
+
+$(foreach d,$(filter-out %progs %tests,$(SUBDIRS_OF_$(SUBDIR))),$(eval $(call PKG_DEPEND_INT,$(d))))
+
+# ---------------------------
+# - objects in this directory
+#
+# the use of the variable $(OBJS) is not mandatory. it is mandatory however
+# to update $(ALL_OBJS) in a way that it contains the path and name of
+# all objects. otherwise we can not include the appropriate .d files.
+
+OBJS:=$(patsubst %.cpp,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.cpp))) \
+      $(patsubst %.C,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.C))) \
+	  $(shell grep -ls Q_OBJECT $(SUBDIR)*.h | sed -e's@^@/@;s@.*/@$(OBJDIR)moc_@;s@\.h$$@.o@') \
+      $(patsubst %.c,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.c)))
+ALL_OBJS += $(OBJS)
+
+# ----------------------------
+# - binaries in this directory
+#
+# output of binaries in this directory. none of the variables has to be used.
+# but everything you add to $(ALL_LIBRARIES) and $(ALL_BINARIES) will be
+# compiled with `make all`. be sure again to add the files with full path.
+
+LIBRARY_BASENAME:=$(call LIBNAME,$(SUBDIR))
+ifneq "$(SUBDIR)" ""
+ALL_LIBRARIES+=$(LIBDIR)$(LIBRARY_BASENAME).$(LINK_FILE_EXTENSION)
+endif
+
+# ---------------------
+# - binary dependencies
+#
+# there is no way of determining the binary dependencies automatically, so we
+# follow conventions. the current library depends on all sublibraries.
+# all other dependencies have to be added manually by specifying, that the
+# current .pc file depends on some other .pc file. binaries depending on
+# libraries should exclusively use the .pc files as well.
+
+ifeq "$(SKIP_BUILD_$(OBJDIR))" "1"
+$(LIBDIR)$(LIBRARY_BASENAME).a:
+else
+$(LIBDIR)$(LIBRARY_BASENAME).a:$(OBJS) \
+	$(call PRINT_INTLIB_DEPS,$(PKGDIR)$(LIBRARY_BASENAME).a,.$(LINK_FILE_EXTENSION))
+endif
+
+$(PKGDIR)$(LIBRARY_BASENAME).pc: \
+	$(call PRINT_INTLIB_DEPS,$(PKGDIR)$(LIBRARY_BASENAME).pc,.pc)
+
+# -------------------
+# - subdir management
+#
+# as the last step, always add this line to correctly recover the subdirectory
+# of the makefile including this one!
+
+SUBDIR:=$(SUBDIR_before_$(SUBDIR))
+

+ 0 - 0
classifier/fpclassifier/tests/Makefile.inc → classifier/fpclassifier/gphik/tests/Makefile.inc


+ 1 - 1
classifier/fpclassifier/tests/TestFPCGPHIK.cpp → classifier/fpclassifier/gphik/tests/TestFPCGPHIK.cpp

@@ -16,7 +16,7 @@
 
 //----------
 
-#include "vislearning/classifier/fpclassifier/FPCGPHIK.h"
+#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"
 
 //----------
 

+ 0 - 0
classifier/fpclassifier/tests/TestFPCGPHIK.h → classifier/fpclassifier/gphik/tests/TestFPCGPHIK.h


+ 0 - 0
classifier/fpclassifier/tests/sparse20x30matrixM.mat → classifier/fpclassifier/gphik/tests/sparse20x30matrixM.mat


+ 0 - 0
classifier/fpclassifier/tests/sparse3x3matrixA.mat → classifier/fpclassifier/gphik/tests/sparse3x3matrixA.mat


+ 0 - 0
classifier/fpclassifier/tests/toyExample1.data → classifier/fpclassifier/gphik/tests/toyExample1.data


+ 0 - 0
classifier/fpclassifier/tests/toyExample2.data → classifier/fpclassifier/gphik/tests/toyExample2.data


+ 0 - 0
classifier/fpclassifier/tests/toyExampleLargeLargeScale.data → classifier/fpclassifier/gphik/tests/toyExampleLargeLargeScale.data


+ 0 - 0
classifier/fpclassifier/tests/toyExampleLargeScale.data → classifier/fpclassifier/gphik/tests/toyExampleLargeScale.data


+ 1 - 1
classifier/fpclassifier/logisticregression/libdepend.inc

@@ -1,4 +1,4 @@
-$(call PKG_DEPEND_EXT,OPENMP)
+# $(call PKG_DEPEND_EXT,OPENMP)
 $(call PKG_DEPEND_INT,vislearning/classifier/classifierbase)
 $(call PKG_DEPEND_INT,vislearning/features/fpfeatures)
 $(call PKG_DEPEND_INT,vislearning/optimization/mapestimation)

+ 574 - 0
classifier/fpclassifier/randomforest/DTBObliqueLS.cpp

@@ -0,0 +1,574 @@
+/**
+ * @file DTBObliqueLS.cpp
+ * @brief random oblique decision tree
+ * @author Sven Sickert
+ * @date 10/15/2014
+
+*/
+#include <iostream>
+#include <time.h>
+
+#include "DTBObliqueLS.h"
+#include "vislearning/features/fpfeatures/ConvolutionFeature.h"
+
+#include "core/vector/Algorithms.h"
+
+using namespace OBJREC;
+
+//#define DEBUGTREE
+
+
+using namespace std;
+using namespace NICE;
+
+DTBObliqueLS::DTBObliqueLS ( const Config *conf, string section )
+{
+    saveIndices = conf->gB( section, "save_indices", false);
+    useShannonEntropy = conf->gB( section, "use_shannon_entropy", false );
+    useOneVsOne = conf->gB( section, "use_one_vs_one", false );
+    useDynamicRegularization = conf->gB( section, "use_dynamic_regularization", true );
+
+    splitSteps = conf->gI( section, "split_steps", 20 );
+    maxDepth = conf->gI( section, "max_depth", 10 );
+    minExamples = conf->gI( section, "min_examples", 50);
+    regularizationType = conf->gI( section, "regularization_type", 1 );
+
+    minimumEntropy = conf->gD( section, "minimum_entropy", 10e-5 );
+    minimumInformationGain = conf->gD( section, "minimum_information_gain", 10e-7 );
+    lambdaInit = conf->gD( section, "lambda_init", 0.5 );
+
+}
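
The constructor above corresponds to a config section of the following shape (the section name is whatever the caller passes in; the values shown are the defaults read above):

    [DTBObliqueLS]
    save_indices = false
    use_shannon_entropy = false
    use_one_vs_one = false
    use_dynamic_regularization = true
    split_steps = 20
    max_depth = 10
    min_examples = 50
    regularization_type = 1
    minimum_entropy = 10e-5
    minimum_information_gain = 10e-7
    lambda_init = 0.5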
+
+DTBObliqueLS::~DTBObliqueLS()
+{
+
+}
+
+bool DTBObliqueLS::entropyLeftRight (
+        const FeatureValuesUnsorted & values,
+        double threshold,
+        double* stat_left,
+        double* stat_right,
+        double & entropy_left,
+        double & entropy_right,
+        double & count_left,
+        double & count_right,
+        int maxClassNo )
+{
+    count_left = 0;
+    count_right = 0;
+    int count_unweighted_left = 0;
+    int count_unweighted_right = 0;
+    for ( FeatureValuesUnsorted::const_iterator i = values.begin();
+          i != values.end();
+          i++ )
+    {
+        int classno = i->second;
+        double value = i->first;
+        if ( value < threshold ) {
+            stat_left[classno] += i->fourth;
+            count_left+=i->fourth;
+            count_unweighted_left++;
+        }
+        else
+        {
+            stat_right[classno] += i->fourth;
+            count_right+=i->fourth;
+            count_unweighted_right++;
+        }
+    }
+
+    if (  (count_unweighted_left < minExamples)
+       || (count_unweighted_right < minExamples) )
+        return false;
+
+    entropy_left = 0.0;
+    for ( int j = 0 ; j <= maxClassNo ; j++ )
+        if ( stat_left[j] != 0 )
+            entropy_left -= stat_left[j] * log(stat_left[j]);
+    entropy_left /= count_left;
+    entropy_left += log(count_left);
+
+    entropy_right = 0.0;
+    for ( int j = 0 ; j <= maxClassNo ; j++ )
+        if ( stat_right[j] != 0 )
+            entropy_right -= stat_right[j] * log(stat_right[j]);
+    entropy_right /= count_right;
+    entropy_right += log (count_right);
+
+    return true;
+}
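
entropyLeftRight above computes the entropy of each side directly from the unnormalized, weighted class statistics s_j with total weight N, using the identity

    H = -\sum_j p_j \log p_j = -\frac{1}{N} \sum_j s_j \log s_j + \log N,  with  p_j = s_j / N,

which is exactly the divide-by-count-then-add-log sequence in the code.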
+
+bool DTBObliqueLS::adaptDataAndLabelForMultiClass (
+        const int posClass,
+        const int negClass,
+        NICE::Matrix & X,
+        NICE::Vector & y )
+{
+    bool posHasExamples = false;
+    bool negHasExamples = false;
+    int posCount = 0;
+    int negCount = 0;
+
+    // One-vs-one: Transforming into {-1,0,+1} problem
+    if ( useOneVsOne )
+        for ( int i = 0; i < y.size(); i++ )
+        {
+            if ( y[i] == posClass )
+            {
+                y[i] = 1.0;
+                posHasExamples = true;
+                posCount++;
+            }
+            else if ( y[i] == negClass )
+            {
+                y[i] = -1.0;
+                negHasExamples = true;
+                negCount++;
+            }
+            else
+            {
+                y[i] = 0.0;
+                X.setRow( i, NICE::Vector( X.cols(), 0.0 ) );
+            }
+        }
+    // One-vs-all: Transforming into {-1,+1} problem
+    else
+        for ( int i = 0; i < y.size(); i++ )
+        {
+            if ( y[i] == posClass )
+            {
+                y[i] = 1.0;
+                posHasExamples = true;
+                posCount++;
+            }
+            else
+            {
+                y[i] = -1.0;
+                negHasExamples = true;
+                negCount++;
+            }
+        }
+
+    if ( posHasExamples && negHasExamples )
+        return true;
+    else
+        return false;
+}
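
Note: a standalone sketch of this relabeling on plain arrays (toy labels, hypothetical class numbers). In the one-vs-one case, examples of the positive class get +1, examples of the negative class get -1, and all remaining examples are neutralized with label 0 and a zeroed data row, so they drop out of the least-squares fit:

    #include <cstdio>
    #include <vector>

    int main()
    {
        // toy multi-class labels and a one-dimensional "data matrix"
        std::vector<double> y = { 0, 1, 2, 1, 2, 0 };
        std::vector<double> X = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6 };
        const int posClass = 1, negClass = 2;

        for ( size_t i = 0; i < y.size(); i++ )
        {
            if      ( y[i] == posClass ) y[i] =  1.0;
            else if ( y[i] == negClass ) y[i] = -1.0;
            else { y[i] = 0.0; X[i] = 0.0; } // neutral: zero label and data row
        }

        for ( size_t i = 0; i < y.size(); i++ )
            printf("y[%d] = %+.0f\n", (int)i, y[i]); // 0, +1, -1, +1, -1, 0
        return 0;
    }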
+
+/** refresh data matrix X and label vector y */
+void DTBObliqueLS::getDataAndLabel(
+        const FeaturePool &fp,
+        const Examples &examples,
+        const std::vector<int> &examples_selection,
+        NICE::Matrix & X,
+        NICE::Vector & y,
+        NICE::Vector & w )
+{
+    ConvolutionFeature *f = (ConvolutionFeature*)fp.begin()->second;
+    int amountParams = f->getParameterLength();
+    int amountExamples = examples_selection.size();
+
+    X = NICE::Matrix(amountExamples, amountParams, 0.0 );
+    y = NICE::Vector(amountExamples, 0.0);
+    w = NICE::Vector(amountExamples, 1.0);
+
+    int matIndex = 0;
+    for ( vector<int>::const_iterator si = examples_selection.begin();
+          si != examples_selection.end();
+          si++ )
+    {
+        const pair<int, Example> & p = examples[*si];
+        const Example & ex = p.second;
+
+        NICE::Vector pixelRepr (amountParams, 1.0);
+        f->getFeatureVector( &ex, pixelRepr );
+
+        double label = p.first;
+        pixelRepr *= ex.weight;
+
+        w.set    ( matIndex, ex.weight );
+        y.set    ( matIndex, label );
+        X.setRow ( matIndex, pixelRepr );
+
+        matIndex++;
+    }
+
+}
+
+void DTBObliqueLS::regularizeDataMatrix(
+        const NICE::Matrix &X,
+        NICE::Matrix &XTXreg,
+        const int regOption,
+        const double lambda )
+{
+    XTXreg = X.transpose()*X;
+    NICE::Matrix R;
+    const int dim = X.cols();
+
+    switch (regOption)
+    {
+        // identity matrix
+        case 0:
+            R.resize(dim,dim);
+            R.setIdentity();
+            R *= lambda;
+            XTXreg += R;
+            break;
+
+        // differences operator, k=1
+        case 1:
+            R.resize(dim-1,dim);
+            R.set( 0.0 );
+            for ( int r = 0; r < dim-1; r++ )
+            {
+                R(r,r)   =  1.0;
+                R(r,r+1) = -1.0;
+            }
+            R = R.transpose()*R;
+            R *= lambda;
+            XTXreg += R;
+            break;
+
+        // difference operator, k=2
+        case 2:
+            R.resize(dim-2,dim);
+            R.set( 0.0 );
+            for ( int r = 0; r < dim-2; r++ )
+            {
+                R(r,r)   =  1.0;
+                R(r,r+1) = -2.0;
+                R(r,r+2) =  1.0;
+            }
+            R = R.transpose()*R;
+            R *= lambda;
+            XTXreg += R;
+            break;
+
+        // as in [Chen et al., 2012]
+        case 3:
+        {
+            NICE::Vector q ( dim, (1.0-lambda) );
+            q[0] = 1.0;
+            NICE::Matrix Q;
+            Q.tensorProduct(q,q);
+            R.resize(dim,dim);
+            for ( int r = 0; r < dim; r++ )
+            {
+                for ( int c = 0; c < dim; c++ )
+                    R(r,c) = XTXreg(r,c) * Q(r,c);
+
+                R(r,r) = q[r] * XTXreg(r,r);
+            }
+
+            XTXreg = R;
+            break;
+        }
+
+        // no regularization
+        default:
+            std::cerr << "DTBObliqueLS::regularizeDataMatrix: No regularization applied!"
+                      << std::endl;
+            break;
+    }
+}
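
Note: for regularization type 1 the added penalty R^T*R is the classic tridiagonal smoothness matrix (1/-1 differences of neighboring mask weights). A self-contained sketch with plain nested vectors and a hypothetical dimension that prints the resulting pattern:

    #include <cstdio>
    #include <vector>

    int main()
    {
        const int dim = 5; // hypothetical parameter dimension
        // first-order difference operator R of size (dim-1) x dim
        std::vector< std::vector<double> > R(dim - 1, std::vector<double>(dim, 0.0));
        for ( int r = 0; r < dim - 1; r++ ) { R[r][r] = 1.0; R[r][r+1] = -1.0; }

        // RtR = R^T * R, the matrix added (scaled by lambda) to X^T X
        std::vector< std::vector<double> > RtR(dim, std::vector<double>(dim, 0.0));
        for ( int i = 0; i < dim; i++ )
            for ( int j = 0; j < dim; j++ )
                for ( int r = 0; r < dim - 1; r++ )
                    RtR[i][j] += R[r][i] * R[r][j];

        // prints the tridiagonal pattern: 1 -1 0 ... / -1 2 -1 ... / ...
        for ( int i = 0; i < dim; i++ )
        {
            for ( int j = 0; j < dim; j++ )
                printf("%5.1f", RtR[i][j]);
            printf("\n");
        }
        return 0;
    }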
+
+void DTBObliqueLS::findBestSplitThreshold (
+        FeatureValuesUnsorted &values,
+        SplitInfo &bestSplitInfo,
+        const NICE::Vector &params,
+        const double &e,
+        const int &maxClassNo )
+{
+    double *distribution_left = new double [maxClassNo+1];
+    double *distribution_right = new double [maxClassNo+1];
+    double minValue = (min_element ( values.begin(), values.end() ))->first;
+    double maxValue = (max_element ( values.begin(), values.end() ))->first;
+
+    if ( maxValue - minValue < 1e-7 )
+        std::cerr << "DTBObliqueLS: Difference between min and max of features values to small!"
+                  << " [" << minValue << "," << maxValue << "]" << std::endl;
+
+    // get best thresholds using complete search
+    for ( int i = 0; i < splitSteps; i++ )
+    {
+        double threshold = (i * (maxValue - minValue ) / (double)splitSteps)
+                            + minValue;
+        // preparations
+        double el, er;
+        for ( int k = 0 ; k <= maxClassNo ; k++ )
+        {
+            distribution_left[k] = 0.0;
+            distribution_right[k] = 0.0;
+        }
+
+        /** Test the current split */
+        // Does another split make sense?
+        double count_left;
+        double count_right;
+        if ( ! entropyLeftRight ( values, threshold,
+                                  distribution_left, distribution_right,
+                                  el, er, count_left, count_right, maxClassNo ) )
+            continue;
+
+        // information gain and entropy
+        double pl = (count_left) / (count_left + count_right);
+        double ig = e - pl*el - (1-pl)*er;
+
+        if ( useShannonEntropy )
+        {
+            double esplit = - ( pl*log(pl) + (1-pl)*log(1-pl) );
+            ig = 2*ig / ( e + esplit );
+        }
+
+        if ( ig > bestSplitInfo.informationGain )
+        {
+            bestSplitInfo.informationGain = ig;
+            bestSplitInfo.threshold = threshold;
+            bestSplitInfo.params = params;
+
+            for ( int k = 0 ; k <= maxClassNo ; k++ )
+            {
+                bestSplitInfo.distLeft[k] = distribution_left[k];
+                bestSplitInfo.distRight[k] = distribution_right[k];
+            }
+            bestSplitInfo.entropyLeft = el;
+            bestSplitInfo.entropyRight = er;
+        }
+    }
+
+    //cleaning up
+    delete [] distribution_left;
+    delete [] distribution_right;
+}
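
Note: two details of the search above in isolation: the candidate thresholds form an equidistant grid over [minValue, maxValue), and with use_shannon_entropy the gain is normalized by the entropy of the split itself (a gain-ratio style correction against unbalanced splits). A small standalone sketch with hypothetical numbers:

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const double minV = 0.0, maxV = 1.0; // hypothetical feature value range
        const int splitSteps = 4;

        // candidate thresholds: 0.00, 0.25, 0.50, 0.75
        for ( int i = 0; i < splitSteps; i++ )
            printf("t[%d] = %f\n", i, i * (maxV - minV) / (double)splitSteps + minV);

        // Shannon-normalized gain for a toy split: parent entropy e,
        // child entropies el/er, fraction pl of examples going left
        double e = 0.9, el = 0.3, er = 0.5, pl = 0.6;
        double ig = e - pl * el - (1.0 - pl) * er;
        double esplit = -( pl * log(pl) + (1.0 - pl) * log(1.0 - pl) );
        printf("normalized gain = %f\n", 2.0 * ig / (e + esplit));
        return 0;
    }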
+
+/** recursive building method */
+DecisionNode *DTBObliqueLS::buildRecursive(
+        const FeaturePool & fp,
+        const Examples & examples,
+        std::vector<int> & examples_selection,
+        FullVector & distribution,
+        double e,
+        int maxClassNo,
+        int depth,
+        double lambdaCurrent )
+{
+
+    std::cerr << "DTBObliqueLS: Examples: " << (int)examples_selection.size()
+              << ", Depth: " << (int)depth << ", Entropy: " << e << std::endl;
+
+    // initialize new node
+    DecisionNode *node = new DecisionNode ();
+    node->distribution = distribution;
+
+    // stop criteria: maxDepth, minExamples, min_entropy
+    if (    ( e <= minimumEntropy )
+//         || ( (int)examples_selection.size() < minExamples )
+         || ( depth > maxDepth ) )
+    {
+#ifdef DEBUGTREE
+        std::cerr << "DTBObliqueLS: Stopping criteria applied!" << std::endl;
+#endif
+        node->trainExamplesIndices = examples_selection;
+        return node;
+    }
+
+    // variables
+    FeatureValuesUnsorted values;
+    SplitInfo bestSplitInfo;
+    bestSplitInfo.threshold = 0.0;
+    bestSplitInfo.informationGain = -1.0;
+    bestSplitInfo.distLeft = new double [maxClassNo+1];
+    bestSplitInfo.distRight = new double [maxClassNo+1];
+    bestSplitInfo.entropyLeft = 0.0;
+    bestSplitInfo.entropyRight = 0.0;
+
+    ConvolutionFeature *f = (ConvolutionFeature*)fp.begin()->second;
+    bestSplitInfo.params = f->getParameterVector();
+
+    // Creating data matrix X and label vector y
+    NICE::Matrix X;
+    NICE::Vector y, params, weights;
+    getDataAndLabel( fp, examples, examples_selection, X, y, weights );
+
+    // Transforming into multi-class problem
+    bool hasExamples = false;
+    NICE::Vector yCur;
+    NICE::Matrix XCur;
+
+    while ( !hasExamples )
+    {
+        int posClass, negClass;
+
+        posClass = rand() % (maxClassNo+1);
+        negClass = posClass;
+
+        while ( posClass == negClass )
+            negClass = rand() % (maxClassNo+1);
+
+        yCur = y;
+        XCur = X;
+
+        hasExamples = adaptDataAndLabelForMultiClass(
+            posClass, negClass, XCur, yCur );
+    }
+
+    yCur *= weights;
+
+    // Preparing system of linear equations
+    NICE::Matrix XTXr, G, temp;
+    regularizeDataMatrix( XCur, XTXr, regularizationType, lambdaCurrent );
+    choleskyDecomp(XTXr, G);
+    choleskyInvert(G, XTXr);
+    temp = XTXr * XCur.transpose();
+
+    // Solve system of linear equations in a least squares manner
+    params.multiply(temp,yCur,false);
+
+    // Updating parameter vector in convolutional feature
+    f->setParameterVector( params );
+
+    // Feature Values
+    values.clear();
+    f->calcFeatureValues( examples, examples_selection, values);
+
+    // complete search for threshold
+    findBestSplitThreshold ( values, bestSplitInfo, params, e, maxClassNo );
+
+//    f->setRandomParameterVector();
+//    params = f->getParameterVector();
+//    f->calcFeatureValues( examples, examples_selection, values);
+//    findBestSplitThreshold ( values, bestSplitInfo, params, e, maxClassNo );
+
+    // suppress strange behaviour for values near zero (8.88178e-16)
+    if (bestSplitInfo.entropyLeft < 1.0e-10 ) bestSplitInfo.entropyLeft = 0.0;
+    if (bestSplitInfo.entropyRight < 1.0e-10 ) bestSplitInfo.entropyRight = 0.0;
+
+    // stop criteria: minimum information gain
+    if ( bestSplitInfo.informationGain < minimumInformationGain )
+    {
+#ifdef DEBUGTREE
+        std::cerr << "DTBObliqueLS: Minimum information gain reached!" << std::endl;
+#endif
+        delete [] bestSplitInfo.distLeft;
+        delete [] bestSplitInfo.distRight;
+        node->trainExamplesIndices = examples_selection;
+        return node;
+    }
+
+    /** Save the best split to current node */
+    f->setParameterVector( bestSplitInfo.params );
+    values.clear();
+    f->calcFeatureValues( examples, examples_selection, values);
+    node->f = f->clone();
+    node->threshold = bestSplitInfo.threshold;
+
+    /** Split examples according to best split function */
+    vector<int> examples_left;
+    vector<int> examples_right;
+
+    examples_left.reserve ( values.size() / 2 );
+    examples_right.reserve ( values.size() / 2 );
+    for ( FeatureValuesUnsorted::const_iterator i = values.begin();
+          i != values.end(); i++ )
+    {
+        if ( i->first < bestSplitInfo.threshold )
+            examples_left.push_back ( i->third );
+        else
+            examples_right.push_back ( i->third );
+    }
+
+#ifdef DEBUGTREE
+//    node->f->store( std::cerr );
+//    std::cerr << std::endl;
+    std::cerr << "DTBObliqueLS: Information Gain: " << bestSplitInfo.informationGain
+              << ", Left Entropy: " <<  bestSplitInfo.entropyLeft << ", Right Entropy: "
+              << bestSplitInfo.entropyRight << std::endl;
+#endif
+
+    FullVector distribution_left_sparse ( distribution.size() );
+    FullVector distribution_right_sparse ( distribution.size() );
+    for ( int k = 0 ; k <= maxClassNo ; k++ )
+    {
+        double l = bestSplitInfo.distLeft[k];
+        double r = bestSplitInfo.distRight[k];
+        if ( l != 0 )
+            distribution_left_sparse[k] = l;
+        if ( r != 0 )
+            distribution_right_sparse[k] = r;
+//#ifdef DEBUGTREE
+//        std::cerr << "DTBObliqueLS: Split of Class " << k << " ("
+//                  << l << " <-> " << r << ") " << std::endl;
+//#endif
+    }
+
+    delete [] bestSplitInfo.distLeft;
+    delete [] bestSplitInfo.distRight;
+
+    // update lambda by heuristic [Laptev/Buhmann, 2014]
+    double lambdaLeft, lambdaRight;
+
+    if (useDynamicRegularization)
+    {
+        lambdaLeft = lambdaCurrent *
+            pow(((double)examples_selection.size()/(double)examples_left.size()),(2./f->getParameterLength()));
+        lambdaRight = lambdaCurrent *
+            pow(((double)examples_selection.size()/(double)examples_right.size()),(2./f->getParameterLength()));
+    }
+    else
+    {
+        lambdaLeft = lambdaCurrent;
+        lambdaRight = lambdaCurrent;
+    }
+
+
+    /** Recursion */
+    // left child
+    node->left  = buildRecursive ( fp, examples, examples_left,
+                                   distribution_left_sparse, bestSplitInfo.entropyLeft,
+                                   maxClassNo, depth+1, lambdaLeft );
+    // right child
+    node->right = buildRecursive ( fp, examples, examples_right,
+                                   distribution_right_sparse, bestSplitInfo.entropyRight,
+                                   maxClassNo, depth+1, lambdaRight );
+
+    return node;
+}
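
Note: the core of the method above is the regularized least-squares solve params = (X^T X + lambda*R)^(-1) X^T y, done in the code via choleskyDecomp/choleskyInvert. A self-contained sketch of the same computation for a tiny 2x2 system with identity regularization (type 0) and hypothetical data, using the closed-form 2x2 inverse instead of a Cholesky factorization:

    #include <cstdio>

    int main()
    {
        // hypothetical 3 examples x 2 parameters
        double X[3][2] = { {1.0, 0.5}, {0.2, 1.0}, {0.7, 0.3} };
        double y[3]    = { 1.0, -1.0, 1.0 };
        const double lambda = 0.5;

        // A = X^T X + lambda * I,  b = X^T y
        double A[2][2] = { {lambda, 0.0}, {0.0, lambda} };
        double b[2] = { 0.0, 0.0 };
        for ( int i = 0; i < 3; i++ )
            for ( int r = 0; r < 2; r++ )
            {
                b[r] += X[i][r] * y[i];
                for ( int c = 0; c < 2; c++ )
                    A[r][c] += X[i][r] * X[i][c];
            }

        // closed-form 2x2 solve (stand-in for the Cholesky route in the code)
        double det = A[0][0] * A[1][1] - A[0][1] * A[1][0];
        double p0 = (  A[1][1] * b[0] - A[0][1] * b[1] ) / det;
        double p1 = ( -A[1][0] * b[0] + A[0][0] * b[1] ) / det;
        printf("params = (%f, %f)\n", p0, p1);
        return 0;
    }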
+
+/** initial building method */
+DecisionNode *DTBObliqueLS::build ( const FeaturePool & fp,
+                                        const Examples & examples,
+                                        int maxClassNo )
+{
+    int index = 0;
+
+    FullVector distribution ( maxClassNo+1 );
+    vector<int> all;
+
+    all.reserve ( examples.size() );
+    for ( Examples::const_iterator j = examples.begin();
+          j != examples.end(); j++ )
+    {
+        int classno = j->first;
+        distribution[classno] += j->second.weight;
+
+        all.push_back ( index );
+        index++;
+    }
+
+    double entropy = 0.0;
+    double sum = 0.0;
+    for ( int i = 0 ; i < distribution.size(); i++ )
+    {
+        double val = distribution[i];
+        if ( val <= 0.0 ) continue;
+        entropy -= val*log(val);
+        sum += val;
+    }
+    entropy /= sum;
+    entropy += log(sum);
+
+    return buildRecursive ( fp, examples, all, distribution,
+                            entropy, maxClassNo, 0, lambdaInit );
+}
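
Note: the child regularization strengths used in buildRecursive follow the heuristic lambda_child = lambda_parent * (N_parent / N_child)^(2/d) from [Laptev/Buhmann, 2014], so the smaller a child node, the stronger it is regularized. A standalone sketch with hypothetical node sizes and the dimension of a 15x15 grayscale window (225 mask weights + 1 shift):

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const double lambdaParent = 0.5; // hypothetical current lambda
        const int nParent = 200, nLeft = 150, nRight = 50;
        const int d = 226;               // 15*15 mask weights + shift parameter

        double lambdaLeft  = lambdaParent * pow((double)nParent / nLeft,  2.0 / d);
        double lambdaRight = lambdaParent * pow((double)nParent / nRight, 2.0 / d);

        // the smaller child (right) receives the larger lambda
        printf("left: %f, right: %f\n", lambdaLeft, lambdaRight);
        return 0;
    }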

+ 209 - 0
classifier/fpclassifier/randomforest/DTBObliqueLS.h

@@ -0,0 +1,209 @@
+/**
+ * @file DTBObliqueLS.h
+ * @brief oblique decision tree
+ * @author Sven Sickert
+ * @date 10/15/2014
+
+*/
+#ifndef DTBOBLIQUELSINCLUDE
+#define DTBOBLIQUELSINCLUDE
+
+#include "core/vector/VectorT.h"
+#include "core/vector/MatrixT.h"
+
+#include "core/basics/Config.h"
+#include "DecisionTreeBuilder.h"
+#include "vislearning/cbaselib/CachedExample.h"
+
+
+namespace OBJREC {
+
+struct SplitInfo {
+    double threshold;
+    double informationGain;
+    double entropyLeft;
+    double entropyRight;
+    double *distLeft;
+    double *distRight;
+    NICE::Vector params;
+};
+
+/** random oblique decision tree */
+class DTBObliqueLS : public DecisionTreeBuilder
+{
+  protected:
+
+    /////////////////////////
+    /////////////////////////
+    // PROTECTED VARIABLES //
+    /////////////////////////
+    /////////////////////////
+
+    /** Whether to use Shannon entropy or not */
+    bool useShannonEntropy;
+
+    /** Whether to save indices in leaves or not */
+    bool saveIndices;
+
+    /** Whether to use one-vs-one or one-vs-all for multiclass scenarios */
+    bool useOneVsOne;
+
+    /** Whether to increase the influence of regularization over time or not */
+    bool useDynamicRegularization;
+
+    /** Amount of steps for complete search for best threshold */
+    int splitSteps;
+
+    /** Maximum allowed depth of a tree */
+    int maxDepth;
+
+    /** Minimum number of examples in a leaf node */
+    int minExamples;
+
+    /** Regularization type */
+    int regularizationType;
+
+    /** Minimum entropy to continue with splitting */
+    double minimumEntropy;
+
+    /** Minimum information gain to continue with splitting */
+    double minimumInformationGain;
+
+    /** Regularization parameter */
+    double lambdaInit;
+
+    /////////////////////////
+    /////////////////////////
+    //  PROTECTED METHODS  //
+    /////////////////////////
+    /////////////////////////
+
+    /**
+     * @brief reduce the multi-class problem to a binary {-1,+1} (or one-vs-one {-1,0,+1}) problem
+     * @param posClass positive class number
+     * @param negClass negative class number
+     * @param matX adapted data matrix
+     * @param vecY adapted label vector
+     * @return whether both positive and negative class have examples
+     */
+    bool adaptDataAndLabelForMultiClass (
+            const int posClass,
+            const int negClass,
+            NICE::Matrix & matX,
+            NICE::Vector & vecY );
+
+    /**
+    * @brief get data matrix X, label vector y and weight vector w
+    * @param fp feature pool
+    * @param examples all examples of the training
+    * @param examples_selection indices of selected example subset
+    * @param X data matrix (amountExamples x amountParameters)
+    * @param y label vector (amountExamples)
+    * @param w weight vector (amountExamples)
+    */
+    void getDataAndLabel(
+            const FeaturePool &fp,
+            const Examples &examples,
+            const std::vector<int> & examples_selection,
+            NICE::Matrix &X,
+            NICE::Vector &y,
+            NICE::Vector &w );
+
+    /**
+     * @brief compute the regularized matrix X'*X of size (dimParams)x(dimParams)
+     * @param X data matrix
+     * @param XTXreg return regularized X'*X
+     * @param regOption which kind of regularization
+     * @param lambda regularization parameter (weighting)
+     */
+    void regularizeDataMatrix (
+            const NICE::Matrix & X,
+            NICE::Matrix &XTXreg,
+            const int regOption,
+            const double lambda );
+
+    /**
+     * @brief find best threshold for current splitting
+     * @param values feature values
+     * @param bestSplitInfo struct including best split information
+     * @param params parameter vector of the current splitting function
+     * @param e entropy before split
+     * @param maxClassNo maximum class number
+     */
+    void findBestSplitThreshold (
+            FeatureValuesUnsorted & values,
+            SplitInfo & bestSplitInfo,
+            const NICE::Vector & params,
+            const double & e,
+            const int & maxClassNo );
+
+    /**
+     * @brief recursive building method
+     * @param fp feature pool
+     * @param examples all examples of the training
+     * @param examples_selection indices of selected example subset
+     * @param distribution class distribution in current node
+     * @param entropy current entropy
+     * @param maxClassNo maximum class number
+     * @param depth current depth
+     * @return Pointer to root/parent node
+     */
+    DecisionNode *buildRecursive (
+           const FeaturePool & fp,
+           const Examples & examples,
+           std::vector<int> & examples_selection,
+           FullVector & distribution,
+           double entropy,
+           int maxClassNo,
+           int depth,
+           double curLambda );
+
+    /**
+     * @brief compute entropy for left and right child
+     * @param values feature values
+     * @param threshold threshold for split
+     * @param stat_left statistics for left child
+     * @param stat_right statistics for right child
+     * @param entropy_left entropy for left child
+     * @param entropy_right entropy for right child
+     * @param count_left weighted number of examples in left child
+     * @param count_right weighted number of examples in right child
+     * @param maxClassNo maximum class number
+     * @return whether another split is possible or not
+     */
+    bool entropyLeftRight ( const FeatureValuesUnsorted & values,
+           double threshold,
+           double* stat_left,
+           double* stat_right,
+           double & entropy_left,
+           double & entropy_right,
+           double & count_left,
+           double & count_right,
+           int maxClassNo );
+
+  public:
+
+    /** simple constructor */
+    DTBObliqueLS ( const NICE::Config *conf,
+                       std::string section = "DTBObliqueLS" );
+
+    /** simple destructor */
+    virtual ~DTBObliqueLS();
+
+    /**
+     * @brief initial building method
+     * @param fp feature pool
+     * @param examples all examples of the training
+     * @param maxClassNo maximum class number
+     * @return Pointer to root/parent node
+     */
+    DecisionNode *build ( const FeaturePool &fp,
+                          const Examples &examples,
+                          int maxClassNo );
+
+};
+
+
+
+} //namespace
+
+#endif

+ 1 - 1
classifier/fpclassifier/randomforest/DTBRandom.cpp

@@ -8,7 +8,7 @@
 #include <iostream>
 #include <time.h>
 
-#include "vislearning/classifier/fpclassifier/randomforest/DTBRandom.h"
+#include "DTBRandom.h"
 
 using namespace OBJREC;
 

+ 27 - 17
classifier/fpclassifier/randomforest/FPCRandomForests.cpp

@@ -19,6 +19,7 @@
 #include "vislearning/classifier/fpclassifier/randomforest/DTBStandard.h"
 #include "vislearning/classifier/fpclassifier/randomforest/DTBRandom.h"
 #include "vislearning/classifier/fpclassifier/randomforest/DTBClusterRandom.h"
+#include "vislearning/classifier/fpclassifier/randomforest/DTBObliqueLS.h"
 #include "vislearning/cbaselib/FeaturePool.h"
 
 using namespace OBJREC;
@@ -64,6 +65,8 @@ FPCRandomForests::FPCRandomForests(const Config *_conf, std::string section) : c
 			builder = new DTBRandom ( conf, builder_section );
 		else if (builder_method == "cluster_random" )
 			builder = new DTBClusterRandom ( conf, builder_section );
+        else if (builder_method == "oblique_ls" )
+            builder = new DTBObliqueLS ( conf, builder_section );
 		else {
 			fprintf (stderr, "DecisionTreeBuilder %s not yet implemented !\n", builder_method.c_str() );
 			exit(-1);
@@ -226,7 +229,7 @@ int FPCRandomForests::classify_optimize(Example & pce)
 
 void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 {
-	cerr << "FPCRandomForests::train()" << endl;
+
 	assert(builder != NULL);
 
 	if (maxClassNo < 0)
@@ -247,9 +250,8 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 
 	if (weight_examples)
 	{
-		for (Examples::iterator i = examples.begin();
-							i != examples.end();
-							i++, index++)
+        for ( Examples::iterator i = examples.begin();
+             i != examples.end(); i++, index++ )
 			i->second.weight = examples.size() / example_distribution[i->first];
 	}
 
@@ -269,7 +271,7 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 		}
 	}
 
-	fprintf(stderr, "FPCRandomForests: minimum number of examples: %f (classno: %d)\n", minExamples, minExamplesClassNo);
+    fprintf(stderr, "FPCRandomForests: minimum number of examples: %d (classno: %d)\n", (int)minExamples, minExamplesClassNo);
 
 	int featuresCount = (int)(fp.size() * features_per_tree);
 	fprintf(stderr, "FPCRandomForests: number of features %d\n", (int)fp.size());
@@ -286,7 +288,7 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 #pragma omp parallel for
 	for (int k = 0 ; k < number_of_trees ; k++)
 	{
-		fprintf(stderr, "[ -- building tree %d/%d -- ]\n", k + 1, number_of_trees);
+        fprintf(stderr, "FPCRandomForests: [ -- building tree %d/%d -- ]\n", k + 1, number_of_trees);
 
 		FeaturePool fp_subset;
 		Examples examples_subset;
@@ -302,7 +304,7 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 			else
 				trainingExamples = (int)(examples_index.size() * samples_per_tree);
 		
-			fprintf (stderr, "FPCRandomForests: selection of %d examples for each tree\n", trainingExamples );
+            fprintf (stderr, "FPCRandomForests: selection of %d examples for each tree (classno: %d)\n", trainingExamples, j->first );
 		
 			if ( (trainingExamples < 3) && ((int)examples_index.size() > trainingExamples) )
 			{
@@ -323,7 +325,6 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 				examples_subset.push_back(examples[examples_index[e]]);
 				exselection[k].push_back(examples_index[e]);
 			}
-			
 
 			// set out of bag trees
 			for (uint e = trainingExamples; e < examples_index.size() ; e++)
@@ -356,7 +357,8 @@ void FPCRandomForests::train(FeaturePool & fp, Examples & examples)
 		/******* training of an individual tree ****/
 		DecisionTree *tree = new DecisionTree(conf, maxClassNo);
 
-		builder->build(*tree, fp_subset, examples_subset, maxClassNo);
+        #pragma omp critical
+        builder->build(*tree, fp_subset, examples_subset, maxClassNo);
 
 		/******* prune tree using a simple minimum entropy criterion *****/
 		if (minimum_entropy != 0.0)
@@ -397,20 +399,26 @@ void FPCRandomForests::restore(istream & is, int format)
     std::string tag;
     int index;
 
-    while ( (is >> tag) && (tag == "TREE") )
+    if( (is >> tag) && (tag == "FOREST") )
     {
-		is >> index;
-		DecisionTree *dt = new DecisionTree ( conf, maxClassNo );
-		dt->restore ( is );
-		if ( minimum_entropy != 0.0 )
-			dt->pruneTreeEntropy ( minimum_entropy );
 
-		forest.push_back(dt);
-	}
+        while ( (is >> tag) && (tag == "TREE") )
+        {
+            is >> index;
+            DecisionTree *dt = new DecisionTree ( conf, maxClassNo );
+            dt->restore ( is );
+            if ( minimum_entropy != 0.0 )
+                dt->pruneTreeEntropy ( minimum_entropy );
+
+            forest.push_back(dt);
+        }
+
+    }
 }
 
 void FPCRandomForests::store(ostream & os, int format) const
 {
+    os << "FOREST " << endl;
     int index = 0;
     for ( vector<DecisionTree *>::const_iterator i = forest.begin();
 					    i != forest.end();
@@ -421,6 +429,8 @@ void FPCRandomForests::store(ostream & os, int format) const
 		dt.store ( os, format );
 		os << "ENDTREE ";
     }
+    os << endl;
+    os << "ENDFOREST " << endl;
 }
 
 void FPCRandomForests::clear()
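
Note: with the dispatch above, the new builder is available to the forest like the existing ones. A minimal standalone usage sketch (hypothetical program; config construction from the command line as in the example programs of this commit, section name as in the DTBObliqueLS constructor default):

    #include "core/basics/Config.h"
    #include "vislearning/classifier/fpclassifier/randomforest/DTBObliqueLS.h"

    using namespace OBJREC;

    int main ( int argc, char **argv )
    {
        NICE::Config conf ( argc, argv );

        // what the "oblique_ls" branch of the dispatch above constructs
        DecisionTreeBuilder *builder = new DTBObliqueLS ( &conf, "DTBObliqueLS" );

        // ... hand the builder to a forest, or call builder->build() with a
        // feature pool of ConvolutionFeature objects and training examples ...

        delete builder;
        return 0;
    }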

+ 3 - 3
classifier/fpclassifier/randomforest/FPCRandomForests.h

@@ -116,13 +116,13 @@ class FPCRandomForests : public FeaturePoolClassifier
     const std::vector<DecisionTree *> & getForest () const
     {
       return forest;
-    };
+    }
 
     /** direct write access to all trees */
     std::vector<DecisionTree *> & getForestNonConst ()
     {
       return forest;
-    };
+    }
 
     /** clone this object */
     FeaturePoolClassifier *clone () const;
@@ -131,7 +131,7 @@ class FPCRandomForests : public FeaturePoolClassifier
     std::vector<std::pair<double, int> > & getOutOfBagResults ()
     {
       return oobResults;
-    };
+    }
 
     /** set the number of trees */
     void setComplexity ( int size );

+ 1 - 1
classifier/fpclassifier/randomforest/libdepend.inc

@@ -1,4 +1,4 @@
-$(call PKG_DEPEND_EXT,OPENMP)
+# $(call PKG_DEPEND_EXT,OPENMP)
 $(call PKG_DEPEND_INT,vislearning/classifier/classifierbase)
 $(call PKG_DEPEND_INT,vislearning/features/fpfeatures)
 $(call PKG_DEPEND_INT,vislearning/optimization/mapestimation)

+ 1 - 1
classifier/genericClassifierSelection.h

@@ -40,7 +40,7 @@
 #include "vislearning/classifier/fpclassifier/randomforest/FPCRandomForests.h"
 #include "vislearning/classifier/fpclassifier/randomforest/FPCDecisionTree.h"
 #include "vislearning/classifier/fpclassifier/logisticregression/FPCSMLR.h"
-#include "vislearning/classifier/fpclassifier/FPCGPHIK.h"
+#include "vislearning/classifier/fpclassifier/gphik/FPCGPHIK.h"
 
 //vislearning -- classifier combinations
 #include "vislearning/classifier/classifiercombination/VCPreRandomForest.h"

+ 7 - 7
classifier/kernelclassifier/LaplaceApproximation.cpp

@@ -6,12 +6,12 @@
 
 */
 #include <iostream>
+#include <cmath>
 
 #include "core/vector/Algorithms.h"
 #include "LaplaceApproximation.h"
 #include "LHCumulativeGauss.h"
 
-using namespace std;
 using namespace NICE;
 using namespace OBJREC;
 
@@ -24,7 +24,7 @@ LaplaceApproximation::LaplaceApproximation()
 	verbose = false;
 }
 
-LaplaceApproximation::LaplaceApproximation( const Config *conf, const string & section )
+LaplaceApproximation::LaplaceApproximation( const Config *conf, const std::string & section )
 {
 	maxiterations = conf->gI(section, "laplace_max_iterations", 40 );
 	minimumDelta = conf->gD(section, "laplace_minimum_delta", 1e-14 );
@@ -110,7 +110,7 @@ void LaplaceApproximation::approximate ( KernelData *kernelData, const Vector &
 
 		// gradient of the objective function is gradientL - a
 		if ( verbose )
-			cerr << "findModeLaplace: gradient norm = " << (gradientL - a).normL2() << endl;
+			std::cerr << "findModeLaplace: gradient norm = " << (gradientL - a).normL2() << std::endl;
 
 		mode = kernelMatrix * a;
 	
@@ -126,8 +126,8 @@ void LaplaceApproximation::approximate ( KernelData *kernelData, const Vector &
 		{
 			if ( verbose ) 
 			{
-				cerr << "findModeLaplace: log likelihood is positive infinity...we cannot optimize any more in this case." << endl;
-				cerr << "findModeLaplace: mode = " << mode << endl;
+				std::cerr << "findModeLaplace: log likelihood is positive infinity...we cannot optimize any more in this case." << std::endl;
+				std::cerr << "findModeLaplace: mode = " << mode << std::endl;
 			}
 			break;
 		}
@@ -145,11 +145,11 @@ void LaplaceApproximation::approximate ( KernelData *kernelData, const Vector &
 		objective += loglikelihood;
 
 		if ( verbose ) 
-			cerr << "findModeLaplace: objective = " << objective << endl;
+			std::cerr << "findModeLaplace: objective = " << objective << std::endl;
 
 		double delta = fabs(oldobjective-objective)/(fabs(objective)+1);
 		if ( verbose )
-			cerr << "findModeLaplace: delta = " << delta << endl;
+			std::cerr << "findModeLaplace: delta = " << delta << std::endl;
 		if ( delta < minimumDelta ) {
 			break;
 		}

+ 2 - 2
classifier/kernelclassifier/libdepend.inc

@@ -1,5 +1,5 @@
-$(call PKG_DEPEND_EXT,SVMLIGHT)
-$(call PKG_DEPEND_EXT,OPENMP)
+# $(call PKG_DEPEND_EXT,SVMLIGHT)
+# $(call PKG_DEPEND_EXT,OPENMP)
 $(call PKG_DEPEND_INT,core/optimization)
 $(call PKG_DEPEND_INT,vislearning/cbaselib)
 $(call PKG_DEPEND_INT,vislearning/classifier/classifierbase)

+ 11 - 5
classifier/kernelclassifier/tests/TestLaplace.cpp

@@ -86,8 +86,9 @@ void TestLaplace::testCumGaussPredict()
 	}
 
 	sum /= numSteps;
-    
-	CPPUNIT_ASSERT_DOUBLES_EQUAL(sum, l.predictAnalytically( mean, variance ), 1e-2);
+
+    double fActual = l.predictAnalytically( mean, variance );
+    CPPUNIT_ASSERT_DOUBLES_EQUAL(sum, fActual, 1e-2);
 }
 
 void TestLaplace::testHyperParameterOptGradients()
@@ -136,7 +137,8 @@ void TestLaplace::testHyperParameterOptGradients()
         double obj = gp_problem.computeObjective ( );
 
         Vector gradient;
-        gp_problem.computeGradient( gradient );
+        CPPUNIT_ASSERT_NO_THROW(gp_problem.computeGradient( gradient ) );
+
         double deriv = gradient[0];
 
         if ( ! firstIteration )
@@ -205,7 +207,10 @@ void TestLaplace::testModeFinding()
 		const Vector & mode = laplaceApproximation.getMode();
 
 		for ( uint k = 0 ; k < mode.size(); k++ )
-			CPPUNIT_ASSERT_DOUBLES_EQUAL( 0.541132, y[k] * mode[k], 1e-5 );
+        {
+            double fActual = y[k] * mode[k];
+            CPPUNIT_ASSERT_DOUBLES_EQUAL( 0.541132, fActual, 1e-5 );
+        }
 	}
 	
 	/** Test 2: Check the self-consistent equation of the Mode */
@@ -233,7 +238,8 @@ void TestLaplace::testModeFinding()
 
 		// the mode should obey the following equation: mode = K gradLikelihood 
 		// (cf. eq. 3.17 Rasmussen and Williams)
-		CPPUNIT_ASSERT_DOUBLES_EQUAL ( 0.0, ( mode - K * gradLikelihood ).normL2(), 1e-15 );
+        double fActual = ( mode - K * gradLikelihood ).normL2();
+        CPPUNIT_ASSERT_DOUBLES_EQUAL ( 0.0, fActual, 1e-10 );
 	}
 }
 

+ 2 - 2
classifier/libdepend.inc

@@ -1,5 +1,5 @@
 # we need both statements
 # (1) set the flag (see Makefile.config)
-$(call PKG_DEPEND_EXT,NICEDTSVM)
+# $(call PKG_DEPEND_EXT,NICEDTSVM)
 # (2) build in a dependent manner
-$(call PKG_DEPEND_INT,nice-dtsvm)
+# $(call PKG_DEPEND_INT,nice-dtsvm)

+ 5 - 0
classifier/progs/toyExampleUnsupervisedGP.cpp

@@ -83,7 +83,12 @@ void markBoundary ( const NICE::Image & imgclassno, NICE::Image & mark )
 */
 int main (int argc, char **argv)
 {   
+#ifndef __clang__
+#ifndef __llvm__
     std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
+
 
     Config conf ( argc, argv );
     conf.store(cout);

+ 2 - 2
classifier/vclassifier/libdepend.inc

@@ -1,4 +1,4 @@
-$(call PKG_DEPEND_EXT,ICE)
-$(call PKG_DEPEND_EXT,SVMLIGHT)
+# $(call PKG_DEPEND_EXT,ICE)
+# $(call PKG_DEPEND_EXT,SVMLIGHT)
 $(call PKG_DEPEND_INT,vislearning/cbaselib)
 $(call PKG_DEPEND_INT,vislearning/classifier/classifierbase)

+ 5 - 1
featureLearning/progs/testFeatureLearning.cpp

@@ -38,7 +38,11 @@ using namespace OBJREC;
 */
 int main( int argc, char **argv )
 {
+#ifndef __clang__
+#ifndef __llvm__
   std::set_terminate( __gnu_cxx::__verbose_terminate_handler );
+#endif
+#endif
 
   Config * conf = new Config ( argc, argv );
   
@@ -343,4 +347,4 @@ int main( int argc, char **argv )
     delete conf;
   
    return 0;
-}
+}

+ 304 - 0
features/fpfeatures/ConvolutionFeature.cpp

@@ -0,0 +1,304 @@
+/**
+* @file ConvolutionFeature.cpp
+* @brief convolutional feature
+* @author Sven Sickert
+* @date 10/13/2008
+
+*/
+#include <iostream>
+#include <time.h>
+
+#include "ConvolutionFeature.h"
+#include "vislearning/cbaselib/FeaturePool.h"
+
+using namespace OBJREC;
+
+using namespace NICE;
+
+/* Convolutional feature consists of shift parameter params[0] and the
+   convolutional mask, which is stored in the rest of the parameter vector
+   params */
+
+
+/** simple constructor */
+ConvolutionFeature::ConvolutionFeature ( )
+{
+    window_size_x = 15;
+    window_size_y = 15;
+    isColor = false;
+    useSpatialPriors = false;
+
+    initializeParameterVector();
+}
+
+/** alternative constructor */
+ConvolutionFeature::ConvolutionFeature (
+        const int wsize_x,
+        const int wsize_y,
+        const bool color,
+        const bool prior )
+{
+    window_size_x = wsize_x;
+    window_size_y = wsize_y;
+    isColor = color;
+    useSpatialPriors = prior;
+
+    initializeParameterVector();
+}
+
+/** config-based constructor */
+ConvolutionFeature::ConvolutionFeature ( const Config *conf )
+{
+    std::string section = "ConvolutionFeature";
+    window_size_x = conf->gI ( section, "window_size_x", 15 );
+    window_size_y = conf->gI ( section, "window_size_y", 15 );
+    isColor = conf->gB ( section, "is_color", false );
+    useSpatialPriors = conf->gB ( section, "use_spatial_priors", false );
+
+    initializeParameterVector();
+}
+
+/** copy constructor */
+ConvolutionFeature::ConvolutionFeature ( const ConvolutionFeature *confFeat )
+{
+    window_size_x = confFeat->window_size_x;
+    window_size_y = confFeat->window_size_y;
+    paramsLength = confFeat->paramsLength;
+    isColor = confFeat->isColor;
+    useSpatialPriors = confFeat->useSpatialPriors;
+    numChannels = confFeat->numChannels;
+    params = new NICE::Vector( paramsLength, 0.0 );
+
+    int i = 0;
+    for ( NICE::Vector::iterator it = confFeat->params->begin();
+          it != confFeat->params->end(); ++it, i++ )
+    {
+        (*params)[i] = *it; // dereference: params is a pointer to the vector
+    }
+}
+
+/** simple destructor */
+ConvolutionFeature::~ConvolutionFeature ( )
+{
+    if ( params != NULL)
+        delete params;
+}
+
+
+/** (re)initialize parameter vector */
+void ConvolutionFeature::initializeParameterVector()
+{
+    if (window_size_x > 0 && window_size_y > 0)
+    {
+        if (isColor)
+            numChannels = 3;
+        else
+            numChannels = 1;
+
+        paramsLength = numChannels*window_size_x*window_size_y + 1;
+
+        if (useSpatialPriors) paramsLength += 2;
+
+        params = new NICE::Vector( paramsLength, (1.0/(double)(paramsLength-1) ) );
+        (*params)[0] = 1.0; // shift/bias parameter, see comment above
+    }
+    else
+        std::cerr << "ConvolutionFeature::initializeVector: Size of window is Zero! Could not initialize..."
+                  << std::endl;
+}
+
+bool ConvolutionFeature::isColorMode() const
+{
+    return isColor;
+}
+
+/** return parameter vector */
+NICE::Vector ConvolutionFeature::getParameterVector() const
+{
+    NICE::Vector res = (*this->params);
+    return res;
+}
+
+/** return feature vector */
+void ConvolutionFeature::getFeatureVector(
+        const Example *example,
+        NICE::Vector & vec ) const
+{
+    NICE::MultiChannelImageT<double> * imgD = NULL;
+    imgD = & example->ce->getDChannel( CachedExample::D_EOH );
+    double** data = imgD->getDataPointer();
+
+    int xsize, ysize;
+    example->ce->getImageSize( xsize, ysize );
+
+    const int x = example->x;
+    const int y = example->y;
+    const int halfwsx = std::floor ( window_size_x / 2 );
+    const int halfwsy = std::floor ( window_size_y / 2 );
+    //const int step = window_size_x*window_size_y;
+
+    int k = 1;
+    for ( int c = 0; c < numChannels; c++)
+        for ( int v = -halfwsy; v <= halfwsy; v++ )
+            for ( int u = -halfwsx; u <= halfwsx; u++, k++ )
+            {
+                int uu = u;
+                int vv = v;
+                if (x+u < 0 || x+u >= xsize) uu=-u;
+                if (y+v < 0 || y+v >= ysize) vv=-v;
+
+                //vec[k] = imgD->get(x+uu,y+vv,c);
+                vec[k] = data[c][(x+uu)+(y+vv)*xsize];
+
+            }
+
+    if (useSpatialPriors)
+    {
+        vec[paramsLength-2] = (double)x/(double)xsize;
+        vec[paramsLength-1] = (double)y/(double)ysize;
+    }
+
+}
+
+/** return length of parameter vector */
+int ConvolutionFeature::getParameterLength() const
+{
+    return paramsLength;
+}
+
+void ConvolutionFeature::setRandomParameterVector ( )
+{
+    srand (time(NULL));
+    for ( NICE::Vector::iterator it = params->begin();
+          it != params->end(); ++it )
+    {
+        double b = (double) rand() / (double) RAND_MAX;
+        *it = b;
+    }
+    params->normalizeL2();
+}
+
+/** set parameter vector */
+void ConvolutionFeature::setParameterVector( const Vector & vec )
+{
+    if ( params->size() == vec.size() )
+    {
+        int i = 0;
+        for ( NICE::Vector::iterator it = params->begin();
+              it != params->end(); ++it, i++ )
+        {
+            *it = vec[i];
+        }
+        params->normalizeL2();
+    }
+    else
+        std::cerr << "ConvolutionFeature::setParameterVector: Vector sizes do not match!"
+                  << " expected: " << params->size() << ", got: " << vec.size()
+                  << std::endl;
+
+}
+
+/** return feature value */
+double ConvolutionFeature::val ( const Example *example ) const
+{
+    double val1 = 0.0;
+
+    // is parameter vector and image data available?
+    if (params == NULL)
+    {
+        std::cerr << "ConvolutionalFeature::val: Missing parameter vector!"
+                  << std::endl;
+
+        return val1;
+    }
+
+    NICE::Vector featVec (paramsLength, 1.0);
+    getFeatureVector ( example, featVec );
+
+//    for ( int i = 0; i < featVec.size(); i++ )
+//        val1 += featVec[i] * params->operator [](i);
+    val1 = params->scalarProduct ( featVec );
+
+    return val1;
+}
+
+/** create feature pool */
+void ConvolutionFeature::explode ( FeaturePool &featurePool, bool variableWindow ) const
+{
+    ConvolutionFeature *f = new ConvolutionFeature (
+                this->window_size_x,
+                this->window_size_y,
+                this->isColor,
+                this->useSpatialPriors );
+
+    featurePool.addFeature(f);
+}
+
+/** clone current feature */
+Feature *ConvolutionFeature::clone ( ) const
+{
+    ConvolutionFeature *f = new ConvolutionFeature (
+                this->window_size_x,
+                this->window_size_y,
+                this->isColor,
+                this->useSpatialPriors );
+
+    f->setParameterVector( *params );
+
+    return f;
+}
+
+Feature *ConvolutionFeature::generateFirstParameter () const
+{
+    return clone();
+}
+
+void ConvolutionFeature::restore ( std::istream & is, int format )
+{
+    is >> window_size_x;
+    is >> window_size_y;
+    is >> paramsLength;
+
+    isColor = false;
+    useSpatialPriors = false;
+    numChannels = 1;
+
+    if ( paramsLength == (window_size_x*window_size_y+3) )
+    {
+        useSpatialPriors = true;
+    }
+    else if ( paramsLength == (3*window_size_x*window_size_y+1) )
+    {
+        isColor = true;
+        numChannels = 3;
+    }
+    else if ( paramsLength == (3*window_size_x*window_size_y+3) )
+    {
+        isColor = true;
+        numChannels = 3;
+        useSpatialPriors = true;
+    }
+
+    if ( params != NULL ) // avoid leaking a previously allocated vector
+        delete params;
+    params = new NICE::Vector( paramsLength, 1.0 );
+    for ( NICE::Vector::iterator it = params->begin();
+          it != params->end(); ++it )
+        is >> *it;
+}
+
+void ConvolutionFeature::store ( std::ostream & os, int format ) const
+{
+    os << "ConvolutionFeature "
+       << window_size_x << " "
+       << window_size_y << " "
+       << paramsLength;
+
+    for ( NICE::Vector::const_iterator it = params->begin();
+          it != params->end(); ++it )
+        os << ' ' << *it;
+
+}
+
+void ConvolutionFeature::clear ()
+{
+    params->clear();
+}
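
Note: a self-contained sketch of how one feature value comes about (plain arrays, hypothetical 3x3 mask weights): the feature vector is [1, window pixels...], out-of-image indices are mirrored back inside, and the value is the scalar product with params, whose first entry acts as the shift/bias. In the oblique tree this value is then compared against the node threshold:

    #include <cstdio>

    int main()
    {
        // hypothetical 4x4 single-channel "image" and a 3x3 window
        const int xsize = 4, ysize = 4, half = 1;
        double img[4][4] = { {1,2,3,4}, {5,6,7,8}, {9,8,7,6}, {5,4,3,2} };
        // params: shift weight params[0] followed by the 3x3 mask
        double params[10] = { 1.0, 0.1,0.1,0.1, 0.1,0.2,0.1, 0.1,0.1,0.1 };

        const int x = 0, y = 0;       // border pixel: indices get mirrored
        double val = params[0] * 1.0; // featVec[0] stays 1.0 (shift/bias)
        int k = 1;
        for ( int v = -half; v <= half; v++ )
            for ( int u = -half; u <= half; u++, k++ )
            {
                int uu = (x + u < 0 || x + u >= xsize) ? -u : u; // mirror
                int vv = (y + v < 0 || y + v >= ysize) ? -v : v; // mirror
                val += params[k] * img[y + vv][x + uu];
            }

        printf("feature value = %f\n", val); // compared against the threshold
        return 0;
    }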

+ 159 - 0
features/fpfeatures/ConvolutionFeature.h

@@ -0,0 +1,159 @@
+/**
+* @file ConvolutionFeature.h
+* @brief convolutional feature
+* @author Sven Sickert
+* @date 10/13/2008
+
+*/
+#ifndef ConvolutionFeatureINCLUDE
+#define ConvolutionFeatureINCLUDE
+
+#include "core/vector/VectorT.h"
+#include "core/vector/MatrixT.h"
+
+#include "core/basics/Config.h"
+#include "vislearning/cbaselib/Feature.h"
+
+
+namespace OBJREC{
+
+/** convolutional feature */
+class ConvolutionFeature : public Feature
+{
+
+  protected:
+
+  /////////////////////////
+  /////////////////////////
+  // PROTECTED VARIABLES //
+  /////////////////////////
+  /////////////////////////
+
+    /** feature parameter */
+    int window_size_x;
+    int window_size_y;
+    int paramsLength;
+    int numChannels;
+    bool isColor;
+    bool useSpatialPriors;
+
+    NICE::Vector *params;
+
+    /**
+     * @brief (re)initialize parameter vector
+     */
+    void initializeParameterVector();
+
+  public:
+
+    ///////////////////// ///////////////////// /////////////////////
+    //                   CONSTRUCTORS / DESTRUCTORS
+    ///////////////////// ///////////////////// /////////////////////
+
+    /** simple constructor */
+    ConvolutionFeature ( );
+
+    /** alternative constructor */
+    ConvolutionFeature ( const int wsize_x,
+                         const int wsize_y,
+                         const bool color = false,
+                         const bool prior = false );
+
+    /** config-based constructor */
+    ConvolutionFeature ( const NICE::Config *conf );
+
+    /** copy constructor */
+    ConvolutionFeature ( const ConvolutionFeature *convFeat );
+
+    /** simple destructor */
+    virtual ~ConvolutionFeature ( );
+
+
+    ///////////////////// ///////////////////// /////////////////////
+    //                      FEATURE STUFF
+    ///////////////////// ///////////////////// /////////////////////
+
+    /**
+     * @brief return isColor variable
+     * @return isColor
+     */
+    bool isColorMode () const;
+
+    /**
+     * @brief return parameter vector
+     * @return parameter vector
+     */
+    NICE::Vector getParameterVector () const;
+
+    /**
+     * @brief return feature vector
+     * @param example current example
+     * @param returned feature vector
+     */
+    void getFeatureVector ( const Example *example, NICE::Vector &vec ) const;
+
+    /**
+     * @brief return length of parameter vector
+     * @return length of vector
+     */
+    int getParameterLength () const;
+
+    /**
+     * @brief set parameter vector
+     * @param vec new parameter vector
+     */
+    void setParameterVector ( const NICE::Vector &vec );
+
+    /**
+     * @brief set a random parameter vector
+     */
+    void setRandomParameterVector ();
+
+    /**
+     * @brief return feature value for given example
+     * @param example given Example
+     * @return double feature value
+     */
+    double val ( const Example *example ) const;
+
+    /**
+     * @brief create feature pool with convolutional features
+     * @param featurePool to be filled
+     * @param variableWindow
+     */
+    void explode ( FeaturePool &featurePool, bool variableWindow = true ) const;
+
+    /**
+     * @brief clone current feature
+     * @return clone of current feature
+     */
+    Feature *clone () const;
+
+    Feature *generateFirstParameter () const;
+
+    ///////////////////// INTERFACE PERSISTENT /////////////////////
+    // interface specific methods for store and restore
+    ///////////////////// INTERFACE PERSISTENT /////////////////////
+
+    /**
+     * @brief Load convolution feature object from external file (stream)
+     */
+    void restore ( std::istream & is, int format = 0 );
+
+    /**
+     * @brief Save convolution feature object to external file (stream)
+     */
+    void store( std::ostream & os, int format = 0 ) const;
+
+    /**
+     * @brief Clear convolution feature object
+     */
+    void clear ();
+
+};
+
+
+
+} //namespace
+
+#endif

+ 0 - 0
cbaselib/VectorFeature.cpp → features/fpfeatures/VectorFeature.cpp


+ 0 - 0
cbaselib/VectorFeature.h → features/fpfeatures/VectorFeature.h


+ 6 - 1
features/fpfeatures/createFeatures.cpp

@@ -1,4 +1,4 @@
-#include <vislearning/cbaselib/VectorFeature.h>
+#include "VectorFeature.h"
 #include "HaarFeature.h"
 #include "PixelPairFeature.h"
 #include "SemanticFeature.h"
@@ -6,6 +6,7 @@
 #include "EOHFeature.h"
 #include "SparseVectorFeature.h"
 #include "ColorHistogramFeature.h"
+#include "ConvolutionFeature.h"
 
 #include "createFeatures.h"
 
@@ -43,6 +44,10 @@ Feature *OBJREC::createFeatureFromTag ( const Config *conf, const std::string &
 	    ||  ( tag == "SparseVectorFeature" ) ){
 	return new SparseVectorFeature ( 4711 ); // bogus dimension value only needed for explode
 
+    } else if ( ( tag == "CONVOLUTIONFEATURE")
+        || ( tag == "ConvolutionFeature" ) ) {
+    return new ConvolutionFeature ( conf );
+
     } else {
 	return NULL;
     }

+ 99 - 0
features/fpfeatures/tests/TestVectorFeature.cpp

@@ -0,0 +1,99 @@
+#ifdef NICE_USELIB_CPPUNIT
+
+#include <string>
+#include <exception>
+#include <iostream>
+#include <fstream>
+
+//----------
+
+#include "TestVectorFeature.h"
+
+#include "vislearning/cbaselib/FeaturePool.h"
+#include "../VectorFeature.h"
+
+const bool verbose = true;
+const bool verboseStartEnd = true;
+
+using namespace OBJREC;
+using namespace NICE;
+using namespace std;
+
+CPPUNIT_TEST_SUITE_REGISTRATION( TestVectorFeature );
+
+void TestVectorFeature::setUp() {
+}
+
+void TestVectorFeature::tearDown() {
+}
+void TestVectorFeature::testVectorFeature() 
+{
+    if (verboseStartEnd)
+        std::cerr << "================== TestVectorFeature::testVectorFeature ===================== " << std::endl;
+
+    Matrix mX;
+    Vector vY;
+    Vector vY_multi;
+
+    ifstream ifs ("toyExample1.data", ios::in);
+    //   ifstream ifs ("toyExampleLargeScale.data", ios::in);
+    //   ifstream ifs ("toyExampleLargeLargeScale.data", ios::in);
+    CPPUNIT_ASSERT ( ifs.good() );
+    ifs >> mX;
+    ifs >> vY;
+    ifs >> vY_multi;
+    ifs.close();
+
+    if (verbose)
+    {
+        std::cerr << "data loaded: mX" << std::endl;
+        std::cerr << mX << std::endl;
+        std::cerr << "vY: " << std::endl;
+        std::cerr << vY << std::endl;
+        std::cerr << "vY_multi: " << std::endl;
+        std::cerr << vY_multi << std::endl;
+    }
+
+    int iNumFeatureDimension = mX.cols();
+
+    FeaturePool fp;
+    VectorFeature *pVecFeature = new VectorFeature(iNumFeatureDimension);
+    pVecFeature->explode(fp);
+
+
+    // memory layout needs to be transposed into rows x columns: features x samples.
+    // features must lie next to each other in memory, so that each feature vector can
+    // be addressed by a starting pointer and the number of feature dimensions to come.
+    Matrix mX_transposed = mX.transpose();
+
+    Examples examples;
+
+    bool bSuccess = Examples::wrapExamplesAroundFeatureMatrix(mX_transposed, vY, examples);
+    CPPUNIT_ASSERT( bSuccess );
+
+    CPPUNIT_ASSERT( examples.size() == mX.rows() );
+
+    for(int i=0; i< examples.size(); i++)
+    {
+        Example &t_Example = examples[i].second;
+        NICE::Vector    t_FeatVector;
+        fp.calcFeatureVector(t_Example, t_FeatVector);
+        std::cerr << "Example " << i << " Features: " <<t_FeatVector << std::endl;
+
+        for(int f=0; f< iNumFeatureDimension;f++)
+        {
+            double t_f1 = t_FeatVector[f];
+            double t_f2 = mX(i,f);
+            CPPUNIT_ASSERT_DOUBLES_EQUAL( t_f1, t_f2, 0.001f );
+        }
+    }
+    examples.clean();
+    delete pVecFeature;
+
+    if (verboseStartEnd)
+        std::cerr << "================== TestVectorFeature::TestVectorFeature done ===================== " << std::endl;
+
+}
+
+
+#endif

+ 26 - 0
features/fpfeatures/tests/TestVectorFeature.h

@@ -0,0 +1,26 @@
+#ifndef _TESTVECTORFEATURE_H
+#define _TESTVECTORFEATURE_H
+
+#include <cppunit/extensions/HelperMacros.h>
+
+/**
+ * CppUnit-Testcase. 
+ */
+class TestVectorFeature : public CppUnit::TestFixture {
+
+    CPPUNIT_TEST_SUITE( TestVectorFeature );
+    
+    CPPUNIT_TEST(testVectorFeature);
+
+    CPPUNIT_TEST_SUITE_END();
+  
+ private:
+ 
+ public:
+    void setUp();
+    void tearDown();
+
+    void testVectorFeature();
+};
+
+#endif // _TESTVECTORFEATURE_H

+ 42 - 0
features/fpfeatures/tests/toyExample1.data

@@ -0,0 +1,42 @@
+39 x 2
+0.1394    0.3699
+0.1210    0.3260
+0.1164    0.2588
+0.1210    0.2032
+0.1417    0.1886
+0.1624    0.2325
+0.1624    0.3319
+0.1509    0.3114
+0.1417    0.2412
+0.1417    0.2763
+0.1279    0.3173
+0.3537    0.3582
+0.3306    0.3056
+0.3306    0.2471
+0.3376    0.2061
+0.3583    0.1740
+0.3698    0.1564
+0.3790    0.2558
+0.3744    0.3173
+0.3698    0.3406
+0.3583    0.2646
+0.3629    0.1944
+0.3468    0.3173
+0.3329    0.2588
+0.3514    0.1974
+0.2224    0.3436
+0.2270    0.3348
+0.2293    0.2675
+0.2339    0.2237
+0.2316    0.1623
+0.2408    0.1857
+0.2615    0.2763
+0.2638    0.3436
+0.2592    0.3904
+0.2477    0.4284
+0.2224    0.3582
+0.2177    0.2909
+0.2224    0.2178
+0.2500    0.1213
+39 < 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 >
+39 < 0 0 0 0 0 0 0 0 0 0 0 3 3 3 3 3 3 3 3 3 3 3 3 3 3 1 1 1 1 1 1 1 1 1 1 1 1 1 1 >

+ 20 - 1
features/localfeatures/LocalFeatureColorWeijer.cpp

@@ -281,7 +281,26 @@ void LocalFeatureColorWeijer::getFeats( const NICE::ColorImage &img, MultiChanne
     }
   }
 
-  return;
+}
+
+void LocalFeatureColorWeijer::getFeats( const ColorImage &img, NICE::ImageT<double> &feats, int color )
+{
+  assert ( feats.width() > 0 );
+  assert ( feats.height() > 0 );
+
+  int width = ( int )img.width();
+  int height = ( int )img.height();
+
+  for ( int y = 0; y < height; y++ )
+    for ( int x = 0; x < width; x++ )
+    {
+      int r = img(x,y,0)/8;
+      int g = img(x,y,1)/8;
+      int b = img(x,y,2)/8;
+
+      feats.setPixel( x, y, hist[r][g][b][color] );
+    }
+
 }
 
 ///////////////////// INTERFACE PERSISTENT /////////////////////
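
Note: the lookup in the new getFeats variant quantizes each 8-bit channel into 32 bins (division by 8), so hist is a 32x32x32 cube per color name. A tiny standalone sketch of the index computation with a hypothetical pixel:

    #include <cstdio>

    int main()
    {
        unsigned char R = 200, G = 30, B = 90; // hypothetical pixel
        int r = R / 8, g = G / 8, b = B / 8;   // bin indices in 0..31
        printf("bin = (%d, %d, %d)\n", r, g, b); // (25, 3, 11)
        // the feature value would then be hist[r][g][b][color]
        return 0;
    }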

+ 9 - 0
features/localfeatures/LocalFeatureColorWeijer.h

@@ -161,6 +161,15 @@ class LocalFeatureColorWeijer : public LocalFeature
      */
     void getFeats ( const NICE::ColorImage &img, NICE::MultiChannelImageT<double> &feats );
     
+
+    /**
+     * transform each pixel of an image
+     * @param img input image
+     * @param feats feature value for each pixel
+     * @param color which color name
+     */
+    void getFeats ( const NICE::ColorImage &img, NICE::ImageT<double> &feats, int color);
+
     ///////////////////// INTERFACE PERSISTENT /////////////////////
     // interface specific methods for store and restore
     ///////////////////// INTERFACE PERSISTENT /////////////////////   

+ 2 - 2
features/localfeatures/libdepend.inc

@@ -1,5 +1,5 @@
-$(call PKG_DEPEND_EXT,CUDASIFT)
-$(call PKG_DEPEND_EXT,OPENMP)
+# $(call PKG_DEPEND_EXT,CUDASIFT)
+# $(call PKG_DEPEND_EXT,OPENMP)
 $(call PKG_DEPEND_INT,core)
 $(call PKG_DEPEND_INT,vislearning/baselib)
 $(call PKG_DEPEND_INT,vislearning/image)

+ 5 - 1
features/localfeatures/progs/computeLocalFeatures.cpp

@@ -38,7 +38,11 @@ using namespace OBJREC;
 */
 int main( int argc, char **argv )
 {
+#ifndef __clang__
+#ifndef __llvm__
   std::set_terminate( __gnu_cxx::__verbose_terminate_handler );
+#endif
+#endif
 
   NICE::Config * conf = new NICE::Config ( argc, argv );
   
@@ -131,4 +135,4 @@ int main( int argc, char **argv )
 
   
    return 0;
-}
+}

+ 2 - 2
features/localfeatures/progs/sift-driver.cpp

@@ -18,7 +18,7 @@
 
 extern "C" {
 #include<getopt.h>
-#if defined (VL_MAC)
+#if defined(__clang__) || defined(__llvm__)
 #include<libgen.h>
 #else
 #include<string.h>
@@ -438,7 +438,7 @@ main(int argc, char** argv)
 	// the basename
 	if(outputFilenamePrefix.size() != 0) {
 	  outputFilename = outputFilenamePrefix + 
-	    std::string(basename(outputFilename.c_str())) ;
+	    std::string(basename((char*)outputFilename.c_str())) ;
 	}
 	
       // remove .pgm extension, add .key

+ 3 - 3
features/regionfeatures/libdepend.inc

@@ -1,4 +1,4 @@
-$(call PKG_DEPEND_EXT,ICE)
-$(call PKG_DEPEND_EXT,OPENMP)
-$(call PKG_DEPEND_INT,vislearning/segmentation)
+# $(call PKG_DEPEND_EXT,ICE)
+# $(call PKG_DEPEND_EXT,OPENMP)
+$(call PKG_DEPEND_INT,segmentation)
 $(call PKG_DEPEND_INT,vislearning/features/fbase)

+ 68 - 6
features/simplefeatures/Codebook.cpp

@@ -89,8 +89,53 @@ void Codebook::clear ()
 
 void Codebook::restore ( istream & is, int format )
 {
-   is >> thresholds;
-   is >> informativeMeasure;
+    if (is.good())
+    {
+        std::string tmp;
+        is >> tmp; //class name
+
+        if ( ! this->isStartTag( tmp, "Codebook" ) )
+        {
+            std::cerr << " WARNING - attempt to restore Codebook, but start flag " << tmp << " does not match! Aborting... " << std::endl;
+            throw;
+        }
+
+        bool b_endOfBlock = false;
+
+        while ( !b_endOfBlock )
+        {
+            is >> tmp; // start of block
+
+            if ( this->isEndTag( tmp, "Codebook" ) )
+            {
+                b_endOfBlock = true;
+                continue;
+            }
+
+            tmp = this->removeStartTag ( tmp );
+            if ( tmp.compare("thresholds") == 0 )
+            {
+                is >> thresholds;
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+            else if ( tmp.compare("informativeMeasure") == 0 )
+            {
+                is >> informativeMeasure;
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+            else if ( tmp.compare("classnos") == 0 )
+            {
+                is >> classnos;
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+
+
+        }
+    }
+
 //    is >> classnos;
    
    //TODO use a flag for compatibility with old systems?
@@ -111,9 +156,26 @@ void Codebook::restore ( istream & is, int format )
 
 void Codebook::store ( ostream & os, int format ) const
 {
-    os << this->thresholds << endl;
-    os << this->informativeMeasure << endl;
-    os << this->classnos << endl;
+    if (os.good())
+    {
+        // show starting point
+        os << this->createStartTag( "Codebook" ) << std::endl;
+
+        os << this->createStartTag( "thresholds" ) << std::endl;
+        os << this->thresholds << endl;
+        os << this->createEndTag( "thresholds" ) << std::endl;
+
+        os << this->createStartTag( "informativeMeasure" ) << std::endl;
+        os << this->informativeMeasure << endl;
+        os << this->createEndTag( "informativeMeasure" ) << std::endl;
+
+        os << this->createStartTag( "classnos" ) << std::endl;
+        os << this->classnos << endl;
+        os << this->createEndTag( "classnos" ) << std::endl;
+
+        // done
+        os << this->createEndTag( "Codebook" ) << std::endl;
+    }
 //     os << this->i_noOfNearestClustersToConsidere << endl;
 //     os << this->s_section << endl;
 //     
@@ -148,4 +210,4 @@ void Codebook::setHardAssignment( const bool & _hardAssignment )
 bool Codebook::getHardAssignment ( ) const
 {
   return this->b_hardAssignment;
-}
+}

+ 4 - 2
features/simplefeatures/Codebook.h

@@ -119,13 +119,15 @@ namespace OBJREC {
         
         
         //for backward-compatibility
-        virtual void   vote (const NICE::Vector &feature, NICE::Vector &histogram, int &codebookEntry , double &weight , double &distance ) const { this->voteVQ ( feature, histogram, codebookEntry, weight, distance ); };
+        virtual void   vote (const NICE::Vector &feature, NICE::Vector &histogram, int &codebookEntry , double &weight , double &distance ) const {
+            this->voteVQ ( feature, histogram, codebookEntry, weight, distance );
+        }
         
         virtual void   vote (const NICE::Vector &feature, NICE::SparseVector &histogram ) const {
           int codebookEntry;
           double weight, distance;
           this->voteVQ ( feature, histogram, codebookEntry, weight, distance );          
-        };        
+        }
    
         
         

+ 582 - 0
features/simplefeatures/CodebookRandomForest.cpp

@@ -0,0 +1,582 @@
+/** 
+* @file CodebookRandomForest.cpp
+* @brief feature CodebookRandomForest
+* @author Erik Rodner
+* @date 02/15/2008
+
+*/
+
+#include <queue>
+#include <iostream>
+
+#include "CodebookRandomForest.h"
+
+using namespace OBJREC;
+
+using namespace std;
+using namespace NICE;
+
+#undef DEBUGPRUNING
+
+CodebookRandomForest::CodebookRandomForest( int maxDepth, int restrictedCodebookSize )
+{
+	this->clusterforest = NULL;
+    this->maxDepth = maxDepth;
+    this->restrictedCodebookSize = restrictedCodebookSize;
+}
+
+CodebookRandomForest::CodebookRandomForest( FPCRandomForests *clusterforest,
+    int maxDepth, int restrictedCodebookSize )
+{
+    this->clusterforest = clusterforest;
+    this->maxDepth = maxDepth;
+    this->restrictedCodebookSize = restrictedCodebookSize;
+    buildLeafMap();
+}
+
+CodebookRandomForest::~CodebookRandomForest()
+{
+    if ( clusterforest != NULL )
+        delete clusterforest;
+}
+
+void CodebookRandomForest::setClusterForest( FPCRandomForests *clusterforest)
+{
+	if(this->clusterforest != NULL)
+		delete this->clusterforest;
+	
+	this->clusterforest = clusterforest;
+	buildLeafMap();
+}
+
+void CodebookRandomForest::buildParentStructure ( DecisionNode *node, 
+	map<DecisionNode *, DecisionNode *> & parentStructure )
+{
+    if ( node == NULL ) return;
+    if ( node->left != NULL )
+    {
+		parentStructure.insert ( pair<DecisionNode *, DecisionNode *> ( node->left, node ) );
+		buildParentStructure ( node->left, parentStructure );
+    }
+    if ( node->right != NULL )
+    {
+		parentStructure.insert ( pair<DecisionNode *, DecisionNode *> ( node->right, node ) );
+		buildParentStructure ( node->right, parentStructure );
+    }
+}
+
+void CodebookRandomForest::pruneForest ()
+{
+    map<DecisionNode *, pair<long, int> > index;
+    clusterforest->indexDescendants( index );
+    map<DecisionNode *, DecisionNode *> parentStructure;
+
+    vector<DecisionTree *> & trees = clusterforest->getForestNonConst();
+
+    for ( vector<DecisionTree *>::const_iterator i = trees.begin();
+		i != trees.end(); i++ )
+    {
+		DecisionTree *tree = *i;
+		parentStructure.insert ( pair<DecisionNode *, DecisionNode *> ( tree->getRoot(), NULL ) );
+		buildParentStructure ( tree->getRoot(), parentStructure );
+    }
+
+    priority_queue< triplet<double, long, DecisionNode *> > lastLevelInnerNodes;
+
+    long leafs = 0;
+    for ( map<DecisionNode *, pair<long, int> >::const_iterator k = index.begin();
+	    k != index.end(); k++ )
+    {
+		DecisionNode *node = k->first;
+		if ( (!node->isLeaf()) && ((node->left->isLeaf())
+			 || (node->right->isLeaf())) )
+		{
+			double mi = node->distribution.entropy() - ( 
+			node->left->distribution.sum() * node->left->distribution.entropy() +
+			node->right->distribution.sum() * node->right->distribution.entropy() )
+			   / node->distribution.sum();
+			lastLevelInnerNodes.push ( triplet<double, long, DecisionNode *> ( 
+			- mi, k->second.first, node ) );
+		}
+
+		if ( node->isLeaf() ) leafs++;
+    }
+
+    set< DecisionNode * > deletedRoots;
+
+	/*********************************************
+	 * EVIL Pruning method                       *
+	 *********************************************/
+	set<DecisionNode *> deletedNodes;
+    while ( (leafs > restrictedCodebookSize) && (lastLevelInnerNodes.size() > 0) )
+    {
+		const triplet<double, long, DecisionNode *> & nodemi = lastLevelInnerNodes.top();
+        #ifdef DEBUGPRUNING
+            double current_mi = -nodemi.first;
+            fprintf (stderr, "CodebookRandomForest: %d contract leaf with mutual information %f\n", leafs, current_mi );
+        #endif
+		DecisionNode *node = nodemi.third;
+		lastLevelInnerNodes.pop();
+
+		assert ( node != NULL );
+
+		DecisionNode *left = node->left;
+		DecisionNode *right = node->right;
+		//fprintf (stderr, "node: %ld, left: %ld, right: %ld\n", (long int)node, (long int)left,
+		//    (long int)right ); 
+		if ( (deletedNodes.find(node) != deletedNodes.end() ) || node->isLeaf() ) {
+			// this is a tricky case: consider the subsequent contraction of
+			// two children of a node.
+			// After the first child is contracted, the node is added to lastLevelInnerNodes.
+			// If the second child is contracted, the node is still in the queue but
+			// is now a leaf node.
+			// A second problem arises if the parent node is contracted after the
+			// second child but before the node itself. Hence the deletedNodes set.
+			continue;
+		}
+
+		#ifdef DEBUGPRUNING
+		fprintf (stderr, "CodebookRandomForest: nodes remaining %ld (min:%d); current mi %f\n", 
+			leafs, restrictedCodebookSize, current_mi );
+		#endif
+    
+		assert ( parentStructure.find(node) != parentStructure.end() );
+		DecisionNode *parent_node = parentStructure[node];
+		//fprintf (stderr, "parent: %ld\n", (long int)parent_node );
+		if ( parent_node == NULL )
+		{
+			#ifdef DEBUGPRUNING
+			fprintf (stderr, "CodebookRandomForest: Deleting the root node !!!\n");
+			#endif
+
+			DecisionNode *newParent = NULL;
+			if ( (left->isLeaf()) && (right->isLeaf()) )
+			{
+				//fprintf (stderr, "case (a)\n");
+				delete ( node->f );
+				node->f = NULL;
+				delete left;
+				delete right;
+				deletedNodes.insert ( left );
+				deletedNodes.insert ( right );
+					
+				node->left = NULL;
+				node->right = NULL;
+				newParent = node;
+				leafs--;
+			} else if ( left->isLeaf() ) {
+				// case (b) left child is a leaf
+				delete left;
+				delete node;
+				deletedNodes.insert ( node );
+				deletedNodes.insert ( left );
+				parentStructure[right] = parent_node;
+				newParent = right;
+				leafs--;
+			} else if ( right->isLeaf() ) {
+				// case (c) right child is a leaf
+				delete right;
+				delete node;
+				deletedNodes.insert ( right );
+				deletedNodes.insert ( node );
+
+				parentStructure[left] = parent_node;
+				newParent = left;
+				leafs--;
+			} else {
+				fprintf (stderr, "UNKNOWN CASE !!\n");
+				exit(-1);
+			}
+			for ( vector<DecisionTree *>::iterator i = trees.begin(); i != trees.end() ; i++ )
+				if ( (*i)->getRoot() == node )
+					(*i)->setRoot(newParent);    
+
+			continue;
+		}
+		long int parent_index = index[parent_node].first;
+		double mi = 0.0;
+		bool nodeIsLeft = ( parent_node->left == node );
+		DecisionNode *sibling = nodeIsLeft ? parent_node->right : parent_node->left;
+
+		if ( (left == NULL) || (right == NULL) )
+			fthrow(Exception, "There is a bug in this code: CodebookRandomForest (good luck!) bugid=1");
+
+		if ( (left->isLeaf()) && (right->isLeaf()) )
+		{
+
+			/* ------------ case (a) left and right childs are leafs
+					(p)                    (p)
+			  (n)         (s)      ->  (n)     (s)  and add p to the last level nodes
+			(l) (r)                                     */
+
+			#ifdef DEBUGPRUNING
+			fprintf (stderr, "case (a)\n");
+			#endif
+
+			delete ( node->f );
+			node->f = NULL;
+			delete left;
+			deletedNodes.insert ( left );
+			delete right;
+			deletedNodes.insert ( right );
+			node->left = NULL;
+			node->right = NULL;
+			leafs--;
+			double ep = parent_node->distribution.entropy();
+			double en = node->distribution.entropy();
+			double es = sibling->distribution.entropy();
+			double pn = node->distribution.sum();
+			double ps = sibling->distribution.sum();
+			mi = ep - ( pn * en + ps * es ) / (pn+ps);
+
+			#ifdef DEBUGPRUNING
+			fprintf (stderr, "ep %f en %f es %f pn %f ps %f\n", 
+			ep, en, es, pn, ps );
+			parent_node->distribution.store(cerr);
+			node->distribution.store(cerr);
+			sibling->distribution.store(cerr);
+			
+			fprintf (stderr, "add new pre-leaf %ld: mi %lf top %lf\n", (long int)parent_node, mi, 
+				-lastLevelInnerNodes.top().first);
+			#endif
+
+			lastLevelInnerNodes.push ( triplet<double, long, DecisionNode *> (
+				- mi, parent_index, parent_node ) );
+
+
+		} else if ( left->isLeaf() ) {
+			// --------------- case (b) left child is a leaf
+		
+			#ifdef DEBUGPRUNING
+			fprintf (stderr, "case (b)\n");
+			#endif
+
+			if ( nodeIsLeft ) 
+				parent_node->left = right;
+			else
+				parent_node->right = right;
+
+			parentStructure[right] = parent_node;
+			delete left;
+			deletedNodes.insert ( left );
+			delete node;
+			deletedNodes.insert ( node );
+			leafs--;
+
+	
+		} else if ( right->isLeaf() ) {
+		
+			// --------------- case (c) right child is a leaf
+			
+			#ifdef DEBUGPRUNING
+			fprintf (stderr, "case (c)\n");
+			#endif
+			if ( nodeIsLeft ) 
+				parent_node->left = left;
+			else
+				parent_node->right = left;
+			
+			delete right;
+			deletedNodes.insert ( right );
+			delete node;
+			deletedNodes.insert ( node );
+			parentStructure[left] = parent_node;
+			leafs--;
+		} else {
+			fthrow(Exception, "There is a bug in this code: CodebookRandomForest (good luck!) bugid=2");
+		}
+    }
+
+    for ( vector<DecisionTree *>::iterator i = trees.begin(); i != trees.end() ; )
+    {
+		if ( deletedRoots.find((*i)->getRoot()) != deletedRoots.end() )
+		{
+			delete (*i);
+			i = trees.erase ( i );
+		} else {
+			i++;
+		}
+    }
+#ifdef DEBUGPRUNING
+    fprintf (stderr, "Final number of leafs: %ld (%d)\n", leafs, restrictedCodebookSize );
+#endif
+}
+
+void CodebookRandomForest::buildLeafMap ()
+{
+    if ( restrictedCodebookSize > 0 ) {
+		pruneForest ();
+    }
+
+    map<DecisionNode *, pair<long, int> > index;
+    vector< pair<long, DecisionNode *> > index_reverse;
+    clusterforest->indexDescendants ( index );
+
+    leafMap.clear();
+    for ( map<DecisionNode *, pair<long, int> >::const_iterator i = index.begin();
+	    i != index.end(); i++ )
+    {
+		DecisionNode *node = i->first;
+		int depth = i->second.second;
+		long index = i->second.first;
+		if ( ( (node->right == NULL) && (node->left == NULL) && (depth <= maxDepth) ) || ( depth == maxDepth ) )
+			index_reverse.push_back ( pair<long, DecisionNode *> ( index, node ) );
+	}
+
+    sort ( index_reverse.begin(), index_reverse.end() );
+
+    /*************************************
+	Recover a kind of canonical node
+	permutation
+    **************************************/
+    for ( vector< pair<long, DecisionNode *> >::const_iterator i = index_reverse.begin();
+	    i != index_reverse.end(); i++ )
+    {
+		DecisionNode *node = i->second;
+		leafMap.insert ( pair<DecisionNode *, int> ( node, leafMap.size() ) );
+    }
+#ifdef DEBUGPRUNING
+    fprintf (stderr, "CSRandomForest::buildLeafMap: dimension = %d\n", (int)leafMap.size() );
+#endif
+
+    reinit ( leafMap.size() );
+}
+
+
+void CodebookRandomForest::copy ( const Codebook *codebook )
+{
+    fthrow(Exception, "CodebookRandomForest::not yet implemented !\n");
+}
+
+void CodebookRandomForest::vote ( const NICE::Vector & feature, int & codebookEntry, double & weight, double & distance ) const
+{
+    fthrow(Exception, "CodebookRandomForest::not supported, please use multi voting feature\n");
+}
+
+void CodebookRandomForest::vote ( const NICE::Vector & feature, NICE::Vector & histogram, 
+		    int & codebookEntry, double & weight, double & distance ) const
+{
+    SparseVector votes;
+    vote ( feature, votes );
+    for ( SparseVector::const_iterator i = votes.begin(); 
+		i != votes.end(); i++ )
+    {
+		int index = i->first;
+		double val = i->second;
+		histogram[index] += val;
+
+		if ( i == votes.begin() )
+		{
+			codebookEntry = index;
+			weight = val;
+		}
+    }
+    distance = 0.0;
+}
+
+void CodebookRandomForest::vote ( const NICE::Vector & feature, NICE::SparseVector & votes ) const
+{
+    vector<DecisionNode *> leafNodes;
+    NICE::Vector *x = new NICE::Vector ( feature );
+    Example pe ( x );
+    clusterforest->getLeafNodes ( pe, leafNodes, maxDepth );
+    delete x;
+    for ( vector<DecisionNode *>::const_iterator j = leafNodes.begin();
+		j != leafNodes.end(); j++ )
+    {
+		map<DecisionNode *, int>::const_iterator k = leafMap.find ( *j );
+		assert ( k != leafMap.end() );
+		int leafindex = k->second;
+		votes.insert ( votes.begin(), pair<int, double> ( leafindex, 1.0 ) );
+    }
+
+}
+
+void CodebookRandomForest::voteAndClassify ( const NICE::Vector & feature, NICE::SparseVector & votes, FullVector & distribution ) const
+{
+    vector<DecisionNode *> leafNodes;
+    NICE::Vector *x = new NICE::Vector ( feature );
+    Example pe ( x );
+    clusterforest->getLeafNodes ( pe, leafNodes, maxDepth );
+    delete x;
+
+    for ( vector<DecisionNode *>::const_iterator j = leafNodes.begin();
+		j != leafNodes.end(); j++ )
+    {
+		map<DecisionNode *, int>::const_iterator k = leafMap.find ( *j );
+		DecisionNode *node = *j;
+
+		assert ( k != leafMap.end() );
+		int leafindex = k->second;
+		votes.insert ( votes.begin(), pair<int, double> ( leafindex, 1.0 ) );
+
+		FullVector sDistribution ( node->distribution );
+		sDistribution.normalize();
+		if ( distribution.empty() )
+			distribution = sDistribution;
+		else
+			distribution.add ( sDistribution );
+    }
+
+    distribution.normalize();
+}
+
+void CodebookRandomForest::voteAndClassify(const Vector &feature, SparseVector &votes, Vector &distribution) const
+{
+    vector<DecisionNode *> leafNodes;
+    NICE::Vector *x = new NICE::Vector ( feature );
+    Example pe ( x );
+    clusterforest->getLeafNodes ( pe, leafNodes, maxDepth );
+    delete x;
+
+    for ( vector<DecisionNode *>::const_iterator j = leafNodes.begin();
+        j != leafNodes.end(); j++ )
+    {
+        map<DecisionNode *, int>::const_iterator k = leafMap.find ( *j );
+        DecisionNode *node = *j;
+
+        assert ( k != leafMap.end() );
+        int leafindex = k->second;
+        votes.insert ( votes.begin(), pair<int, double> ( leafindex, 1.0 ) );
+
+        FullVector sDistribution ( node->distribution );
+        sDistribution.normalize();
+        if ( distribution.size() == 0 )
+        {
+            distribution.resize(sDistribution.size() );
+            distribution.set(0.0f);
+        }
+        for(int i = 0; i< sDistribution.size(); i++)
+            distribution[i] += sDistribution[i];
+
+    }
+
+    distribution.normalizeL2();
+}
+
+void CodebookRandomForest::add ( const Codebook *codebook )
+{
+    fthrow ( Exception, "CodebookRandomForest::not yet implemented !");
+}
+	
+Codebook *CodebookRandomForest::clone () const
+{
+    return (new CodebookRandomForest(maxDepth, restrictedCodebookSize));
+}
+
+void CodebookRandomForest::clear ()
+{
+    if ( clusterforest != NULL )
+        clusterforest->clear();
+    Codebook::clear();
+}
+
+void CodebookRandomForest::restore ( istream & is, int format )
+{
+    if (is.good())
+    {
+        std::string tmp;
+        is >> tmp; //class name
+
+        if ( ! this->isStartTag( tmp, "CodebookRandomForest" ) )
+        {
+            std::cerr << " WARNING - attempt to restore CodebookRandomForest, but start flag " << tmp << " does not match! Aborting... " << std::endl;
+            throw;
+        }
+
+        if(this->clusterforest == NULL)
+            this->clusterforest = new FPCRandomForests ();
+
+
+        bool b_endOfBlock = false;
+
+        while ( !b_endOfBlock )
+        {
+            is >> tmp; // start of block
+
+            if ( this->isEndTag( tmp, "CodebookRandomForest" )  || is.eof() )
+            {
+                b_endOfBlock = true;
+                continue;
+            }
+
+            tmp = this->removeStartTag ( tmp );
+            if ( tmp.compare("baseclass") == 0 )
+            {
+                Codebook::restore(is, format);
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+            else if ( tmp.compare("maxDepth") == 0 )
+            {
+                is >> maxDepth;
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+            else if ( tmp.compare("restrictedCodebookSize") == 0 )
+            {
+                is >> restrictedCodebookSize;
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+            else if ( tmp.compare("maxClassNo") == 0 )
+            {
+                int maxClassNo = 0;
+                is >> maxClassNo;
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+
+                if(clusterforest != NULL)
+                    clusterforest->setMaxClassNo(maxClassNo);
+            }
+            else if ( tmp.compare("clusterforest") == 0 )
+            {
+                clusterforest->restore ( is, format );
+                is >> tmp; // end of block
+                tmp = this->removeEndTag ( tmp );
+            }
+        }
+
+        buildLeafMap();
+    }
+}
+
+void CodebookRandomForest::store ( ostream & os, int format ) const
+{
+    if (os.good())
+    {
+        // show starting point
+        os << this->createStartTag( "CodebookRandomForest" ) << std::endl;
+
+        os.precision (numeric_limits<double>::digits10 + 1);
+
+        os << this->createStartTag( "baseclass" ) << std::endl;
+        Codebook::store ( os, format );
+        os << this->createEndTag( "baseclass" ) << std::endl;
+
+        os << this->createStartTag( "maxDepth" ) << std::endl;
+        os << maxDepth << std::endl;
+        os << this->createEndTag( "maxDepth" ) << std::endl;
+
+        os << this->createStartTag( "restrictedCodebookSize" ) << std::endl;
+        os << restrictedCodebookSize << std::endl;
+        os << this->createEndTag( "restrictedCodebookSize" ) << std::endl;
+
+        os << this->createStartTag( "maxClassNo" ) << std::endl;
+        os << clusterforest->getMaxClassNo() << endl;
+        os << this->createEndTag( "maxClassNo" ) << std::endl;
+
+        os << this->createStartTag( "clusterforest" ) << std::endl;
+        clusterforest->store ( os, format );
+        os << this->createEndTag( "clusterforest" ) << std::endl;
+/*        Codebook::store ( os, format );
+        os << maxDepth << endl;
+        os << restrictedCodebookSize << endl;
+        os << clusterforest->getMaxClassNo() << endl;
+        clusterforest->store ( os, format );
+        os << endl;
+*/
+        // done
+        os << this->createEndTag( "CodebookRandomForest" ) << std::endl;
+    }
+}

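A note on the pruning criterion: pruneForest() scores every pre-leaf node by how little information is lost when its two leaf children are merged into it, mi = H(node) - (n_left * H(left) + n_right * H(right)) / n_node, and contracts the lowest-scoring nodes first (the queue stores -mi because it is a max-heap). Below is a self-contained sketch of that score on plain count vectors, assuming entropy()/sum() behave like the usual Shannon entropy and sum on unnormalized class counts (the FullVector API itself is not shown in this commit):

    // Sketch of the contraction score from pruneForest(), on plain arrays.
    #include <cmath>
    #include <cstdio>
    #include <vector>

    static double sum ( const std::vector<double> & d )
    {
        double s = 0.0;
        for ( size_t i = 0; i < d.size(); i++ ) s += d[i];
        return s;
    }

    // Shannon entropy of an unnormalized count vector
    static double entropy ( const std::vector<double> & d )
    {
        double s = sum ( d ), e = 0.0;
        for ( size_t i = 0; i < d.size(); i++ )
            if ( d[i] > 0.0 ) e -= ( d[i] / s ) * std::log ( d[i] / s );
        return e;
    }

    int main()
    {
        // made-up class-count distributions of a node and its two leaf children
        std::vector<double> node  = { 8.0, 4.0 };
        std::vector<double> left  = { 6.0, 1.0 };
        std::vector<double> right = { 2.0, 3.0 };

        // small values mean little information is lost by contracting the
        // children, so -mi goes into the max-priority queue
        double mi = entropy ( node )
                  - ( sum ( left )  * entropy ( left )
                    + sum ( right ) * entropy ( right ) ) / sum ( node );
        printf ( "contraction score: %f\n", mi );
        return 0;
    }
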
+ 139 - 0
features/simplefeatures/CodebookRandomForest.h

@@ -0,0 +1,139 @@
+#ifndef CodebookRandomForestINCLUDE
+#define CodebookRandomForestINCLUDE
+
+
+#include "core/vector/VVector.h"
+#include "vislearning/features/simplefeatures/Codebook.h"
+#include "vislearning/classifier/fpclassifier/randomforest/FPCRandomForests.h"
+
+#include <string>
+
+namespace OBJREC {
+
+/**
+ * @brief Random Forest based codebook generator (Moosmann et al.)
+ *
+ * Implementation of the extremely randomized clustering forests (ERC) of Moosmann et al. (2006).
+ *
+ * A Matlab MEX interface for this class also exists (CodebookRandomForestMex.cpp).
+ *
+ * @example TestCodebookRandomForest
+ * @example progCodebookRandomForest
+ *
+ * @author Erik Rodner (creator), Johannes Ruehle (updater)
+ * @date 02/15/2008, 05/05/2014
+ */
+class CodebookRandomForest : public Codebook
+{
+
+    protected:
+		/** leaves and their corresponding indices */
+		std::map<DecisionNode *, int> leafMap;
+
+		/** the random forest used for clustering */
+		FPCRandomForests *clusterforest; 
+
+		/** maximum depth of the forest */
+		int maxDepth;
+
+		/** the forest is reduced in pruneForest to this
+		    specific number of leaves */
+		int restrictedCodebookSize;
+
+		/** build a directed parent relation graph from a forest
+		    @param node starting node for traversal
+		    @param parentStructure (a,b) is in this map if b is the parent node of a
+		*/
+		void buildParentStructure ( DecisionNode *node, 
+		 std::map<DecisionNode *, DecisionNode *> & parentStructure );
+
+		/** reduce the cluster forest to a specific number of leafs */
+		void pruneForest ();
+
+		/** enumerate all leaves and store this information in leafMap */
+		void buildLeafMap ();
+
+    public:
+		/** simple constructor */
+		CodebookRandomForest( int maxDepth, int restrictedCodebookSize = 0 );
+		
+		/** standard constructor */
+		CodebookRandomForest( FPCRandomForests *clusterforest, int maxDepth, int restrictedCodebookSize = 0 );
+		
+		/** simple destructor */
+		virtual ~CodebookRandomForest();
+		
+		/**
+		 * set a new clusterforest
+		 * @param clusterforest new random forest
+		 */
+		void setClusterForest( FPCRandomForests *clusterforest);
+		
+		/**
+		 * not supported, please use one of the other vote functions
+		 */
+        void vote ( const NICE::Vector & feature, int & codebookEntry, double & weight, double & distance ) const;
+        virtual void voteVQ ( const NICE::Vector & feature, int & codebookEntry, double & weight, double & distance ) const
+        {
+            this->vote(feature,codebookEntry, weight, distance);
+        }
+
+        void vote ( const NICE::Vector & feature, NICE::Vector & histogram, int & codebookEntry, double & weight, double & distance ) const;
+        virtual void voteVQ (const NICE::Vector &feature, NICE::Vector &histogram, int & codebookEntry, double & weight, double & distance ) const {
+            this->vote(feature, histogram, codebookEntry, weight, distance);
+        }
+        virtual void voteVA ( const NICE::Vector & feature, NICE::Vector & votes ) const {
+            int codebookEntry = 0;
+            double weight = 0.0f;
+            double distance = 0.0f;
+            this->vote(feature, votes, codebookEntry, weight, distance);
+        }
+
+
+	
+		/** this is the preferred voting interface for this codebook */
+        void vote ( const NICE::Vector & feature, NICE::SparseVector & votes ) const;
+
+		/** normal codebook voting, but additionally returns a probability distribution for the class label **/
+        void voteAndClassify ( const NICE::Vector & feature, NICE::SparseVector & votes, FullVector & distribution ) const;
+        /** normal codebook voting, but additionally returns a probability distribution for the class label **/
+        void voteAndClassify ( const NICE::Vector & feature, NICE::SparseVector & votes, NICE::Vector & distribution ) const;
+        virtual void voteVA ( const NICE::Vector & feature, NICE::SparseVector & votes ) const {
+            this->vote(feature, votes);
+        }
+		
+		/** this codebook method gives multiple votes for each local feature (depending
+		    on the number of decision trees in the forest) */
+        bool allowsMultipleVoting () { return true; }
+
+		/** get the random forest for direct access */
+        FPCRandomForests *getRandomForest (void) { return clusterforest; }
+
+		void add ( const Codebook *codebook );
+		void copy ( const Codebook *codebook );
+		Codebook *clone () const;
+
+		/** clear the codebook */
+		void clear ();
+
+		/** read the codebook from a stream */
+		void restore ( std::istream & is, int format = 0);
+
+		/** write the codebook to a stream */
+		void store ( std::ostream & os, int format = 0) const;
+
+        int getMaxDepth() const
+        {
+            return this->maxDepth;
+        }
+
+        int getRestrictedCodebookSize() const
+        {
+            return restrictedCodebookSize;
+        }
+};
+
+
+} // namespace
+
+#endif

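A usage sketch for the class above, mirroring the createAndTrain path of the MEX wrapper and the standalone program further below. The toy data, sizes and the config file name are placeholders; the include list follows progCodebookRandomForest.cpp:

    #include <cstdlib>

    #include <core/basics/Config.h>
    #include <core/vector/MatrixT.h>
    #include <core/vector/VectorT.h>

    #include "vislearning/cbaselib/Example.h"
    #include "vislearning/cbaselib/FeaturePool.h"
    #include "vislearning/features/fpfeatures/VectorFeature.h"
    #include "vislearning/features/simplefeatures/CodebookRandomForest.h"

    int main()
    {
        NICE::Config conf ( "config.conf" );

        // toy data: 5-dimensional features as columns, labels in {0,1}
        NICE::Matrix features ( 5, 100 );
        NICE::Vector labels ( 100 );
        for ( size_t i = 0; i < 100; i++ )
        {
            labels[i] = double ( i % 2 );
            for ( size_t d = 0; d < 5; d++ )
                features ( d, i ) = drand48() + labels[i];
        }

        OBJREC::Examples examples;
        OBJREC::Examples::wrapExamplesAroundFeatureMatrix ( features, labels, examples );

        // explode a raw vector feature into a feature pool
        OBJREC::FeaturePool fp;
        OBJREC::VectorFeature vecFeature ( 5 );
        vecFeature.explode ( fp );

        OBJREC::FPCRandomForests *forest = new OBJREC::FPCRandomForests ( &conf, "RandomForest" );
        forest->train ( fp, examples );

        // the codebook takes ownership of the forest (deleted in ~CodebookRandomForest)
        OBJREC::CodebookRandomForest codebook ( forest, 10 /*maxDepth*/, 100 /*restrictedCodebookSize*/ );

        NICE::Vector descriptor ( 5, 0.5 );
        NICE::SparseVector votes;
        codebook.vote ( descriptor, votes ); // one vote per tree of the forest

        fp.destroy();
        examples.clean();
        return 0;
    }
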
+ 2 - 1
features/simplefeatures/libdepend.inc

@@ -1,5 +1,6 @@
 $(call PKG_DEPEND_INT,vislearning/optimization)
-$(call PKG_DEPEND_INT,vislearning/fourier)
+# This package does not exist.
+# $(call PKG_DEPEND_INT,vislearning/fourier)
 $(call PKG_DEPEND_INT,vislearning/features/localfeatures)
 $(call FILE_DEPEND_EXT,FCGetFeature.o,ICE)
 $(call FILE_DEPEND_EXT,FCGetFeature,ICE)

+ 444 - 0
features/simplefeatures/matlab/CodebookRandomForestMex.cpp

@@ -0,0 +1,444 @@
+#ifdef NICE_USELIB_MEX
+/** 
+* @author Johannes Ruehle
+* @date 25-04-2014
+* @brief Matlab interface for the extremely randomized clustering forest (ERC)
+*/
+
+// STL includes
+#include <math.h>
+#include <matrix.h>
+#include <mex.h>
+
+// NICE-core includes
+#include <core/basics/Config.h>
+#include <core/basics/Timer.h>
+#include <core/vector/MatrixT.h>
+#include <core/vector/VectorT.h>
+
+// CodebookRandomForest stuff
+#include "vislearning/features/simplefeatures/CodebookRandomForest.h"
+
+#include "vislearning/features/fpfeatures/VectorFeature.h"
+
+// Interface for conversion between Matlab and C objects
+#include "gp-hik-core/matlab/classHandleMtoC.h"
+#include "gp-hik-core/matlab/ConverterMatlabToNICE.h"
+#include "gp-hik-core/matlab/ConverterNICEToMatlab.h"
+
+using namespace std; //C basics
+using namespace NICE;  // nice-core
+
+#define DEBUG_VERBOSE
+
+NICE::Config parseParametersERC(const mxArray *prhs[], int nrhs)
+{
+  NICE::Config conf;
+
+  // Check parameters
+  if ( nrhs % 2 == 1 )
+  {
+      mexErrMsgTxt("parseParametersERC: odd number of config arguments, expected name/value pairs.");
+  }
+
+  // now run over all given parameter specifications
+  // and add them to the config
+  for( int i=0; i < nrhs; i+=2 )
+  {
+    std::string variable = MatlabConversion::convertMatlabToString(prhs[i]);
+    
+    /////////////
+    //CodebookRandomForest( int maxDepth
+
+//    number_of_trees = conf->gI(section, "number_of_trees", 20 );
+//    features_per_tree = conf->gD(section, "features_per_tree", 1.0 );
+//    samples_per_tree  = conf->gD(section, "samples_per_tree", 0.2 );
+//    use_simple_balancing = conf->gB(section, "use_simple_balancing", false);
+//    weight_examples = conf->gB(section, "weight_examples", false);
+//    memory_efficient = conf->gB(section, "memory_efficient", false);
+
+    //std::string builder_section = conf->gS(section, "builder_section", "DTBRandom");
+
+#ifdef DEBUG_VERBOSE
+    std::cerr << "config variable: "<< variable << std::endl;
+#endif
+    if(variable == "conf")
+    {
+        // if first argument is the filename of an existing config file,
+        // read the config accordingly
+
+        conf = NICE::Config ( MatlabConversion::convertMatlabToString( prhs[i+1] )  );
+#ifdef DEBUG_VERBOSE
+        std::cerr << "conf " << MatlabConversion::convertMatlabToString( prhs[i+1] ) << std::endl;
+#endif
+    }
+    else if( variable == "number_of_trees")
+    {
+        if ( mxIsInt32( prhs[i+1] ) )
+        {
+            int value = MatlabConversion::convertMatlabToInt32(prhs[i+1]);
+            conf.sI("RandomForest", variable, value);
+#ifdef DEBUG_VERBOSE
+            std::cerr << "number_of_trees " << value << std::endl;
+#endif
+        }
+        else
+        {
+            std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Int32 expected.";
+            mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
+        }
+
+    }
+    else if( variable == "maxDepthTree")
+    {
+        if ( mxIsInt32( prhs[i+1] ) )
+        {
+            int value = MatlabConversion::convertMatlabToInt32(prhs[i+1]);
+            conf.sI("CodebookRandomForest", variable, value);
+#ifdef DEBUG_VERBOSE
+            std::cerr << "maxDepthTree " << value << std::endl;
+#endif
+        }
+        else
+        {
+            std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Int32 expected.";
+            mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
+        }
+
+    }
+    else if( variable == "verbose")
+    {
+        if ( mxIsLogical( prhs[i+1] ) )
+        {
+            bool bVerbose = MatlabConversion::convertMatlabToBool(prhs[i+1]);
+            conf.sB("CodebookRandomForest", variable, bVerbose);
+#ifdef DEBUG_VERBOSE
+            std::cerr << "verbose " << bVerbose << std::endl;
+#endif
+        }
+        else
+        {
+            std::string errorMsg = "Unexpected parameter value for \'" +  variable + "\'. Boolean expected.";
+            mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
+        }
+
+    }
+
+  }
+
+  return conf;
+}
+
+// MAIN MATLAB FUNCTION
+void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
+{    
+#ifdef DEBUG_VERBOSE
+    std::cerr << "Verbose Debug Output on (compiled with debug definition)." << std::endl;
+#endif
+
+    // get the command string specifying what to do
+    if (nrhs < 1)
+        mexErrMsgTxt("No commands and options passed... Aborting!");        
+    
+    if( !mxIsChar( prhs[0] ) )
+        mexErrMsgTxt("First argument needs to be the command, i.e., the class method to call... Aborting!");
+    
+    std::string cmd = MatlabConversion::convertMatlabToString( prhs[0] );
+      
+        
+    // in all other cases, there should be a second input,
+    // which should be the class instance handle
+    if (nrhs < 2)
+      mexErrMsgTxt("Second input should be a class instance handle.");
+    
+    // delete object
+    if ( !strcmp("delete", cmd.c_str() ) )
+    {
+        // Destroy the C++ object
+        MatlabConversion::destroyObject<OBJREC::CodebookRandomForest>(prhs[1]);
+        return;
+    }
+    
+    ////////////////////////////////////////
+    //  Check which class method to call  //
+    ////////////////////////////////////////
+    
+    
+    // standard train - assumes initialized object
+    if (!strcmp("createAndTrain", cmd.c_str() ))
+    {
+        // Check parameters
+        if (nlhs < 1 || nrhs < 4 )
+        {
+            mexErrMsgTxt("Train: Unexpected arguments.");
+        }
+        
+        //------------- read the data --------------
+        if (nrhs < 4)
+        {
+            mexErrMsgTxt("needs at least 2 matrix inputs, first the training features, second the sample labels");
+            return;
+        }
+
+        const mxArray *t_pArrTrainData   = prhs[1];
+        const mxArray *t_pArrTrainLabels = prhs[2];
+
+
+        //----------------- parse config options  -------------
+        NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
+
+        int iNumFeatureDimension = mxGetM( t_pArrTrainData ); // feature dimensions
+#ifdef DEBUG_VERBOSE
+        std::cerr << "iNumFeatureDimension " << iNumFeatureDimension << std::endl;
+#endif
+        //----------------- create examples object -------------
+        NICE::Vector t_vecLabelsTrain = MatlabConversion::convertDoubleVectorToNice( t_pArrTrainLabels );
+        NICE::Matrix t_matDataTrain   = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
+
+        OBJREC::Examples examplesTrain;
+
+        bool bRet = OBJREC::Examples::wrapExamplesAroundFeatureMatrix( t_matDataTrain, t_vecLabelsTrain, examplesTrain );
+        if( !bRet )
+        {
+            mexErrMsgTxt("createAndTrain: Error creating Examples from raw feature matrix and labels.");
+        }
+
+        //----------------- create raw feature mapping -------------
+        OBJREC::FeaturePool fp;
+        OBJREC::VectorFeature *pVecFeature = new OBJREC::VectorFeature(iNumFeatureDimension);
+        pVecFeature->explode(fp);
+
+#ifdef DEBUG_VERBOSE
+        //----------------- debug features -------------
+        OBJREC::Example t_Exp = examplesTrain[0].second;
+        NICE::Vector t_FeatVector;
+        fp.calcFeatureVector(t_Exp, t_FeatVector);
+        std::cerr << "first full Feature Vec: " <<t_FeatVector << std::endl;
+#endif
+        //----------------- train our random Forest -------------
+        OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&conf,"RandomForest");
+        pRandForest->train(fp, examplesTrain);
+
+        //----------------- create codebook ERC clusterer -------------
+        int nMaxDepth        = conf.gI("CodebookRandomForest", "maxDepthTree",10);
+        int nMaxCodebookSize = conf.gI("CodebookRandomForest", "maxCodebookSize",100);
+#ifdef DEBUG_VERBOSE
+        std::cerr << "maxDepthTree " << nMaxDepth << std::endl;
+        std::cerr << "nMaxCodebookSize " << nMaxCodebookSize << std::endl;
+#endif
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(pRandForest, nMaxDepth,nMaxCodebookSize);
+
+        // handle to the C++ instance
+        plhs[0] = MatlabConversion::convertPtr2Mat<OBJREC::CodebookRandomForest>( pCodebookRandomForest );
+
+        //----------------- clean up -------------
+
+        delete pVecFeature;
+        pVecFeature = NULL;
+        // delete all "exploded" features, they are internally cloned in the random trees anyway
+        fp.destroy();
+        //
+        examplesTrain.clean();
+
+        return;
+    }
+    ///// generate Histogram over trees
+    else if (!strcmp("generateHistogram", cmd.c_str() ))
+    {
+        //------------- read the data --------------
+        if (nrhs < 3)
+        {
+            mexErrMsgTxt("needs at least 1 matrix input, the features to quantize");
+            return;
+        }
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
+        if( pCodebookRandomForest == NULL )
+        {
+            mexErrMsgTxt("generateHistogram: No valid trained classifier given");
+        }
+
+        //----------------- convert matlab data into NICE data -------------
+        const mxArray *t_pArrTrainData   = prhs[2];
+
+        NICE::Matrix matDataTrain = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
+        size_t numTrainSamples      = matDataTrain.cols();
+        size_t iNumFeatureDimension = matDataTrain.rows();
+        size_t iNumCodewords        = pCodebookRandomForest->getCodebookSize();
+#ifdef DEBUG_VERBOSE
+        std::cerr << "numTrainSamples "      << numTrainSamples         << std::endl;
+        std::cerr << "iNumFeatureDimension " << iNumFeatureDimension    << std::endl;
+        std::cerr << "iNumCodewords "        << iNumCodewords           << std::endl;
+#endif
+
+        //----------------- parse config options  -------------
+        bool bVerboseOutput = false;
+        if( nrhs > 3)
+        {
+            NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
+            bVerboseOutput = conf.gB("CodebookRandomForest", "verbose", false);
+        }
+
+        //----------------- quantize samples into histogram -------------
+        NICE::Vector histogram(iNumCodewords, 0.0f);
+
+        const double *pDataPtr = matDataTrain.getDataPointer();
+        int t_iCodebookEntry; double t_fWeight; double t_fDistance;
+        for (size_t i = 0; i < numTrainSamples; i++, pDataPtr+= iNumFeatureDimension )
+        {
+            const NICE::Vector t_VecTrainData( pDataPtr , iNumFeatureDimension);
+            pCodebookRandomForest->voteVQ(t_VecTrainData, histogram, t_iCodebookEntry, t_fWeight, t_fDistance );
+            if(bVerboseOutput)
+                std::cerr << i << ": " << "CBEntry " << t_iCodebookEntry << " Weight: " << t_fWeight << " Distance: " << t_fDistance << std::endl;
+        }
+
+        //----------------- convert NICE histogram into MATLAB data -------------
+        plhs[0] = MatlabConversion::convertVectorFromNice(histogram);
+
+        return;
+    }
+    ///// get distribution of classes per sample
+    else if (!strcmp("calcClassDistributionPerSample", cmd.c_str() ))
+    {
+        //------------- read the data --------------
+        if (nrhs < 3)
+        {
+            mexErrMsgTxt("needs at least 1 matrix input, the features to classify");
+            return;
+        }
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
+        if( pCodebookRandomForest == NULL )
+        {
+            mexErrMsgTxt("calcClassDistributionPerSample: No valid trained classifier given");
+        }
+
+        //----------------- convert matlab data into NICE data -------------
+        const mxArray *t_pArrTrainData   = prhs[2];
+
+        NICE::Matrix matData = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
+        size_t numTrainSamples      = matData.cols();
+        size_t iNumFeatureDimension = matData.rows();
+#ifdef DEBUG_VERBOSE
+        std::cerr << "numTrainSamples "      << numTrainSamples         << std::endl;
+        std::cerr << "iNumFeatureDimension " << iNumFeatureDimension    << std::endl;
+#endif
+
+        //----------------- parse config options  -------------
+        bool bVerboseOutput = false;
+        if( nrhs > 3)
+        {
+            NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
+            bVerboseOutput = conf.gB("CodebookRandomForest", "verbose", false);
+        }
+
+        //----------------- quantize samples into histogram -------------
+        const double *pDataPtr = matData.getDataPointer();
+        for (size_t i = 0; i < numTrainSamples; i++, pDataPtr+= iNumFeatureDimension )
+        {
+            NICE::SparseVector votes;
+            NICE::Vector distribution;
+            const NICE::Vector t_VecTrainData( pDataPtr , iNumFeatureDimension);
+            pCodebookRandomForest->voteAndClassify(t_VecTrainData, votes, distribution);
+            if(bVerboseOutput)
+            {
+                NICE::Vector t_fullVector;
+                votes.convertToVectorT( t_fullVector );
+                std::cerr << i << ": " << "votes " << t_fullVector << " distribution: " << distribution << std::endl;
+            }
+        }
+
+        //----------------- return success flag -------------
+        //plhs[0] = MatlabConversion::convertVectorFromNice(histogram);
+        plhs[0] =  mxCreateLogicalScalar( true );
+
+        return;
+    }
+    // store codebook random forest to file
+    else if ( strcmp("storeToFile", cmd.c_str()) == 0 )
+    {
+        //------------- read the data --------------
+        if (nrhs != 3)
+        {
+            mexErrMsgTxt("needs a string for filename to save to");
+            return;
+        }
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);
+        if( pCodebookRandomForest == NULL )
+        {
+            mexErrMsgTxt("storeToFile: No valid trained classifier given");
+        }
+
+        bool bSuccess = false;
+
+        try
+        {
+            std::string sStoreFilename = MatlabConversion::convertMatlabToString( prhs[2] );
+            std::ofstream ofs;
+            ofs.open (sStoreFilename.c_str(), std::ofstream::out);
+            pCodebookRandomForest->store( ofs );
+            ofs.close();
+            bSuccess = true;
+        }
+        catch( std::exception &e)
+        {
+            std::cerr << "exception occurred: " << e.what() << std::endl;
+            mexErrMsgTxt("storing failed");
+        }
+
+        plhs[0] =  mxCreateLogicalScalar( bSuccess );
+
+        return;
+    }
+    // restore codebook random forest from file
+    else if (!strcmp("restoreFromFile", cmd.c_str() ))
+    {
+        //------------- read the data --------------
+        if (nrhs != 2)
+        {
+            mexErrMsgTxt("needs a string for filename to load from");
+            return;
+        }
+
+        //----------------- convert ptr of trained codebook forest -------------
+        OBJREC::CodebookRandomForest *pRestoredCRF = new OBJREC::CodebookRandomForest(-1, -1);
+
+        bool bSuccess = false;
+
+        try
+        {
+            std::string sStoreFilename = MatlabConversion::convertMatlabToString( prhs[1] );
+            std::ifstream ifs;
+            ifs.open( sStoreFilename.c_str() );
+            pRestoredCRF->restore( ifs );
+            ifs.close();
+            bSuccess = true;
+        }
+        catch( std::exception &e)
+        {
+            std::cerr << "exception occurred: " << e.what() << std::endl;
+            mexErrMsgTxt("restoring failed");
+        }
+
+        // handle to the C++ instance
+        if(bSuccess)
+            plhs[0] = MatlabConversion::convertPtr2Mat<OBJREC::CodebookRandomForest>( pRestoredCRF );
+        else
+            plhs[0] = mxCreateLogicalScalar(false);
+
+        return;
+    }
+
+    
+    // Got here, so command not recognized
+    
+    std::string errorMsg ( cmd );
+    errorMsg += " -- command not recognized.";
+    mexErrMsgTxt( errorMsg.c_str() );
+
+}
+#endif

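The wrapper above relies on the object-handle pattern from gp-hik-core's classHandleMtoC.h: convertPtr2Mat stores the C++ object's address in a Matlab uint64 scalar, and convertMat2Ptr casts it back on the next call. A plain-C++ rendition of that idea, without mex.h; the struct and its value are made up for illustration:

    #include <cstdint>
    #include <iostream>

    struct Forest { int size; };

    int main()
    {
        Forest *obj = new Forest();
        obj->size = 42;

        // what "createAndTrain" hands back to Matlab inside plhs[0]
        uint64_t handle = reinterpret_cast<uint64_t> ( obj );

        // what "generateHistogram" etc. do with the handle in prhs[1]
        Forest *again = reinterpret_cast<Forest *> ( handle );
        std::cout << again->size << std::endl; // prints 42

        delete again; // what the "delete" command does
        return 0;
    }
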
+ 9 - 0
features/simplefeatures/matlab/Makefile

@@ -0,0 +1,9 @@
+#MEX=/home/matlab/7.14/bin/mex
+MEX=/home/matlab/8.2/academic/bin/mex
+NICEFLAGS1=$(shell pkg-config libgp-hik-core --cflags --libs)
+NICEFLAGS2=$(shell pkg-config libvislearning_features libsegmentation --cflags --libs)
+NICEFLAGS=$(subst -fopenmp,,$(NICEFLAGS2))
+
+default:
+	${MEX} ${NICEFLAGS} -largeArrayDims CodebookRandomForestMex.cpp ../../../../gp-hik-core/matlab/ConverterMatlabToNICE.cpp ../../../../gp-hik-core/matlab/ConverterNICEToMatlab.cpp
+	${MEX} ${NICEFLAGS} -largeArrayDims testHelperDataConversionMex.cpp ../../../../gp-hik-core/matlab/ConverterMatlabToNICE.cpp ../../../../gp-hik-core/matlab/ConverterNICEToMatlab.cpp

+ 21 - 0
features/simplefeatures/matlab/config.conf

@@ -0,0 +1,21 @@
+[RandomForest]
+number_of_trees = 3
+features_per_tree = 1.0
+samples_per_tree  = 0.5
+builder = random
+builder_section = DTBRandom
+minimum_error_reduction = .001
+minimum_improvement = .01
+enable_out_of_bag_estimates = false
+
+[DTBRandom]
+random_split_tests = 50
+random_features = 6
+max_depth = 4
+min_examples = 50
+save_indices = false
+start_random_generator = true
+
+[CodebookRandomForest]
+maxDepthTree = 500
+maxCodebookSize = 20

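A short sketch of how these sections are consumed, using the NICE::Config getters that appear in this commit (gI/gD/gB take a section, a key, and a default):

    #include <iostream>
    #include <core/basics/Config.h>

    int main()
    {
        NICE::Config conf ( "config.conf" );

        int    trees   = conf.gI ( "RandomForest", "number_of_trees", 20 );
        double perTree = conf.gD ( "RandomForest", "samples_per_tree", 0.2 );
        int    depth   = conf.gI ( "CodebookRandomForest", "maxDepthTree", 10 );
        int    cbSize  = conf.gI ( "CodebookRandomForest", "maxCodebookSize", 100 );

        std::cout << trees << " trees, " << perTree << " samples per tree, "
                  << "codebook depth " << depth << ", size " << cbSize << std::endl;
        return 0;
    }
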
+ 2 - 0
features/simplefeatures/matlab/libdepend.inc

@@ -0,0 +1,2 @@
+$(call PKG_DEPEND_EXT,MEX)
+

+ 102 - 0
features/simplefeatures/matlab/testHelperDataConversionMex.cpp

@@ -0,0 +1,102 @@
+#ifdef NICE_USELIB_MEX
+/** 
+* @author Johannes Ruehle
+* @date 25-04-2014
+* @brief test MEX program for wrapping an OBJREC::Examples class around a given feature and label matrix
+*/
+
+// STL includes
+#include <math.h>
+#include <matrix.h>
+#include <mex.h>
+
+// NICE-core includes
+#include <core/vector/MatrixT.h>
+#include <core/vector/VectorT.h>
+
+#include "vislearning/cbaselib/Example.h"
+
+// Interface for conversion between Matlab and C objects
+#include "gp-hik-core/matlab/classHandleMtoC.h"
+#include "gp-hik-core/matlab/ConverterMatlabToNICE.h"
+#include "gp-hik-core/matlab/ConverterNICEToMatlab.h"
+
+using namespace std; //C basics
+using namespace NICE;  // nice-core
+
+// MAIN MATLAB FUNCTION
+void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
+{    
+    // get the command string specifying what to do
+    if (nrhs < 1)
+        mexErrMsgTxt("No commands and options passed... Aborting!");        
+    
+    if( !mxIsChar( prhs[0] ) )
+        mexErrMsgTxt("First argument needs to be the command, i.e., the unit test method to call... Aborting!");
+    
+    std::string cmd = MatlabConversion::convertMatlabToString( prhs[0] );
+    
+    // in all other cases, there should be a second input,
+    // which should be some kind of matrix variable
+    if (nrhs < 2)
+    {
+        mexErrMsgTxt("Second input should be some kind of matrix variable");
+        return;
+    }
+
+    if (nlhs < 1)
+    {
+        mexErrMsgTxt("No return value defined, possible loss of data... Aborting!");
+    }
+
+    ////////////////////////////////////////
+    //  Check which method to call  //
+    ////////////////////////////////////////
+
+    if ( !strcmp("convertDoubleMatrixToExamples", cmd.c_str() ) )
+    {
+        if (nrhs != 3)
+        {
+            mexErrMsgTxt("needs 2 matrix inputs, first the training features, second the sample labels");
+            return;
+        }
+
+        const mxArray *t_pArrTrainData   = prhs[1];
+        const mxArray *t_pArrTrainLabels = prhs[2];
+
+        NICE::Vector t_vecLabelsTrain = MatlabConversion::convertDoubleVectorToNice( t_pArrTrainLabels );
+        NICE::Matrix t_matDataTrain   = MatlabConversion::convertDoubleMatrixToNice( t_pArrTrainData   );
+
+        OBJREC::Examples t_ExamplesTrain;
+
+        bool bConversionSuccess = OBJREC::Examples::wrapExamplesAroundFeatureMatrix( t_matDataTrain, t_vecLabelsTrain, t_ExamplesTrain );
+        //wrapExamplesAroundFeatureMatrix
+
+        std::cerr << "Examples size: " << t_ExamplesTrain.size() << std::endl;
+        for(int i=0; i< t_ExamplesTrain.size(); i++)
+        {
+            int iClass = t_ExamplesTrain[i].first;
+            OBJREC::Example &t_Example = t_ExamplesTrain[i].second ;
+
+            std::cerr << "Example["<<i<<"]" << "L:" << iClass << " data: "<< *t_Example.vec << std::endl;
+
+        }
+
+        // clean up
+        t_ExamplesTrain.clean();
+
+        // output
+        plhs[0] = mxCreateLogicalScalar( bConversionSuccess );
+
+        return;
+    }    
+    
+    
+    // Got here, so command not recognized
+    
+    std::string errorMsg ( cmd );
+    errorMsg += " -- command not recognized.";
+    mexErrMsgTxt( errorMsg.c_str() );
+
+}
+#endif

+ 45 - 0
features/simplefeatures/matlab/unittestCodebookRandomForestMex.m

@@ -0,0 +1,45 @@
+% brief:    Unit testing of the CodebookRandomForest functions (ERC)
+% author:   Johannes Ruehle
+% date:     11-04-2014 (dd-mm-yyyy)
+
+%% test creation and training of the CodebookRandomForest (ERC) from sample matrix and label data
+
+    numSamples  = 10000;
+    numFeatures = 5;
+    maxClass    = 3;
+
+    matFeatures = rand(numSamples, numFeatures, 'double')';
+    matLabels = randi(maxClass, numSamples,1,'double');
+%%  create and train
+    hClassifier = CodebookRandomForestMex('createAndTrain',... 
+                            matFeatures, matLabels,...
+                            'conf', 'config.conf');
+%%  calcClassDistributionPerSample
+    bSuccess = CodebookRandomForestMex('calcClassDistributionPerSample',... 
+                                        hClassifier,...
+                                        matFeatures,'verbose',true );
+    assert(bSuccess);
+%%  generateHistogram
+    matHistogram = CodebookRandomForestMex('generateHistogram',... 
+                                        hClassifier,...
+                                        matFeatures, 'verbose',false);
+    
+%%  store
+    bSuccess = CodebookRandomForestMex('storeToFile',... 
+                                        hClassifier,...
+                                        'codebookrf.stored.txt');    
+%%  restore
+    hRestoredClassifier = CodebookRandomForestMex('restoreFromFile',... 
+                                            'codebookrf.stored.txt');
+    assert( ~(hRestoredClassifier == false) );
+    matHistogramNew = CodebookRandomForestMex('generateHistogram',... 
+                                        hRestoredClassifier,...
+                                        matFeatures, 'verbose',false);
+    d = matHistogramNew-matHistogram;
+    assert( sum(abs(d(:))) == 0 ); % histograms are identical
+%%    
+    CodebookRandomForestMex('delete', hClassifier);
+    CodebookRandomForestMex('delete', hRestoredClassifier);

+ 19 - 0
features/simplefeatures/matlab/unittestHelperDataConversionMex.m

@@ -0,0 +1,19 @@
+% brief:    Unit testing of the NICE::MatlabConversion functions
+% author:   Johannes Ruehle
+% date:     11-04-2014 (dd-mm-yyyy)
+
+%% test Creation of OBJREC::Examples class from sample matrix and label data
+
+numSamples  = 10;
+numFeatures = 5;
+maxClass    = 3;
+
+matFeatures = rand(numSamples, numFeatures, 'double')';
+matLabels = randi(maxClass, numSamples,1,'double');
+disp(matFeatures);
+try
+    bSuccess = testHelperDataConversionMex( 'convertDoubleMatrixToExamples', matFeatures, matLabels);
+    assert( bSuccess );
+catch ecpn
+    disp( ecpn );
+end

+ 2 - 0
features/simplefeatures/progs/libdepend.inc

@@ -0,0 +1,2 @@
+$(call PKG_DEPEND_EXT,MATIO)
+$(call PKG_DEPEND_EXT,HDF5)

+ 407 - 0
features/simplefeatures/progs/progCodebookRandomForest.cpp

@@ -0,0 +1,407 @@
+/**
+  * @brief Extremely randomized clustering forest program for Matlab input data.
+  *
+  * @author Johannes Ruehle
+  * @date 10/05/2014
+  */
+
+#ifdef NICE_USELIB_MEX
+#ifdef NICE_USELIB_MATIO
+
+#include <string>
+#include <exception>
+#include <iostream>
+#include <fstream>
+
+#include <QString> // structCommands below relies on QString (QtCore)
+
+//----------
+#include "vislearning/features/simplefeatures/CodebookRandomForest.h"
+#include "vislearning/features/fpfeatures/VectorFeature.h"
+
+#include "vislearning/cbaselib/FeaturePool.h"
+
+#ifdef NICE_USELIB_MATIO
+#include <core/matlabAccess/MatFileIO.h>
+
+const bool verbose = false;
+const bool verboseStartEnd = true;
+
+using namespace OBJREC;
+using namespace NICE;
+using namespace std;
+
+#undef DEBUG_VERBOSE
+
+struct structCommands
+{
+    QString sFunction;
+    QString sFileTrainData;
+    QString sFileTrainDataLabels;
+    QString sConfigFile;
+
+    QString sFileStoreClassifier;   // txt file storing the config of the trained codebook rdf
+    QString sFileStoreResult;       // matlab mat file storing the generated histogram
+
+};
+
+bool loadMatlabMatrix(const std::string &sFilename, const std::string &matrix_name, NICE::Matrix &p_Matrix)
+{
+
+    NICE::MatFileIO matlab_file(sFilename, MAT_ACC_RDONLY);
+
+#ifdef DEBUG_VERBOSE
+    // Show the number of variables in the file
+    int vars_in_file = matlab_file.getNumberOfVariables();
+
+    std::cout << vars_in_file << " Variables in " << sFilename << "\n";
+
+    // Load the matrix
+    std::cout << "Loading matrix \"" << matrix_name << "\"...\n";
+#endif
+    // Check if the variable is a matrix
+    matvar_t* matrix_variable = matlab_file.getVariableViaName(matrix_name);
+    if(matrix_variable == NULL)
+    {
+        std::cout << "Variable \"" << matrix_name << "\" not found in mat file.\n";
+        return false;
+    }
+
+    if(matrix_variable->rank != 2) {
+        std::cout << "Variable is not a matrix. Rank: " << matrix_variable->rank << ".\n";
+        return false;
+    }
+
+    // Read the dimensions
+    int cols = matrix_variable->dims[1];
+    int rows = matrix_variable->dims[0];
+    std::cout << "Dimensions: " << rows << " x " << cols << "\n";
+
+    // Read the matrix into a vector of vectors
+    std::vector< std::vector<double> > matrix_vecvec(rows, std::vector<double>(cols));
+    matlab_file.getFeatureMatrixViaName(matrix_vecvec, matrix_name);
+
+    // Now, we want a NICE matrix
+    //NICE::MatrixT<double> matrix(rows, cols);
+    p_Matrix.resize(rows, cols);
+    for(int i = 0; i < rows; i++) {
+        for(int j = 0; j < cols; j++) {
+            p_Matrix(i,j) = matrix_vecvec[i][j];
+        }
+    }
+
+    return true;
+}
+
+NICE::Matrix* loadMatlabVec(const std::string &sFilename, const std::string &matrix_name)
+{
+    NICE::Matrix *pMatrix = NULL;
+
+    NICE::MatFileIO *matFile = new NICE::MatFileIO(sFilename, MAT_ACC_RDONLY );
+
+    matvar_t *t = matFile->getVariableViaName(matrix_name);
+    if ( t->class_type == MAT_C_DOUBLE)
+    {
+        double *pD = (double*)( t->data );
+        pMatrix = new NICE::Matrix(pD ,  (int)t->dims[0], (int)t->dims[1], Matrix::copy );
+    }
+    else
+    {
+        std::cerr << "raw format of matlab matrix not supported" << std::endl;
+    }
+
+    Mat_VarFree(t);
+    delete matFile;
+
+    return pMatrix;
+}
+
+
+bool saveMatlabVector(const std::string &sFilename, const NICE::Vector &p_Vector, int p_iFodID)
+{
+    std::ofstream ofs;
+    ofs.open (sFilename.c_str(), std::ofstream::out);
+    if (!ofs.is_open())
+        return false;
+    ofs << p_iFodID << " #fodID" << std::endl;
+    ofs << p_Vector.size() << std::endl;
+    for(int i=0; i<p_Vector.size(); i++)
+        ofs << p_Vector[i] << std::endl;
+    ofs.close();
+
+    return true;
+}
+
+
+bool storeClassifier(const structCommands &p_Command, const OBJREC::CodebookRandomForest *p_pCodebookRandomForest)
+{
+    if( p_Command.sFileStoreClassifier.isEmpty() )
+        return false;
+
+    std::string t_sDestinationSave = p_Command.sFileStoreClassifier.toStdString();
+    std::ofstream ofs;
+    ofs.open (t_sDestinationSave.c_str(), std::ofstream::out);
+    p_pCodebookRandomForest->store( ofs );
+    ofs.close();
+
+    return true;
+
+}
+
+bool restoreClassifier(const structCommands &p_Command, OBJREC::CodebookRandomForest *p_pCodebookRandomForest)
+{
+    if( p_Command.sFileStoreClassifier.isEmpty() )
+        return false;
+
+    if (p_pCodebookRandomForest == NULL )
+        return false;
+
+    std::string t_sDestinationSave = p_Command.sFileStoreClassifier.toStdString();
+    std::ifstream ifs2;
+    ifs2.open (t_sDestinationSave.c_str() );
+    p_pCodebookRandomForest->restore( ifs2 );
+    ifs2.close();
+
+    return true;
+}
+
+bool createAndTrain( const structCommands &p_Command)
+{
+    if( p_Command.sConfigFile.isEmpty() )
+    {
+        std::cout << "no config file provided. Exiting" << std::endl;
+        return false;
+    }
+    NICE::Config t_conf = NICE::Config( p_Command.sConfigFile.toStdString() );
+
+    Matrix *t_pMatDataTrain       = loadMatlabVec( p_Command.sFileTrainData.toStdString(), "matFeatures");
+    if( t_pMatDataTrain == NULL )
+    {
+        std::cout << "Training data matrix couldn't be loaded" << std::endl;
+        return false;
+    }
+#ifdef DEBUG_VERBOSE
+    for(int i = 0; i<10; i++)
+    {
+        std::cerr << (*t_pMatDataTrain)(i,0) << " ## " << (*t_pMatDataTrain)(0,i) << std::endl;
+    }
+#endif
+    Matrix *t_pMatDataTrainLabels = loadMatlabVec( p_Command.sFileTrainDataLabels.toStdString(), "matLabels");
+    if( t_pMatDataTrainLabels == NULL )
+    {
+        std::cout << "Training data label matrix couldn't be loaded" << std::endl;
+        return false;
+    }
+    int iNumFeatureDimension = t_pMatDataTrain->rows();
+
+    NICE::Vector t_vecLabelsTrain(t_pMatDataTrainLabels->getDataPointer(), t_pMatDataTrainLabels->rows(), Vector::external);
+
+    OBJREC::Examples examplesTrain;
+
+    bool bRet = OBJREC::Examples::wrapExamplesAroundFeatureMatrix( *t_pMatDataTrain, t_vecLabelsTrain, examplesTrain );
+    if( !bRet )
+    {
+        std::cout << "createAndTrain: Error creating Examples from raw feature matrix and labels." << std::endl;
+        return 0;
+        return false;
+
+    //----------------- create raw feature mapping -------------
+    OBJREC::FeaturePool fp;
+    OBJREC::VectorFeature *pVecFeature = new OBJREC::VectorFeature(iNumFeatureDimension);
+    pVecFeature->explode(fp);
+
+#ifdef DEBUG_VERBOSE
+    //----------------- debug features -------------
+    OBJREC::Example t_Exp = examplesTrain[0].second;
+    NICE::Vector t_FeatVector;
+    fp.calcFeatureVector(t_Exp, t_FeatVector);
+    std::cerr << "first full Feature Vec: " <<t_FeatVector << std::endl;
+#endif
+    //----------------- train our random Forest -------------
+    OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&t_conf,"RandomForest");
+    pRandForest->train(fp, examplesTrain);
+
+    //----------------- create codebook ERC clusterer -------------
+    int nMaxDepth        = t_conf.gI("CodebookRandomForest", "maxDepthTree",10);
+    int nMaxCodebookSize = t_conf.gI("CodebookRandomForest", "maxCodebookSize",100);
+#ifdef DEBUG_VERBOSE
+    std::cerr << "maxDepthTree " << nMaxDepth << std::endl;
+    std::cerr << "nMaxCodebookSize " << nMaxCodebookSize << std::endl;
+#endif
+    OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(pRandForest, nMaxDepth,nMaxCodebookSize);
+
+
+    //----------------- store classifier in file ---------------------
+    bool bSuccess = storeClassifier(p_Command, pCodebookRandomForest);
+
+    //----------------- clean up -------------
+
+    delete pCodebookRandomForest;
+
+    delete pVecFeature;
+    pVecFeature = NULL;
+    // delete all "exploded" features, they are internally cloned in the random trees anyway
+    fp.destroy();
+    //
+    examplesTrain.clean();
+
+    delete t_pMatDataTrain;
+    delete t_pMatDataTrainLabels;
+
+    return true;
+}
+
+
+bool generateHistogram( const structCommands &p_Command)
+{
+
+    Matrix *t_pMatFodID = loadMatlabVec( p_Command.sFileTrainData.toStdString(), "fodID");
+    if( t_pMatFodID == NULL )
+    {
+        std::cout << "Data file doesn't contain a fodID variable, so it couldn't be loaded" << std::endl;
+        return false;
+    }
+    int iFodID = (*t_pMatFodID)(0,0);
+
+    Matrix *t_pMatDataTrain       = loadMatlabVec( p_Command.sFileTrainData.toStdString(), "matFeatures");
+    if( t_pMatDataTrain == NULL )
+    {
+        std::cout << "Data matrix couldn't be loaded" << std::endl;
+        return false;
+    }
+
+    //----------------- restore trained codebook forest -------------
+    OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(-1,-1);
+    if( !restoreClassifier(p_Command, pCodebookRandomForest ) )
+    {
+        std::cout << "Error restoring codebook random forest" << std::endl;
+        return false;
+    }
+
+    size_t numTrainSamples      = t_pMatDataTrain->cols();
+    size_t iNumFeatureDimension = t_pMatDataTrain->rows();
+    size_t iNumCodewords        = pCodebookRandomForest->getCodebookSize();
+#ifdef DEBUG_VERBOSE
+    std::cerr << "numTrainSamples "      << numTrainSamples         << std::endl;
+    std::cerr << "iNumFeatureDimension " << iNumFeatureDimension    << std::endl;
+    std::cerr << "iNumCodewords "        << iNumCodewords           << std::endl;
+#endif
+
+    //----------------- parse config options  -------------
+    // leftover from the mex interface (parseParametersERC): verbose output is
+    // currently fixed to off in this command line tool
+    bool bVerboseOutput = false;
+
+    //----------------- quantize samples into histogram -------------
+    NICE::Vector histogram(iNumCodewords, 0.0f);
+
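+    // iterate over the samples: each one occupies iNumFeatureDimension consecutive
+    // doubles in the matrix memory, so the raw pointer is advanced by that stride;
+    // voteVQ adds the per-sample leaf votes to the codebook histogram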
+    const double *pDataPtr = t_pMatDataTrain->getDataPointer();
+    int t_iCodebookEntry; double t_fWeight; double t_fDistance;
+    for (size_t i = 0; i < numTrainSamples; i++, pDataPtr+= iNumFeatureDimension )
+    {
+        const NICE::Vector t_VecTrainData( pDataPtr , iNumFeatureDimension);
+        pCodebookRandomForest->voteVQ(t_VecTrainData, histogram, t_iCodebookEntry, t_fWeight, t_fDistance );
+        if(bVerboseOutput)
+            std::cerr << i << ": " << "CBEntry " << t_iCodebookEntry << " Weight: " << t_fWeight << " Distance: " << t_fDistance << std::endl;
+    }
+
+    // store histogram
+    bool bSuccess = saveMatlabVector(p_Command.sFileStoreResult.toStdString(), histogram , iFodID);
+
+    //----------------- clean up -------------
+
+    delete pCodebookRandomForest;
+    delete t_pMatDataTrain;
+
+    return bSuccess;
+}
+
+
+#endif
+int main(int argc, char **argv)
+{
+
+#ifdef NICE_USELIB_MATIO
+#ifndef __clang__
+#ifndef __llvm__
+    std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
+
+    structCommands sCommand;
+
+    QString sCmdArg;
+    int iCurrArgIdx = 1;
+    while(iCurrArgIdx < argc)
+    {
+        sCmdArg = QString(argv[iCurrArgIdx]);
+
+        if    ( sCmdArg == "--function" )
+        {
+            iCurrArgIdx++;
+            sCommand.sFunction = QString(argv[iCurrArgIdx]);
+        }
+        else if( sCmdArg == "--config" )
+        {
+            iCurrArgIdx++;
+            sCommand.sConfigFile = QString(argv[iCurrArgIdx]);
+        }
+
+        else if( sCmdArg == "--traindata" )
+        {
+            iCurrArgIdx++;
+            sCommand.sFileTrainData = QString(argv[iCurrArgIdx]);
+        }
+        else if( sCmdArg == "--traindatalabels" )
+        {
+            iCurrArgIdx++;
+            sCommand.sFileTrainDataLabels = QString(argv[iCurrArgIdx]);
+        }
+        else if( sCmdArg == "--results" )
+        {
+            iCurrArgIdx++;
+            sCommand.sFileStoreResult = QString(argv[iCurrArgIdx]);
+        }
+        else if( sCmdArg == "--classifier" )
+        {
+            iCurrArgIdx++;
+            sCommand.sFileStoreClassifier = QString(argv[iCurrArgIdx]);
+        }
+        else if( sCmdArg == "--help" )
+        {
+             std::cout << "usage: --function <createAndTrain|generateHistogram> --config <file>"
+                       << " --traindata <file> --traindatalabels <file> --results <file>"
+                       << " --classifier <file>" << std::endl;
+             return 0;
+        }
+        else
+        {
+            std::cout << "unknown command arg: " << sCmdArg.toStdString() << std::endl;
+        }
+
+        iCurrArgIdx++;
+    }
+
+    ///////////////////////////////////////////////////
+    try
+    {
+        if( sCommand.sFunction.compare("createAndTrain") == 0)
+        {
+            bool bSuccess = createAndTrain(sCommand);
+        }
+        else if( sCommand.sFunction.compare("generateHistogram") == 0)
+        {
+            bool bSuccess = generateHistogram(sCommand);
+        }
+    }
+    catch(std::exception &e)
+    {
+        std::cerr << "exception occured: " << e.what() << std::endl;
+    }
+#else
+    return -1;
+#endif
+
+    return 0;
+}
+#endif //#ifdef NICE_USELIB_MATIO
+#endif
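
For reference, the two functions above chain into a simple bag-of-words workflow;
a hypothetical invocation could look like this (the binary and file names are
illustrative, only the flags are taken from the argument parser):

    <binary> --function createAndTrain --config config.conf \
        --traindata train.mat --traindatalabels labels.mat --classifier forest.txt
    <binary> --function generateHistogram --config config.conf \
        --traindata query.mat --classifier forest.txt --results histogram.mat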

+ 89 - 0
features/simplefeatures/tests/Makefile.inc

@@ -0,0 +1,89 @@
+# BINARY-DIRECTORY-MAKEFILE
+# conventions:
+# - there are no subdirectories, they are ignored!
+# - all ".C", ".cpp" and ".c" files in the current directory are considered
+#   independent binaries, and linked as such.
+# - the binaries depend on the library of the parent directory
+# - the binary names are created with $(BINNAME), i.e. each will be more or
+#   less the name of its .o file
+# - all binaries will be added to the default build list ALL_BINARIES
+
+# --------------------------------
+# - remember the last subdirectory
+#
+# set the variable $(SUBDIR) correctly to the current subdirectory. this
+# variable can be used throughout the current makefile.inc. The various
+# SUBDIR_before/_add helper variables are only required so that we can
+# recover the previous content of SUBDIR before exiting the makefile.inc
+
+SUBDIR_add:=$(dir $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)))
+SUBDIR_before:=$(SUBDIR)
+SUBDIR:=$(strip $(SUBDIR_add))
+SUBDIR_before_$(SUBDIR):=$(SUBDIR_before)
+
+# ------------------------
+# - include subdirectories
+#
+# note that the variables $(SUBDIRS_OF_$(SUBDIR)) are required later on to
+# recover the dependencies automatically. if you handle dependencies on your
+# own, you can also drop the $(SUBDIRS_OF_$(SUBDIR)) variable and include the
+# makefile.inc of the subdirectories yourself...
+
+#SUBDIRS_OF_$(SUBDIR):=$(patsubst %/Makefile.inc,%,$(wildcard $(SUBDIR)*/Makefile.inc))
+#include $(SUBDIRS_OF_$(SUBDIR):%=%/Makefile.inc)
+
+# ----------------------------
+# - include local dependencies
+#
+# include the libdepend.inc file, which gives additional dependencies for the
+# libraries and binaries. additionally, an automatic dependency on the library
+# of the parent directory is added below.
+
+-include $(SUBDIR)libdepend.inc
+
+PARENTDIR:=$(patsubst %/,%,$(dir $(patsubst %/,%,$(SUBDIR))))
+$(call PKG_DEPEND_INT,$(PARENTDIR))
+$(call PKG_DEPEND_EXT,CPPUNIT)
+
+# ---------------------------
+# - objects in this directory
+#
+# the use of the variable $(OBJS) is not mandatory. it is mandatory, however,
+# to update $(ALL_OBJS) so that it contains the path and name of all objects.
+# otherwise we cannot include the appropriate .d files.
+
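+# note: the grep expression below additionally schedules a Qt moc_*.o object
+# for every header in this directory that contains the Q_OBJECT macro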
+OBJS:=$(patsubst %.cpp,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.cpp))) \
+      $(patsubst %.C,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.C))) \
+      $(shell grep -ls Q_OBJECT $(SUBDIR)*.h | sed -e's@^@/@;s@.*/@$(OBJDIR)moc_@;s@\.h$$@.o@') \
+      $(patsubst %.c,$(OBJDIR)%.o,$(notdir $(wildcard $(SUBDIR)*.c)))
+ALL_OBJS += $(OBJS)
+
+# ----------------------------
+# - binaries in this directory
+#
+# output of binaries in this directory. none of these variables has to be
+# used, but everything you add to $(ALL_LIBRARIES) and $(ALL_BINARIES) will be
+# compiled with `make all`. be sure again to add the files with their full path.
+
+CHECKS:=$(BINDIR)$(call LIBNAME,$(SUBDIR))
+ALL_CHECKS+=$(CHECKS)
+
+# ---------------------
+# - binary dependencies
+#
+# there is no way of determining the binary dependencies automatically, so we
+# follow conventions. each binary depends on the corresponding .o file and
+# on the libraries specified by the INTLIBS/EXTLIBS. these dependencies can be
+# specified manually or they are automatically stored in a .bd file.
+
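+# the first eval below makes each binary depend on the moc object of its
+# Q_OBJECT header (if any); the second makes every check binary depend on the
+# cppunit main object, the local objects and the internal libraries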
+$(foreach head,$(wildcard $(SUBDIR)*.h),$(eval $(shell grep -q Q_OBJECT $(head) && echo $(head) | sed -e's@^@/@;s@.*/\(.*\)\.h$$@$(BINDIR)\1:$(OBJDIR)moc_\1.o@')))
+$(eval $(foreach c,$(CHECKS),$(c):$(BUILDDIR)$(CPPUNIT_MAIN_OBJ) $(OBJS) $(call PRINT_INTLIB_DEPS,$(c),.a)))
+
+# -------------------
+# - subdir management
+#
+# as the last step, always add this line to correctly recover the subdirectory
+# of the makefile including this one!
+
+SUBDIR:=$(SUBDIR_before_$(SUBDIR))
+

+ 173 - 0
features/simplefeatures/tests/TestCodebookRandomForest.cpp

@@ -0,0 +1,173 @@
+/**
+  * Unit test for the extremely randomized clustering forest (ERC) codebook.
+  *
+  * @author Johannes Ruehle
+  * @date 01/05/2014
+  */
+#ifdef NICE_USELIB_CPPUNIT
+
+#include <string>
+#include <exception>
+#include <iostream>
+#include <fstream>
+
+//----------
+
+#include "TestCodebookRandomForest.h"
+
+#include "vislearning/features/simplefeatures/CodebookRandomForest.h"
+#include "vislearning/features/fpfeatures/VectorFeature.h"
+
+#include "vislearning/cbaselib/FeaturePool.h"
+
+const bool verbose = false;
+const bool verboseStartEnd = true;
+
+using namespace OBJREC;
+using namespace NICE;
+using namespace std;
+
+CPPUNIT_TEST_SUITE_REGISTRATION( TestCodebookRandomForest );
+
+void TestCodebookRandomForest::setUp() {
+}
+
+void TestCodebookRandomForest::tearDown() {
+}
+void TestCodebookRandomForest::testCodebookRandomForest()
+{
+    if (verboseStartEnd)
+        std::cerr << "================== TestCodebookRandomForest::TestCodebookRandomForest ===================== " << std::endl;
+    try
+    {
+        Matrix mX;
+        Vector vY;
+        Vector vY_multi;
+
+        //ifstream ifs ("toyExample1.data", ios::in);
+        //   ifstream ifs ("toyExampleLargeScale.data", ios::in);
+        ifstream ifs ("toyExampleLargeLargeScale.data", ios::in);
+        CPPUNIT_ASSERT ( ifs.good() );
+        ifs >> mX;
+        ifs >> vY;
+        ifs >> vY_multi;
+        ifs.close();
+
+        if (verbose)
+        {
+            std::cerr << "data loaded: mX" << std::endl;
+            std::cerr << mX << std::endl;
+            std::cerr << "vY: " << std::endl;
+            std::cerr << vY << std::endl;
+            std::cerr << "vY_multi: " << std::endl;
+            std::cerr << vY_multi << std::endl;
+        }
+
+        int iNumFeatureDimension = mX.cols();
+
+        // memory layout needs to be transposed into rows x columns: features x samples.
+        // features must lie next to each other in memory, so that each feature vector
+        // can be addressed by a starting pointer and the number of feature dimensions to come.
+        Matrix mX_transposed = mX.transpose();
+
+        Examples examples;
+
+        bool bSuccess = Examples::wrapExamplesAroundFeatureMatrix(mX_transposed, vY_multi, examples);
+        CPPUNIT_ASSERT( bSuccess );
+
+        CPPUNIT_ASSERT( examples.size() == mX.rows() );
+
+        //----------------- create raw feature mapping -------------
+        OBJREC::FeaturePool fp;
+        OBJREC::VectorFeature *pVecFeature = new OBJREC::VectorFeature(iNumFeatureDimension);
+        pVecFeature->explode(fp);
+
+        //----------------- debug features -------------
+        OBJREC::Example t_Exp = examples[0].second;
+        NICE::Vector t_FeatVector;
+        fp.calcFeatureVector(t_Exp, t_FeatVector);
+        std::cerr << "first full Feature Vec: " <<t_FeatVector << std::endl;
+
+        //----------------- train our random Forest -------------
+        NICE::Config conf("config.conf");
+        OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&conf,"RandomForest");
+        pRandForest->train(fp, examples);
+
+        //----------------- create codebook ERC clusterer -------------
+        int nMaxDepth        = conf.gI("CodebookRandomForest", "maxDepthTree",10);
+        int nMaxCodebookSize = conf.gI("CodebookRandomForest", "maxCodebookSize",100);
+
+        std::cerr << "maxDepthTree " << nMaxDepth << std::endl;
+
+        OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(pRandForest, nMaxDepth, nMaxCodebookSize);
+
+        //----------------- quantize samples into histogram -------------
+        size_t iNumCodewords        = pCodebookRandomForest->getCodebookSize();
+        NICE::Vector histogram(iNumCodewords, 0.0f);
+
+        int t_iCodebookEntry; double t_fWeight; double t_fDistance;
+
+        for (size_t i = 0; i < examples.size(); i++ )
+        {
+            Example &t_Ex = examples[i].second;
+            pCodebookRandomForest->voteVQ( *t_Ex.vec, histogram, t_iCodebookEntry, t_fWeight, t_fDistance );
+            std::cerr << i << ": " << "CBEntry " << t_iCodebookEntry << " Weight: " << t_fWeight << " Distance: " << t_fDistance << std::endl;
+        }
+        std::cerr << "histogram: " << histogram << std::endl;
+
+        // test of store and restore
+        std::string t_sDestinationSave = "codebookRF.save.txt";
+        std::ofstream ofs;
+        ofs.open (t_sDestinationSave.c_str(), std::ofstream::out);
+        pCodebookRandomForest->store( ofs );
+        ofs.close();
+        // restore
+        OBJREC::CodebookRandomForest *pTestCRF = new OBJREC::CodebookRandomForest(-1, -1);
+        std::ifstream ifs2;
+        ifs2.open (t_sDestinationSave.c_str() );
+        pTestCRF->restore( ifs2 );
+        ifs2.close();
+        CPPUNIT_ASSERT_EQUAL(iNumCodewords,     pTestCRF->getCodebookSize() );
+        CPPUNIT_ASSERT_EQUAL(nMaxDepth,         pTestCRF->getMaxDepth() );
+        CPPUNIT_ASSERT_EQUAL(nMaxCodebookSize,  pTestCRF->getRestrictedCodebookSize() );
+
+        NICE::Vector histogramCompare(iNumCodewords, 0.0f);
+
+        for (size_t i = 0; i < examples.size(); i++ )
+        {
+            Example &t_Ex = examples[i].second;
+            pTestCRF->voteVQ( *t_Ex.vec, histogramCompare, t_iCodebookEntry, t_fWeight, t_fDistance );
+
+        }
+        std::cerr << "histogram of restored CodebookRandomForest: " << histogramCompare << std::endl;
+        std::cerr << "comparing histograms...";
+        for (size_t i = 0; i < iNumCodewords; i++ )
+        {
+            CPPUNIT_ASSERT_DOUBLES_EQUAL(histogram[i], histogramCompare[i], 1e-5 );
+        }
+        std::cerr << "equal..." << std::endl;
+
+        // clean up
+        delete pTestCRF;
+        delete pCodebookRandomForest;
+
+        examples.clean();
+        delete pVecFeature;
+
+        if (verboseStartEnd)
+            std::cerr << "================== TestCodebookRandomForest::TestCodebookRandomForest done ===================== " << std::endl;
+    }
+    catch(std::exception &e)
+    {
+        std::cerr << "exception occured: " << e.what() << std::endl;
+    }
+}
+
+
+#endif

+ 26 - 0
features/simplefeatures/tests/TestCodebookRandomForest.h

@@ -0,0 +1,26 @@
+#ifndef _TESTCODEBOOKRANDOMFOREST_H
+#define _TESTCODEBOOKRANDOMFOREST_H
+
+#include <cppunit/extensions/HelperMacros.h>
+
+/**
+ * CppUnit-Testcase. 
+ */
+class TestCodebookRandomForest : public CppUnit::TestFixture {
+
+    CPPUNIT_TEST_SUITE( TestCodebookRandomForest );
+    
+    CPPUNIT_TEST(testCodebookRandomForest);
+
+    CPPUNIT_TEST_SUITE_END();
+  
+ private:
+ 
+ public:
+    void setUp();
+    void tearDown();
+
+    void testCodebookRandomForest();
+};
+
+#endif // _TESTCODEBOOKRANDOMFOREST_H

+ 21 - 0
features/simplefeatures/tests/config.conf

@@ -0,0 +1,21 @@
+[RandomForest]
+number_of_trees = 3
+features_per_tree = 1.0
+samples_per_tree  = 0.5
+builder = random
+builder_section = DTBRandom
+minimum_error_reduction = .001
+minimum_improvement = .01
+enable_out_of_bag_estimates = false
+
+[DTBRandom]
+random_split_tests = 50
+random_features = 6
+max_depth = 4
+min_examples = 50
+save_indices = false
+start_random_generator = true
+
+[CodebookRandomForest]
+maxDepthTree = 500
+maxCodebookSize = 20

+ 12 - 0
features/simplefeatures/tests/libdepend.inc

@@ -0,0 +1,12 @@
+$(call PKG_DEPEND_INT,core/basics)
+$(call PKG_DEPEND_INT,core/algebra)
+$(call PKG_DEPEND_INT,vislearning/math)
+$(call PKG_DEPEND_INT,vislearning/baselib)
+$(call PKG_DEPEND_INT,vislearning/cbaselib)
+$(call PKG_DEPEND_INT,vislearning/classifier)
+$(call PKG_DEPEND_INT,vislearning/features)
+$(call PKG_DEPEND_INT,vislearning/matlabAccessHighLevel)
+$(call PKG_DEPEND_EXT,MATIO)
+$(call PKG_DEPEND_EXT,HDF5)
+
+

File diff suppressed because it is too large
+ 1502 - 0
features/simplefeatures/tests/toyExampleLargeLargeScale.data


+ 3 - 3
math/cluster/libdepend.inc

@@ -1,3 +1,3 @@
-$(call PKG_DEPEND_EXT,OPENMP)
-$(call PKG_DEPEND_EXT,ICE)
-$(call PKG_DEPEND_INT,vislearning/math/mathbase)
+# $(call PKG_DEPEND_EXT,OPENMP)
+# $(call PKG_DEPEND_EXT,ICE)
+$(call PKG_DEPEND_INT,vislearning/math/mathbase)

+ 5 - 0
math/cluster/progs/testKMeans.cpp

@@ -30,7 +30,12 @@ using namespace OBJREC;
 
 int main(int argc, char **argv)
 {
+#ifndef __clang__
+#ifndef __llvm__
 	std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
+#endif
+#endif
+
 	NICE::Image panel(200, 200);
 	NICE::Image overlay(panel.width(), panel.height());
 	panel.set(255);
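
A note on the guard added here: __gnu_cxx::__verbose_terminate_handler is a
libstdc++ extension, which is why the call is compiled out for clang/LLVM
builds. A portable alternative (a sketch, not part of this commit) would be a
custom terminate handler:

    #include <cstdlib>
    #include <exception>
    #include <iostream>

    static void verboseTerminate()
    {
        // print a diagnostic before aborting, mimicking the libstdc++ handler
        std::cerr << "terminate called" << std::endl;
        std::abort();
    }
    // installed via: std::set_terminate(verboseTerminate);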

+ 0 - 84
math/distances/Kernel.cpp

@@ -1,84 +0,0 @@
-/** 
-* @file Kernel.cpp
-* @brief Interface for Mercer Kernels
-* @author Erik Rodner
-* @date 10/24/2007
-
-*/
-
-#include <iostream>
-
-#include "vislearning/math/distances/Kernel.h"
-
-using namespace OBJREC;
-
-using namespace std;
-using namespace NICE;
-
-
-
-Kernel::Kernel( bool _symmetric ) 
-{
-    symmetric = _symmetric;
-}
-
-Kernel::~Kernel()
-{
-}
-
-	
-void Kernel::calcGramMatrix ( const VVector & vecSet, NICE::Matrix & G ) const
-{
-	G.resize(vecSet.size(), vecSet.size());
-    if ( symmetric )
-    {
-		int ii = 0;
-		for ( VVector::const_iterator i  = vecSet.begin();
-							   i != vecSet.end();
-							   i++, ii++ )
-		{
-			const NICE::Vector & x = *i;
-			int jj = ii;
-			for ( VVector::const_iterator j  = i;
-							   j != vecSet.end();
-							   j++, jj++ )
-			{
-				const NICE::Vector & y = *j;
-				double kval = K(x,y);
-				G(ii, jj) = kval;
-				G(jj, ii) = kval;
-			}
-		}
-    } else {
-		int ii = 0;
-		for ( VVector::const_iterator i  = vecSet.begin();
-							   i != vecSet.end();
-							   i++, ii++ )
-		{
-			const NICE::Vector & x = *i;
-			int jj = 0;
-			for ( VVector::const_iterator j  = vecSet.begin();
-							   j != vecSet.end();
-								   j++, jj++ )
-			{
-				const NICE::Vector & y = *j;
-				double kval = K(x,y);
-				G(ii, jj) = kval;
-			}
-		}
-    }
-}
-	
-void Kernel::calcKernelVector ( const VVector & vecSet, const NICE::Vector & y, NICE::Vector & kstar ) const
-{
-	kstar.resize(vecSet.size());
-	int ii = 0;
-	for ( VVector::const_iterator i  = vecSet.begin();
-						   i != vecSet.end();
-						   i++, ii++ )
-	{
-			const NICE::Vector & x = *i;
-			double kval = K(x, y);
-			kstar[ii] = kval;
-	}
-}

+ 0 - 43
math/distances/Kernel.h

@@ -1,43 +0,0 @@
-/** 
-* @file Kernel.h
-* @brief Interface for Mercer Kernels
-* @author Erik Rodner
-* @date 10/24/2007
-
-*/
-#ifndef KERNELINCLUDE
-#define KERNELINCLUDE
-
-#include "core/vector/VectorT.h"
-#include "core/vector/MatrixT.h"
-
-#include "core/vector/VVector.h"
-  
-/** Interface for Mercer Kernels */
-
-namespace OBJREC {
-
-class Kernel
-{
-    protected:
-	bool symmetric;
-
-    public:
-  
-	/** simple constructor */
-	Kernel( bool symmetric );
-      
-	/** simple destructor */
-	virtual ~Kernel();
-
-	virtual double K (const NICE::Vector & x, const NICE::Vector & y) const = 0;
-
-	void calcGramMatrix ( const NICE::VVector & vecSet, NICE::Matrix & G ) const;
-	void calcKernelVector ( const NICE::VVector & vecSet, const NICE::Vector & y, NICE::Vector & kstar ) const;
-     
-};
-
-
-} // namespace
-
-#endif

+ 0 - 37
math/distances/KernelExp.cpp

@@ -1,37 +0,0 @@
-/** 
-* @file KernelExp.cpp
-* @brief Interface for the popular exponential mercer kernels
-* @author Erik Rodner
-* @date 10/24/2007
-
-*/
-#include <iostream>
-
-#include <math.h>
-#include "vislearning/math/distances/KernelExp.h"
-
-using namespace OBJREC;
-
-using namespace std;
-// refactor-nice.pl: check this substitution
-// old: using namespace ice;
-using namespace NICE;
-
-
-
-KernelExp::KernelExp( NICE::VectorDistance<double> *_kInside, double _a ) : Kernel( true )
-{
-    kInside = _kInside;
-    a = _a;
-}
-
-KernelExp::~KernelExp()
-{
-	delete kInside;
-}
-
-	
-double KernelExp::K (const NICE::Vector & x, const NICE::Vector & y) const
-{
-    return exp( - kInside->calculate (x,y) / a );
-}

+ 0 - 45
math/distances/KernelExp.h

@@ -1,45 +0,0 @@
-/** 
-* @file KernelExp.h
-* @brief Interface for the popular exponential mercer kernels
-* @author Erik Rodner
-* @date 10/24/2007
-
-*/
-#ifndef KERNELEXPINCLUDE
-#define KERNELEXPINCLUDE
-
-#include "core/vector/VectorT.h"
-#include "core/vector/MatrixT.h"
-
-#include "Kernel.h"
-
-#include <core/vector/Distance.h>
-
-/** Interface for the popular exponential mercer kernels */
-
-namespace OBJREC {
-
-class KernelExp : public Kernel
-{
-
-    protected:
-	NICE::VectorDistance<double> *kInside;
-	double a;
-
-    public:
-  
-	/** simple constructor */
-	KernelExp( NICE::VectorDistance<double> *_kInside, double _a );
-      
-	/** simple destructor */
-	virtual ~KernelExp();
-     
-	// refactor-nice.pl: check this substitution
-	// old: double K (const Vector & x, const Vector & y) const;
-	double K (const NICE::Vector & x, const NICE::Vector & y) const;
-};
-
-
-} // namespace
-
-#endif

+ 1 - 1
math/distances/KernelStd.h

@@ -11,7 +11,7 @@
 #include "core/vector/VectorT.h"
 #include "core/vector/MatrixT.h"
   
-#include "Kernel.h"
+#include "vislearning/math/kernels/Kernel.h"
 
 /** Standard kernel */
 

+ 1 - 1
math/ftransform/PCA.h

@@ -8,7 +8,7 @@
 #ifndef PCAINCLUDE
 #define PCAINCLUDE
 
-#include "core/image/Filter.h"
+//#include "core/image/Filter.h"
 #include "core/vector/VectorT.h"
 #include "core/vector/MatrixT.h"
 

+ 5 - 0
math/pdf/PDFGaussian.h

@@ -31,8 +31,13 @@ class PDFGaussian : public PDF
 	/** empty constructor */
 	PDFGaussian ( int dimension );
 
+#if __cplusplus >= 201103L
+	static constexpr double logdetEPS = 0.0;
+	static constexpr double regEPS = 1e-7;
+#else
 	static const double logdetEPS = 0.0;
 	static const double regEPS = 1e-7;
+#endif
 	static NICE::Matrix RobustInverse ( const NICE::Matrix & M, double & logdet );
 
     public:
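
The #if added above is needed because C++03 only permits in-class initializers
for static const members of integral type; for doubles, C++11's constexpr is
the conforming way to keep the initializer in the class. A minimal sketch of
the pattern (names are illustrative):

    struct Eps {
    #if __cplusplus >= 201103L
        static constexpr double tol = 1e-7;  // in-class init: fine since C++11
    #else
        static const double tol;             // C++03: declaration only...
    #endif
    };
    #if __cplusplus < 201103L
    const double Eps::tol = 1e-7;            // ...definition at namespace scope
    #endif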

Some files were not shown because too many files changed in this diff