Jelajahi Sumber

Merge branch 'master' of dbv.inf-cv.uni-jena.de:nice/nice-gp-hik-exp into cmake_mergebranch

Johannes Ruehle 11 tahun lalu
induk
melakukan
f90db57d84
5 mengubah file dengan 74 tambahan dan 48 penghapusan
  1. 17 11
      GPHIKClassifierNICE.cpp
  2. 27 9
      GPHIKClassifierNICE.h
  3. 9 8
      progs/IL_AL.cpp
  4. 9 8
      progs/IL_AL_Binary.cpp
  5. 12 12
      progs/activeLearningCheckerBoard.cpp

+ 17 - 11
GPHIKClassifierNICE.cpp

@@ -3,11 +3,13 @@
 * @brief feature pool interface for our GP HIK classifier
 * @brief feature pool interface for our GP HIK classifier
 * @author Alexander Freytag
 * @author Alexander Freytag
 * @date 02/01/2012
 * @date 02/01/2012
-
 */
 */
+
+// STL includes
 #include <iostream>
 #include <iostream>
 
 
-#include "core/basics/numerictools.h"
+// NICE-core includes
+#include <core/basics/numerictools.h>
 #include <core/basics/Timer.h>
 #include <core/basics/Timer.h>
 
 
 #include "GPHIKClassifierNICE.h"
 #include "GPHIKClassifierNICE.h"
@@ -118,7 +120,7 @@ void GPHIKClassifierNICE::train ( FeaturePool & fp, Examples & examples )
   Vector y ( examples.size() /* maximum size */ );
   Vector y ( examples.size() /* maximum size */ );
 
 
   // flat structure of our training data
   // flat structure of our training data
-  std::vector< SparseVector * > sparseExamples;
+  std::vector< const SparseVector * > sparseExamples;
 
 
   if (verbose)
   if (verbose)
     cerr << "Converting (and sampling) feature vectors" << endl;
     cerr << "Converting (and sampling) feature vectors" << endl;
@@ -147,7 +149,7 @@ void GPHIKClassifierNICE::train ( FeaturePool & fp, Examples & examples )
 }
 }
 
 
 /** training process */
 /** training process */
-void GPHIKClassifierNICE::train ( const std::vector< SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
+void GPHIKClassifierNICE::train ( const std::vector< const SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
 {
 {
   classifier->train(examples, binLabels);
   classifier->train(examples, binLabels);
 }
 }
@@ -166,22 +168,22 @@ FeaturePoolClassifier *GPHIKClassifierNICE::clone () const
   return NULL;
   return NULL;
 }
 }
 
 
-void GPHIKClassifierNICE::predictUncertainty( Example & pe, NICE::Vector & uncertainties )
+void GPHIKClassifierNICE::predictUncertainty( Example & pe, double & uncertainty )
 {
 {
   const SparseVector *svec = pe.svec;  
   const SparseVector *svec = pe.svec;  
   if ( svec == NULL )
   if ( svec == NULL )
     fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
     fthrow(Exception, "GPHIKClassifierNICE requires example.svec (SparseVector stored in an Example struct)");
-  classifier->predictUncertainty(svec, uncertainties);
+  classifier->predictUncertainty(svec, uncertainty);
 }
 }
    
    
-void GPHIKClassifierNICE::predictUncertainty( const NICE::SparseVector * example, NICE::Vector & uncertainties )
+void GPHIKClassifierNICE::predictUncertainty( const NICE::SparseVector * example, double & uncertainty )
 {  
 {  
-  classifier->predictUncertainty(example, uncertainties);
+  classifier->predictUncertainty(example, uncertainty);
 }
 }
 
 
-//---------------------------------------------------------------------
-//                           protected methods
-//---------------------------------------------------------------------
+///////////////////// INTERFACE PERSISTENT /////////////////////
+// interface specific methods for store and restore
+///////////////////// INTERFACE PERSISTENT ///////////////////// 
 void GPHIKClassifierNICE::restore ( std::istream & is, int format )
 void GPHIKClassifierNICE::restore ( std::istream & is, int format )
 {
 {
   if (is.good())
   if (is.good())
@@ -214,6 +216,10 @@ void GPHIKClassifierNICE::store ( std::ostream & os, int format ) const
   }
   }
 }
 }
 
 
+///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+// interface specific methods for incremental extensions
+///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+
 void GPHIKClassifierNICE::addExample( const Example & pe, const double & label)
 void GPHIKClassifierNICE::addExample( const Example & pe, const double & label)
 {
 {
   const SparseVector *svec = pe.svec;
   const SparseVector *svec = pe.svec;

+ 27 - 9
GPHIKClassifierNICE.h

@@ -2,15 +2,20 @@
 * @file GPHIKClassifierNICE.h
 * @file GPHIKClassifierNICE.h
 * @author Alexander Freytag, Erik Rodner
 * @author Alexander Freytag, Erik Rodner
 * @date 02/01/2012
 * @date 02/01/2012
-
 */
 */
 #ifndef _NICE_GPHIKCLASSIFIERNICEINCLUDE
 #ifndef _NICE_GPHIKCLASSIFIERNICEINCLUDE
 #define _NICE_GPHIKCLASSIFIERNICEINCLUDE
 #define _NICE_GPHIKCLASSIFIERNICEINCLUDE
 
 
+// STL includes
 #include <string>
 #include <string>
-#include "core/basics/Config.h"
-#include "vislearning/classifier/classifierbase/FeaturePoolClassifier.h"
 
 
+// NICE-core includes
+#include <core/basics/Config.h>
+
+// NICE-vislearning includes
+#include <vislearning/classifier/classifierbase/FeaturePoolClassifier.h>
+
+// NICE-gp-hik-core includes
 #include <gp-hik-core/GPHIKClassifier.h>
 #include <gp-hik-core/GPHIKClassifier.h>
 #include <gp-hik-core/FMKGPHyperparameterOptimization.h>
 #include <gp-hik-core/FMKGPHyperparameterOptimization.h>
 #include <gp-hik-core/parameterizedFunctions/ParameterizedFunction.h>
 #include <gp-hik-core/parameterizedFunctions/ParameterizedFunction.h>
@@ -69,25 +74,38 @@ class GPHIKClassifierNICE : public FeaturePoolClassifier
      * @param examples examples to use given in a sparse data structure
      * @param examples examples to use given in a sparse data structure
     * @param binLabels corresponding binary labels with class no. There is no need here that each example has only one positive entry in this set (1,-1)
     * @param binLabels corresponding binary labels with class no. There is no need here that each example has only one positive entry in this set (1,-1)
      */
      */
-    void train ( const std::vector< NICE::SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels );
+    void train ( const std::vector< const NICE::SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels );
     
     
-    /** Persistent interface */
+    ///////////////////// INTERFACE PERSISTENT /////////////////////
+    // interface specific methods for store and restore
+    ///////////////////// INTERFACE PERSISTENT ///////////////////// 
     virtual void restore ( std::istream & is, int format = 0 );
     virtual void restore ( std::istream & is, int format = 0 );
     virtual void store ( std::ostream & os, int format = 0 ) const;
     virtual void store ( std::ostream & os, int format = 0 ) const;
     virtual void clear ();
     virtual void clear ();
 
 
     virtual FeaturePoolClassifier *clone () const;
     virtual FeaturePoolClassifier *clone () const;
     
     
-    /** prediction of classification uncertainty */
-    void predictUncertainty( OBJREC::Example & pe, NICE::Vector & uncertainties );
     /** 
     /** 
      * @brief prediction of classification uncertainty
      * @brief prediction of classification uncertainty
      * @date 19-06-2012 (dd-mm-yyyy)
      * @date 19-06-2012 (dd-mm-yyyy)
      * @author Alexander Freytag
      * @author Alexander Freytag
      * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
      * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
-     * @param uncertainties contains the resulting classification uncertainties (1 entry for standard setting, m entries for binary-balanced setting)
+     * @param uncertainty contains the resulting classification uncertainty
+     */
+    void predictUncertainty( OBJREC::Example & pe, double & uncertainty );
+    
+    /** 
+     * @brief prediction of classification uncertainty
+     * @date 19-06-2012 (dd-mm-yyyy)
+     * @author Alexander Freytag
+     * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
+     * @param uncertainty contains the resulting classification uncertainty
      */       
      */       
-    void predictUncertainty( const NICE::SparseVector * example, NICE::Vector & uncertainties );
+    void predictUncertainty( const NICE::SparseVector * example, double & uncertainty );
+    
+    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+    // interface specific methods for incremental extensions
+    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////    
     
     
     void addExample( const OBJREC::Example & pe, const double & label);
     void addExample( const OBJREC::Example & pe, const double & label);
     virtual void addMultipleExamples( OBJREC::Examples & newExamples);
     virtual void addMultipleExamples( OBJREC::Examples & newExamples);

+ 9 - 8
progs/IL_AL.cpp

@@ -365,25 +365,26 @@ int main ( int argc, char **argv )
             }
             }
             else if (queryStrategy == GPPREDVAR)
             else if (queryStrategy == GPPREDVAR)
             {
             {
-              NICE::Vector singleUncertainties;
+              double singleUncertainty;
               //use the pred variance computation specified in the config file
               //use the pred variance computation specified in the config file
-              classifier->predictUncertainty( example, singleUncertainties );
+              classifier->predictUncertainty( example, singleUncertainty );
               //take the maximum of the scores for the predictive variance
               //take the maximum of the scores for the predictive variance
-              scores.push_back( std::pair<int,double> ( exIndex, singleUncertainties.Max()) );
+              scores.push_back( std::pair<int,double> ( exIndex, singleUncertainty ) );
             }
             }
             else if (queryStrategy == GPHEURISTIC)
             else if (queryStrategy == GPHEURISTIC)
             {
             {
-              NICE::Vector singleUncertainties;
+              double singleUncertainty;
               //use the pred variance computation specified in the config file
               //use the pred variance computation specified in the config file
-              classifier->predictUncertainty( example, singleUncertainties );
+              classifier->predictUncertainty( example, singleUncertainty );
               //compute the mean values for every class
               //compute the mean values for every class
               ClassificationResult r = classifier->classify( example );
               ClassificationResult r = classifier->classify( example );
-              for ( int tmp = 0; tmp < singleUncertainties.size(); tmp++ )
+              NICE::Vector heuristicValues ( r.scores.size(), 0);
+              for ( int tmp = 0; tmp < heuristicValues.size(); tmp++ )
               {
               {
-                singleUncertainties[tmp] = fabs(r.scores[tmp]) / sqrt( squaredNoise + singleUncertainties[tmp] );
+                heuristicValues[tmp] = fabs(r.scores[tmp]) / sqrt( squaredNoise + singleUncertainty );
               }              
               }              
               //take the minimum of the scores for the heuristic measure
               //take the minimum of the scores for the heuristic measure
-              scores.push_back( std::pair<int,double> ( exIndex, singleUncertainties.Min()) );
+              scores.push_back( std::pair<int,double> ( exIndex, heuristicValues.Min()) );
             }
             }
         }
         }
         float time_score_computation = ( float ) ( clock() - unc_pred_start_time ) ;
         float time_score_computation = ( float ) ( clock() - unc_pred_start_time ) ;

+ 9 - 8
progs/IL_AL_Binary.cpp

@@ -403,25 +403,26 @@ int main ( int argc, char **argv )
               }
               }
               else if (queryStrategy == GPPREDVAR)
               else if (queryStrategy == GPPREDVAR)
               {
               {
-                NICE::Vector singleUncertainties;
+                double uncertainty;
                 //use the pred variance computation specified in the config file
                 //use the pred variance computation specified in the config file
-                classifier->predictUncertainty( example, singleUncertainties );
+                classifier->predictUncertainty( example, uncertainty );
                 //take the maximum of the scores for the predictive variance
                 //take the maximum of the scores for the predictive variance
-                scores.push_back( std::pair<int,double> ( exIndex, singleUncertainties.Max()) );
+                scores.push_back( std::pair<int,double> ( exIndex, uncertainty) );
               }
               }
               else if (queryStrategy == GPHEURISTIC)
               else if (queryStrategy == GPHEURISTIC)
               {
               {
-                NICE::Vector singleUncertainties;
+                double uncertainty;
                 //use the pred variance computation specified in the config file
                 //use the pred variance computation specified in the config file
-                classifier->predictUncertainty( example, singleUncertainties );
+                classifier->predictUncertainty( example, uncertainty );
                 //compute the mean values for every class
                 //compute the mean values for every class
                 ClassificationResult r = classifier->classify( example );
                 ClassificationResult r = classifier->classify( example );
-                for ( int tmp = 0; tmp < singleUncertainties.size(); tmp++ )
+                NICE::Vector heuristicValues ( r.scores.size(), 0);
+                for ( int tmp = 0; tmp < heuristicValues.size(); tmp++ )
                 {
                 {
-                  singleUncertainties[tmp] = fabs(r.scores[tmp]) / sqrt( squaredNoise + singleUncertainties[tmp] );
+                  heuristicValues[tmp] = fabs(r.scores[tmp]) / sqrt( squaredNoise + uncertainty );
                 }              
                 }              
                 //take the minimum of the scores for the heuristic measure
                 //take the minimum of the scores for the heuristic measure
-                scores.push_back( std::pair<int,double> ( exIndex, singleUncertainties.Min()) );
+                scores.push_back( std::pair<int,double> ( exIndex, heuristicValues.Min()) );
               }
               }
           }
           }
           float time_score_computation = ( float ) ( clock() - unc_pred_start_time ) ;
           float time_score_computation = ( float ) ( clock() - unc_pred_start_time ) ;

+ 12 - 12
progs/activeLearningCheckerBoard.cpp

@@ -588,44 +588,44 @@ int main ( int argc, char **argv )
             }
             }
             else if (queryStrategy == GPPREDVAR)
             else if (queryStrategy == GPPREDVAR)
             {
             {
-              NICE::Vector singleUncertainties;
+              double uncertainty;
               //use the pred variance computation specified in the config file
               //use the pred variance computation specified in the config file
-              classifier->predictUncertainty( example, singleUncertainties );
+              classifier->predictUncertainty( example, uncertainty );
               //take the maximum of the scores for the predictive variance
               //take the maximum of the scores for the predictive variance
-              scores.push_back( std::pair<int,double> ( exIndex, singleUncertainties[0]) );
+              scores.push_back( std::pair<int,double> ( exIndex, uncertainty) );
             }
             }
             else if (queryStrategy == GPHEURISTIC)
             else if (queryStrategy == GPHEURISTIC)
             {
             {
-              NICE::Vector singleUncertainties;
+              double uncertainty;
               //use the pred variance computation specified in the config file
               //use the pred variance computation specified in the config file
-              classifier->predictUncertainty( example, singleUncertainties );
+              classifier->predictUncertainty( example, uncertainty );
               //compute the mean values for every class
               //compute the mean values for every class
               ClassificationResult r = classifier->classify( example );
               ClassificationResult r = classifier->classify( example );
               //take the minimum of the scores for the heuristic measure
               //take the minimum of the scores for the heuristic measure
-              scores.push_back( std::pair<int,double> ( exIndex, fabs(r.scores[0]) / sqrt( squaredNoise + singleUncertainties[0] )) );             
+              scores.push_back( std::pair<int,double> ( exIndex, fabs(r.scores[0]) / sqrt( squaredNoise + uncertainty )) );             
             }
             }
             else if (queryStrategy == GPHEURISTICPLUS)
             else if (queryStrategy == GPHEURISTICPLUS)
             {
             {
-              NICE::Vector singleUncertainties;
+              double uncertainty;
               //use the pred variance computation specified in the config file
               //use the pred variance computation specified in the config file
-              classifier->predictUncertainty( example, singleUncertainties );
+              classifier->predictUncertainty( example, uncertainty );
               //compute the mean values for every class
               //compute the mean values for every class
               ClassificationResult r = classifier->classify( example );
               ClassificationResult r = classifier->classify( example );
               //take the minimum of the scores for the heuristic measure
               //take the minimum of the scores for the heuristic measure
-              scores.push_back( std::pair<int,double> ( exIndex, fabs(r.scores[0]) + sqrt( squaredNoise + singleUncertainties[0] )) );             
+              scores.push_back( std::pair<int,double> ( exIndex, fabs(r.scores[0]) + sqrt( squaredNoise + uncertainty )) );             
             }
             }
             else if (queryStrategy == GPBALANCE)
             else if (queryStrategy == GPBALANCE)
             {
             {
-              NICE::Vector singleUncertainties;
+              double uncertainty;
               //use the pred variance computation specified in the config file
               //use the pred variance computation specified in the config file
-              classifier->predictUncertainty( example, singleUncertainties );
+              classifier->predictUncertainty( example, uncertainty );
               //compute the mean values for every class
               //compute the mean values for every class
               ClassificationResult r = classifier->classify( example );
               ClassificationResult r = classifier->classify( example );
               double scorePositive (fabs (r.scores[0] - 1.0 ));
               double scorePositive (fabs (r.scores[0] - 1.0 ));
               double scoreNegative (fabs (r.scores[0] + 1.0 ));
               double scoreNegative (fabs (r.scores[0] + 1.0 ));
               double score = scorePositive < scoreNegative ? scorePositive : scoreNegative;
               double score = scorePositive < scoreNegative ? scorePositive : scoreNegative;
               //take the minimum of the scores for the heuristic measure
               //take the minimum of the scores for the heuristic measure
-              scores.push_back( std::pair<int,double> ( exIndex, score / ( squaredNoise + singleUncertainties[0] )) );             
+              scores.push_back( std::pair<int,double> ( exIndex, score / ( squaredNoise + uncertainty )) );             
             }            
             }            
         }
         }
         float time_score_computation = ( float ) ( clock() - unc_pred_start_time ) ;
         float time_score_computation = ( float ) ( clock() - unc_pred_start_time ) ;