
adapted FPCGPHIK to new gp-hik-core source code

Alexander Freytag, 11 years ago
Commit
47610e13be
3 changed files with 141 additions and 118 deletions
  1. +11 -8    classifier/fpclassifier/FPCGPHIK.cpp
  2. +18 -6    classifier/fpclassifier/FPCGPHIK.h
  3. +112 -104 math/kernels/KernelData.h

+ 11 - 8
classifier/fpclassifier/FPCGPHIK.cpp

@@ -3,13 +3,16 @@
 * @brief feature pool interface for our GP HIK classifier
 * @author Alexander Freytag
 * @date 02/01/2012
-
 */
+
+// STL includes
 #include <iostream>
 
-#include "core/basics/numerictools.h"
+// NICE-core includes
+#include <core/basics/numerictools.h>
 #include <core/basics/Timer.h>
 
+// NICE-vislearning includes
 #include "FPCGPHIK.h"
 
 using namespace std;
@@ -143,7 +146,7 @@ void FPCGPHIK::train ( FeaturePool & fp, Examples & examples )
   Vector y ( examples.size() /* maximum size */ );
 
   // flat structure of our training data
-  std::vector< SparseVector * > sparseExamples;
+  std::vector< const SparseVector * > sparseExamples;
 
   if (verbose)
     cerr << "Converting (and sampling) feature vectors" << endl;
@@ -172,7 +175,7 @@ void FPCGPHIK::train ( FeaturePool & fp, Examples & examples )
 }
 
 /** training process */
-void FPCGPHIK::train ( const std::vector< SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
+void FPCGPHIK::train ( const std::vector< const SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels )
 {
   
   std::cerr << "call internal train method " << std::endl;
@@ -194,17 +197,17 @@ FeaturePoolClassifier *FPCGPHIK::clone () const
   return NULL;
 }
 
-void FPCGPHIK::predictUncertainty( Example & pe, NICE::Vector & uncertainties )
+void FPCGPHIK::predictUncertainty( Example & pe, double & uncertainty )
 {
   const SparseVector *svec = pe.svec;  
   if ( svec == NULL )
     fthrow(Exception, "FPCGPHIK requires example.svec (SparseVector stored in an Example struct)");
-  classifier->predictUncertainty(svec, uncertainties);
+  classifier->predictUncertainty(svec, uncertainty);
 }
    
-void FPCGPHIK::predictUncertainty( const NICE::SparseVector * example, NICE::Vector & uncertainties )
+void FPCGPHIK::predictUncertainty( const NICE::SparseVector * example, double & uncertainty )
 {  
-  classifier->predictUncertainty(example, uncertainties);
+  classifier->predictUncertainty(example, uncertainty);
 }
 
 //---------------------------------------------------------------------

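The hunk above changes the flat training container from `SparseVector *` to `const SparseVector *`. As a rough illustration of that "flat structure" step (not part of this commit), the conversion from `Examples` to such a container could look like the sketch below; the layout of `Examples` as (class number, Example) pairs and the include path are assumptions based on NICE-vislearning conventions.

```cpp
// Illustration only: build the flat vector of const SparseVector pointers
// that the adapted train() signature expects. Assumes OBJREC::Examples is a
// vector of (class number, Example) pairs and that Example::svec holds the
// sparse feature vector, as used elsewhere in this diff.
#include <vector>
#include <core/vector/SparseVectorT.h>
#include "vislearning/cbaselib/Example.h" // assumed header for Example / Examples

std::vector< const NICE::SparseVector * > flattenExamples ( const OBJREC::Examples & examples )
{
  std::vector< const NICE::SparseVector * > sparseExamples;
  sparseExamples.reserve ( examples.size() );

  for ( size_t i = 0; i < examples.size(); i++ )
  {
    const NICE::SparseVector * svec = examples[i].second.svec;
    if ( svec != NULL )
      sparseExamples.push_back ( svec );
  }

  return sparseExamples;
}
```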
+ 18 - 6
classifier/fpclassifier/FPCGPHIK.h

@@ -7,14 +7,20 @@
 #ifndef _NICE_GPHIKCLASSIFIERNICEINCLUDE
 #define _NICE_GPHIKCLASSIFIERNICEINCLUDE
 
+// STL includes
 #include <string>
-#include "core/basics/Config.h"
-#include "vislearning/classifier/classifierbase/FeaturePoolClassifier.h"
 
+// NICE-core includes
+#include <core/basics/Config.h>
+
+// NICE-gp-hik-core includes
 #include <gp-hik-core/GPHIKClassifier.h>
 #include <gp-hik-core/FMKGPHyperparameterOptimization.h>
 #include <gp-hik-core/parameterizedFunctions/ParameterizedFunction.h>
 
+// NICE-vislearning includes
+#include "vislearning/classifier/classifierbase/FeaturePoolClassifier.h"
+
 namespace OBJREC {
   
 /** @class FPCGPHIK
@@ -69,9 +75,11 @@ class FPCGPHIK : public FeaturePoolClassifier
      * @param examples examples to use given in a sparse data structure
     * @param binLabels corresponding binary labels with class no. There is no need here that every example has only one positive entry in this set (1,-1)
      */
-    void train ( const std::vector< NICE::SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels );
+    void train ( const std::vector< const NICE::SparseVector *> & examples, std::map<int, NICE::Vector> & binLabels );
     
-    /** Persistent interface */
+    ///////////////////// INTERFACE PERSISTENT /////////////////////
+    // interface specific methods for store and restore
+    ///////////////////// INTERFACE PERSISTENT ///////////////////// 
     virtual void restore ( std::istream & is, int format = 0 );
     virtual void store ( std::ostream & os, int format = 0 ) const;
     virtual void clear ();
@@ -79,7 +87,7 @@ class FPCGPHIK : public FeaturePoolClassifier
     virtual FeaturePoolClassifier *clone () const;
     
     /** prediction of classification uncertainty */
-    void predictUncertainty( OBJREC::Example & pe, NICE::Vector & uncertainties );
+    void predictUncertainty( OBJREC::Example & pe, double & uncertainty );
     /** 
      * @brief prediction of classification uncertainty
      * @date 19-06-2012 (dd-mm-yyyy)
@@ -87,7 +95,11 @@ class FPCGPHIK : public FeaturePoolClassifier
      * @param examples example for which the classification uncertainty shall be predicted, given in a sparse representation
      * @param uncertainties contains the resulting classification uncertainties (1 entry for standard setting, m entries for binary-balanced setting)
      */       
-    void predictUncertainty( const NICE::SparseVector * example, NICE::Vector & uncertainties );
+    void predictUncertainty( const NICE::SparseVector * example, double & uncertainty );
+    
+    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////
+    // interface specific methods for incremental extensions
+    ///////////////////// INTERFACE ONLINE LEARNABLE (SIMILAR) /////////////////////       
     
     void addExample( const OBJREC::Example & pe, const double & label);
     virtual void addMultipleExamples( OBJREC::Examples & newExamples);

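Taken together, the header now declares the two adapted signatures: `train` accepts `const NICE::SparseVector *` examples and `predictUncertainty` writes a single `double`. A minimal usage sketch of the updated interface follows; the constructor call, the config section name, and the single-entry label map are assumptions for illustration and not shown in this commit.

```cpp
// Sketch only: exercises the two signatures adapted in this commit.
#include <map>
#include <vector>
#include <core/basics/Config.h>
#include <core/vector/VectorT.h>
#include <core/vector/SparseVectorT.h>
#include "vislearning/classifier/fpclassifier/FPCGPHIK.h" // assumed include path

void sketchFPCGPHIKUsage ( const NICE::Config & conf,
                           const std::vector< const NICE::SparseVector * > & trainExamples,
                           const NICE::Vector & binaryLabels,
                           const NICE::SparseVector * testExample )
{
  OBJREC::FPCGPHIK classifier ( &conf, "FPCGPHIK" ); // constructor and section name are assumptions

  // adapted train(): examples are passed as const pointers
  std::map<int, NICE::Vector> binLabels;
  binLabels.insert ( std::pair<int, NICE::Vector> ( 0, binaryLabels ) );
  classifier.train ( trainExamples, binLabels );

  // adapted predictUncertainty(): a single scalar instead of a NICE::Vector
  double uncertainty ( 0.0 );
  classifier.predictUncertainty ( testExample, uncertainty );
}
```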
+ 112 - 104
math/kernels/KernelData.h

@@ -1,4 +1,4 @@
-/** 
+/**
 * @file KernelData.h
 * @author Erik Rodner
 * @date 01/19/2010
@@ -12,115 +12,123 @@
 #include "core/vector/MatrixT.h"
 
 namespace OBJREC {
-  
+
 /** @class KernelData
- * caching some kernel data 
+ * caching some kernel data
  *
  * @author Erik Rodner
  */
 class KernelData
 {
-	public:
-		enum {
-			QUADRATIC_DISTANCES = 0
-		};
-
-	protected:
-		bool verbose;
-
-		NICE::CholeskyRobust *cr;
-
-    protected:
-		NICE::Matrix kernelMatrix;
-
-		NICE::Matrix inverseKernelMatrix;
-
-		NICE::Matrix choleskyMatrix;
-
-	 std::map<int, NICE::Matrix *> cachedMatrices;
-
-		double logdet;
-
-		void initFromConfig ( const NICE::Config *conf, const std::string & section );
-
-		NICE::Matrix B;
-		NICE::Matrix U;
-		NICE::Matrix V;
-		NICE::Matrix F;
-		NICE::Matrix F_inv;
-
-    public:
-  
-  		/** standard stuff */
-  		KernelData();
-
-		/** copy constructor */
-  		KernelData( const KernelData & src );
-
-		/** simple constructor using config settings for numerical details */
-		KernelData( const NICE::Config *conf, const std::string & section = "Kernel" );
-		
-		/** the config contains information about numerical setting of the cholesky factorization etc. */
-		KernelData( const NICE::Config *conf, const NICE::Matrix & kernelMatrix, const std::string & section = "Kernel", bool updateCholesky = true );
-		  
-		/** simple destructor */
-		virtual ~KernelData();
-
-		/** update the cholesky factorization necessary to use computeInverseKernelMultiply */
-		virtual void updateCholeskyFactorization ();
-
-		/** in nearly all cases computeInverseKernelMultiply can be used instead */
-		virtual void updateInverseKernelMatrix ();
-		
-		/** compute K^{-1} * x */
-		virtual void computeInverseKernelMultiply ( const NICE::Vector & x, NICE::Vector & result ) const;
-
-		/** standard const and non-const get functions */
-		virtual const NICE::Matrix & getKernelMatrix() const;
-		virtual NICE::Matrix & getKernelMatrix();
-		virtual const NICE::Matrix & getInverseKernelMatrix() const;
-		virtual NICE::Matrix & getInverseKernelMatrix();
-		virtual const NICE::Matrix & getCholeskyMatrix() const;
-
-		/** get the logdet of the current kernel matrix (cholesky factorization has to be computed in advance) */
-		double getLogDetKernelMatrix () const { return logdet; };
-		
-		/** get the numbers of rows (and columns) of the kernel matrix */
-		virtual uint getKernelMatrixSize () const;
-
-		/** get a pre-cached matrix */
-		const NICE::Matrix & getCachedMatrix (int i) const;
-		/** set a pre-cached matrix */
-		void setCachedMatrix (int i, NICE::Matrix *m);
-
-		/** did we already start updateCholeskyFactorization() */
-		bool hasCholeskyFactorization () const { return (choleskyMatrix.rows() == kernelMatrix.rows()); };
-
-		/** get efficient GP regression loo estimates and corresponding variances, which can be used
-		  * to perform model selection (cf. Rasmussen and Williams) */
-		void getLooEstimates ( const NICE::Vector & y, NICE::Vector & muLoo, NICE::Vector & sigmaLoo ) const;
-
-		/** clone this object */
-		virtual KernelData *clone(void) const;
-
-		void getGPLikelihoodWithOneNewRow( const NICE::Vector & y, const double & oldLogdetK, const int & rowIndex, const NICE::Vector & newRow, const NICE::Vector & oldAlpha, NICE::Vector & newAlpha, double & loglike);
-
-		void getGPLikelihoodWithOneNewRow_FirstPart( const int & rowIndex, const NICE::Vector & newRow);
-
-		void getGPLikelihoodWithOneNewRow_SecondPart( const NICE::Vector & y, const double & oldLogdetK, const NICE::Vector & oldAlpha, NICE::Vector & newAlpha, double & loglike);
-		
-		void perform_Rank_2_Update(const int & rowIndex, const NICE::Vector & newRow);
-		void perform_Rank_2k_Update(const std::vector<int> & rowIndices, const std::vector<NICE::Vector> & newRows);
-		void delete_one_row(const int & rowIndex);
-		void delete_multiple_rows(std::vector<int> & indices);
-		
-		void setKernelMatrix(const NICE::Matrix & k_matrix);
-		
-		void increase_size_by_One();
-		void increase_size_by_k(const uint & k);
-		
-		void set_verbose(const bool & _verbose){ verbose = _verbose;};
-		bool get_verbose(){ return verbose;};
+public:
+    enum {
+        QUADRATIC_DISTANCES = 0
+    };
+
+protected:
+    bool verbose;
+
+    NICE::CholeskyRobust *cr;
+
+protected:
+    NICE::Matrix kernelMatrix;
+
+    NICE::Matrix inverseKernelMatrix;
+
+    NICE::Matrix choleskyMatrix;
+
+    std::map<int, NICE::Matrix *> cachedMatrices;
+
+    double logdet;
+
+    void initFromConfig ( const NICE::Config *conf, const std::string & section );
+
+    NICE::Matrix B;
+    NICE::Matrix U;
+    NICE::Matrix V;
+    NICE::Matrix F;
+    NICE::Matrix F_inv;
+
+public:
+
+    /** standard stuff */
+    KernelData();
+
+    /** copy constructor */
+    KernelData( const KernelData & src );
+
+    /** simple constructor using config settings for numerical details */
+    KernelData( const NICE::Config *conf, const std::string & section = "Kernel" );
+
+    /** the config contains information about numerical setting of the cholesky factorization etc. */
+    KernelData( const NICE::Config *conf, const NICE::Matrix & kernelMatrix, const std::string & section = "Kernel", bool updateCholesky = true );
+
+    /** simple destructor */
+    virtual ~KernelData();
+
+    /** update the cholesky factorization necessary to use computeInverseKernelMultiply */
+    virtual void updateCholeskyFactorization ();
+
+    /** in nearly all cases computeInverseKernelMultiply can be used instead */
+    virtual void updateInverseKernelMatrix ();
+
+    /** compute K^{-1} * x */
+    virtual void computeInverseKernelMultiply ( const NICE::Vector & x, NICE::Vector & result ) const;
+
+    /** standard const and non-const get functions */
+    virtual const NICE::Matrix & getKernelMatrix() const;
+    virtual NICE::Matrix & getKernelMatrix();
+    virtual const NICE::Matrix & getInverseKernelMatrix() const;
+    virtual NICE::Matrix & getInverseKernelMatrix();
+    virtual const NICE::Matrix & getCholeskyMatrix() const;
+
+    /** get the logdet of the current kernel matrix (cholesky factorization has to be computed in advance) */
+    double getLogDetKernelMatrix () const {
+        return logdet;
+    };
+
+    /** get the numbers of rows (and columns) of the kernel matrix */
+    virtual uint getKernelMatrixSize () const;
+
+    /** get a pre-cached matrix */
+    const NICE::Matrix & getCachedMatrix (int i) const;
+    /** set a pre-cached matrix */
+    void setCachedMatrix (int i, NICE::Matrix *m);
+
+    /** did we already start updateCholeskyFactorization() */
+    bool hasCholeskyFactorization () const {
+        return (choleskyMatrix.rows() == kernelMatrix.rows());
+    };
+
+    /** get efficient GP regression loo estimates and corresponding variances, which can be used
+      * to perform model selection (cf. Rasmussen and Williams) */
+    void getLooEstimates ( const NICE::Vector & y, NICE::Vector & muLoo, NICE::Vector & sigmaLoo ) const;
+
+    /** clone this object */
+    virtual KernelData *clone(void) const;
+
+    void getGPLikelihoodWithOneNewRow( const NICE::Vector & y, const double & oldLogdetK, const int & rowIndex, const NICE::Vector & newRow, const NICE::Vector & oldAlpha, NICE::Vector & newAlpha, double & loglike);
+
+    void getGPLikelihoodWithOneNewRow_FirstPart( const int & rowIndex, const NICE::Vector & newRow);
+
+    void getGPLikelihoodWithOneNewRow_SecondPart( const NICE::Vector & y, const double & oldLogdetK, const NICE::Vector & oldAlpha, NICE::Vector & newAlpha, double & loglike);
+
+    void perform_Rank_2_Update(const int & rowIndex, const NICE::Vector & newRow);
+    void perform_Rank_2k_Update(const std::vector<int> & rowIndices, const std::vector<NICE::Vector> & newRows);
+    void delete_one_row(const int & rowIndex);
+    void delete_multiple_rows(std::vector<int> & indices);
+
+    void setKernelMatrix(const NICE::Matrix & k_matrix);
+
+    void increase_size_by_One();
+    void increase_size_by_k(const uint & k);
+
+    void set_verbose(const bool & _verbose) {
+        verbose = _verbose;
+    };
+    bool get_verbose() {
+        return verbose;
+    };
 };
 
 }
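The KernelData diff is purely cosmetic (tabs replaced by four-space indentation, inline method bodies expanded); no declarations change. For orientation, a sketch of the workflow its comments describe, factorizing the kernel matrix once and then solving K^{-1} x, could look like the following; the config, kernel matrix, and include path are placeholders or assumptions, not part of this commit.

```cpp
// Sketch only: the documented KernelData workflow (factorize, solve, logdet).
#include <core/basics/Config.h>
#include <core/vector/MatrixT.h>
#include <core/vector/VectorT.h>
#include "vislearning/math/kernels/KernelData.h" // assumed include path

void sketchKernelDataUsage ( const NICE::Config & conf,
                             const NICE::Matrix & kernelMatrix,
                             const NICE::Vector & y )
{
  // constructor with a precomputed kernel matrix; "Kernel" is the documented default section
  OBJREC::KernelData kernelData ( &conf, kernelMatrix, "Kernel", false /* updateCholesky */ );

  // the Cholesky factorization is required before computeInverseKernelMultiply
  if ( !kernelData.hasCholeskyFactorization() )
    kernelData.updateCholeskyFactorization();

  // alpha = K^{-1} * y, e.g. for GP regression
  NICE::Vector alpha;
  kernelData.computeInverseKernelMultiply ( y, alpha );

  // the log-determinant is available once the factorization has been computed
  const double logdet = kernelData.getLogDetKernelMatrix();
  (void) logdet; // silence unused-variable warnings in this sketch
}
```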