
added KCGPApproxOneClass

Alexander Luetz 13 years ago
parent
commit
4d358d6f2c

+ 169 - 0
classifier/kernelclassifier/KCGPApproxOneClass.cpp

@@ -0,0 +1,169 @@
+/** 
+* @file KCGPApproxOneClass.cpp
+* @brief One-Class Gaussian Process Regression for Classification: we approximate the inverse of the regularized kernel matrix using a diagonal matrix
+* @author Alexander Lütz
+* @date 22-05-2012 (dd-mm-yyyy)
+*/
+#include <iostream>
+#include <typeinfo>
+#include <cstring>
+
+#include "core/vector/Algorithms.h"
+#include "core/vector/VVector.h"
+
+#include "vislearning/classifier/kernelclassifier/KCGPApproxOneClass.h"
+
+
+using namespace std;
+using namespace NICE;
+using namespace OBJREC;
+
+
+KCGPApproxOneClass::KCGPApproxOneClass( const Config *conf, Kernel *kernel, const string & section ) : KernelClassifier ( conf, kernel )
+{
+//   this->kernelFunction = kernel;  
+  //in contrast to the other GP-based classifiers, no loo or marginal likelihood optimization is needed here -- we only read the detection mode
+  Config config(*conf);
+  string modestr = config.gS(section,"detection_mode"); 
+
+  if(strcmp("mean",modestr.c_str())==0){
+    this->mode=MEAN_DETECTION_MODE;cerr << "One-class classification via GP predictive _mean_ !!!"<<endl;
+  }
+  if(strcmp("variance",modestr.c_str())==0){
+    mode=VARIANCE_DETECTION_MODE;cerr << "One-class classification via GP predictive _variance_ !!!"<<endl;
+  }
+
+  this->staticNoise = conf->gD(section, "static_noise", 0.0);
+}
+
+
+
+KCGPApproxOneClass::KCGPApproxOneClass( const KCGPApproxOneClass & src ) : KernelClassifier ( src )
+{
+  this->matrixDInv = src.matrixDInv;
+  this->InvDY = src.InvDY;
+  this->mode = src.mode;
+  this->staticNoise = src.staticNoise;
+}
+
+KCGPApproxOneClass::~KCGPApproxOneClass()
+{
+}
+
+
+void KCGPApproxOneClass::teach ( KernelData *kernelData, const NICE::Vector & y )
+{
+    fthrow( Exception, "KCGPApproxOneClass::teach: this method is not implemented for this specific type of classifier. Please use the second teach-method." );  
+}
+
+void KCGPApproxOneClass::teach (const LabeledSetVector &teachSet)
+{
+  if ( this->kernelFunction == NULL )
+    fthrow( Exception, "KernelClassifier::teach: To use this function, you have to specify a kernel function using the constructor" );  
+  
+  //we do not have to allocate new storage here since these variables come from the interface KernelClassifier
+//   NICE::VVector vecSet;
+
+  teachSet.getFlatRepresentation (this->vecSet, this->vecSetLabels);
+    
+  if ( (this->vecSetLabels.Min() != 1) || (this->vecSetLabels.Max() != 1) ) {
+    fthrow(Exception, "This classifier is suitable only for one-class classification problems, i.e. max(y) = min(y) = 1");
+  }  
+
+  this->matrixDInv.resize(this->vecSetLabels.size());
+ 
+  //compute D 
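+  // D is a diagonal approximation of the regularized kernel matrix: D_ii = staticNoise + sum_j K(x_i, x_j)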
+  //start with adding some noise, if necessary
+  if (this->staticNoise != 0.0)
+    this->matrixDInv.set(this->staticNoise);
+  else
+    this->matrixDInv.set(0.0);
+  
+  //now sum up all entries of each row in the original kernel matrix
+  double kernelScore(0.0);
+  for (int i = 0; i < this->vecSetLabels.size(); i++)
+  {
+    for (int j = i; j < this->vecSetLabels.size(); j++)
+    {
+      kernelScore = this->kernelFunction->K(vecSet[i],vecSet[j]);
+      this->matrixDInv[i] += kernelScore;
+      if (i != j)
+        this->matrixDInv[j] += kernelScore; 
+    }
+  }  
+  
+  //compute its inverse
+  for (int i = 0; i < this->vecSetLabels.size(); i++)
+  {
+    this->matrixDInv[i] = 1.0 / this->matrixDInv[i];
+  }
+  
+  //and multiply it from right with the label vector (precalculation for mean computation)
+  if(this->mode==MEAN_DETECTION_MODE)
+  {
+    this->InvDY.resize ( this->vecSetLabels.size() );
+    for (int i = 0; i < this->vecSetLabels.size(); i++)
+    {
+      this->InvDY[i] = this->vecSetLabels[i] * this->matrixDInv[i];
+    }
+  }  
+}
+
+ClassificationResult KCGPApproxOneClass::classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const
+{
+  FullVector scores ( 2 );
+  scores[0] = 0.0;
+
+  if(this->mode==MEAN_DETECTION_MODE)
+  {
+    // kernelSelf is not needed for the regression type of GP
+
+    if ( kernelVector.size() != this->vecSetLabels.size() ) 
+      fthrow(Exception, "KCGPApproxOneClass::classifyKernel: size of kernel value vector " << 
+        kernelVector.size() << " does not match number of training points " << this->vecSetLabels.size() );
+      
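+    // approximate predictive mean with the diagonal matrix D: mu_* = k_*^T D^{-1} y  (InvDY = D^{-1} y was precomputed in teach())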
+    double yEstimate = kernelVector.scalarProduct ( InvDY );    
+    scores[1] = yEstimate;
+  }
+  if(this->mode==VARIANCE_DETECTION_MODE)
+  {
+    if ( kernelVector.size() != this->vecSetLabels.size() ) 
+      fthrow(Exception, "KCGPApproxOneClass::classifyKernel: size of kernel value vector " << 
+        kernelVector.size() << " does not match number of training points " << this->vecSetLabels.size() );
+      
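+    // approximate predictive variance: sigma_*^2 = k(x_*,x_*) - k_*^T D^{-1} k_*  (rightPart holds D^{-1} k_*)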
+    NICE::Vector rightPart (this->vecSetLabels.size());
+    for (int i = 0; i < this->vecSetLabels.size(); i++)
+    {
+      rightPart[i] = kernelVector[i] * this->matrixDInv[i];
+    }
+
+    double uncertainty = kernelSelf - kernelVector.scalarProduct ( rightPart );
+    scores[1] = 1.0 - uncertainty;
+
+  }
+  ClassificationResult r ( scores[1]<0.5 ? 0 : 1, scores );
+
+  return r;
+}
+
+KCGPApproxOneClass *KCGPApproxOneClass::clone() const
+{
+  return new KCGPApproxOneClass ( *this );
+}
+
+void KCGPApproxOneClass::store(std::ostream& ofs, int type) const
+{
+  ofs << this->matrixDInv << std::endl;
+  ofs << this->InvDY << std::endl;
+  ofs << this->mode << std::endl;
+  ofs << this->staticNoise << std::endl;
+}
+
+void KCGPApproxOneClass::restore(std::istream& ifs, int type)
+{
+  ifs >> this->matrixDInv;
+  ifs >> this->InvDY;
+  ifs >> this->mode;
+  ifs >> this->staticNoise;
+} 
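
For reference, the approximation implemented in teach() and classifyKernel() above can be summarized as follows (restated directly from the code, not from an external derivation). Instead of inverting the regularized kernel matrix K + \sigma^2 I, a diagonal matrix D built from its row sums is used,

    D_{ii} = \sigma^2_{\mathrm{noise}} + \sum_j K(x_i, x_j), \qquad (K + \sigma^2 I)^{-1} \approx D^{-1},

so the "inverse" is obtained by inverting each diagonal entry separately. With the kernel vector k_* = (K(x_*, x_1), \dots, K(x_*, x_n))^T of a test point x_* and labels y = 1, the two detection modes compute the scores

    \text{mean mode:} \quad s(x_*) = k_*^T D^{-1} y, \qquad \text{variance mode:} \quad s(x_*) = 1 - \bigl( K(x_*, x_*) - k_*^T D^{-1} k_* \bigr),

i.e. the approximate predictive mean or one minus the approximate predictive variance.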

+ 69 - 0
classifier/kernelclassifier/KCGPApproxOneClass.h

@@ -0,0 +1,69 @@
+/** 
+* @file KCGPApproxOneClass.h
+* @brief One-Class Gaussian Process Regression for Classification: we approximate the inverse of the regularized kernel matrix using a diagonal matrix
+* @author Alexander Lütz
+* @date 22-05-2012 (dd-mm-yyyy)
+*/
+#ifndef KCGPAPPROXONECLASSINCLUDE
+#define KCGPAPPROXONECLASSINCLUDE
+
+#include "vislearning/cbaselib/ClassificationResult.h"
+
+#include "vislearning/classifier/classifierbase/KernelClassifier.h"
+
+#include "vislearning/math/kernels/Kernel.h"
+#include "vislearning/math/kernels/ParameterizedKernel.h"
+
+#include "vislearning/regression/regressionbase/RegressionAlgorithmKernel.h"
+
+
+#define VARIANCE_DETECTION_MODE 1
+#define MEAN_DETECTION_MODE 2
+
+namespace OBJREC {
+ 
+class KCGPApproxOneClass : public KernelClassifier
+{
+
+  protected:
+    /** element-wise inverse of the diagonal approximation D of the regularized kernel matrix */
+    NICE::Vector matrixDInv;
+    /** precomputed product D^{-1} y, used for the predictive mean */
+    NICE::Vector InvDY;
+    /** detection mode: MEAN_DETECTION_MODE or VARIANCE_DETECTION_MODE */
+    int mode;
+    /** static noise added to the diagonal entries of the kernel matrix */
+    double staticNoise;
+
+  public:
+  
+  /** simple constructor */
+  KCGPApproxOneClass( const NICE::Config *conf, Kernel *kernel = NULL, const std::string & section = "OneClassGP" );
+
+  /** copy constructor */
+  KCGPApproxOneClass( const KCGPApproxOneClass & src );
+      
+  /** simple destructor */
+  virtual ~KCGPApproxOneClass();
+    
+  /** not supported by this classifier, since it needs the original feature vectors -- throws an exception, use the LabeledSetVector variant below instead */
+  void teach ( KernelData *kernelData, const NICE::Vector & y );
+  
+  /** teach the classifier with a one-class training set (all labels have to be 1) */
+  void teach (const LabeledSetVector &teachSet);
+    
+  /** classify an example by using its kernel values with the training set,
+    be careful with the order in @param kernelVector */
+//   ClassificationResult classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const;
+  virtual ClassificationResult classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const;
+   
+    /** clone this object */
+  KCGPApproxOneClass *clone() const;
+
+  void restore(std::istream&, int);
+  void store(std::ostream&, int) const;
+  void clear();
+  
+};
+
+
+}
+
+#endif
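
Below is a minimal, self-contained C++ sketch of the same diagonal approximation, kept independent of the NICE/vislearning classes above; the RBF kernel, its bandwidth and the toy data are invented purely for illustration.

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// illustrative RBF kernel (any symmetric positive definite kernel would do)
static double rbfKernel ( const std::vector<double> & a, const std::vector<double> & b, double gamma )
{
  double dist2 = 0.0;
  for ( std::size_t d = 0; d < a.size(); d++ )
    dist2 += ( a[d] - b[d] ) * ( a[d] - b[d] );
  return std::exp ( -gamma * dist2 );
}

int main ()
{
  // toy one-class training set (all labels are implicitly +1)
  std::vector< std::vector<double> > X = { {0.0, 0.0}, {0.1, 0.2}, {-0.2, 0.1}, {0.05, -0.1} };
  const double gamma = 1.0;
  const double staticNoise = 0.0;

  // D_ii = staticNoise + sum_j K(x_i, x_j), inverted element-wise (as in teach())
  std::vector<double> matrixDInv ( X.size(), staticNoise );
  for ( std::size_t i = 0; i < X.size(); i++ )
    for ( std::size_t j = 0; j < X.size(); j++ )
      matrixDInv[i] += rbfKernel ( X[i], X[j], gamma );
  for ( std::size_t i = 0; i < X.size(); i++ )
    matrixDInv[i] = 1.0 / matrixDInv[i];

  // score a test point in both detection modes (as in classifyKernel())
  std::vector<double> xStar = { 0.05, 0.05 };
  const double kernelSelf = rbfKernel ( xStar, xStar, gamma );

  double meanScore = 0.0; // k_*^T D^{-1} y with y = 1
  double kDk = 0.0;       // k_*^T D^{-1} k_*
  for ( std::size_t i = 0; i < X.size(); i++ )
  {
    const double k = rbfKernel ( xStar, X[i], gamma );
    meanScore += k * matrixDInv[i];
    kDk += k * matrixDInv[i] * k;
  }
  const double varianceScore = 1.0 - ( kernelSelf - kDk );

  std::cout << "mean score:     " << meanScore << std::endl;
  std::cout << "variance score: " << varianceScore << std::endl;
  return 0;
}

Within the library itself, the corresponding workflow is to construct KCGPApproxOneClass with a config section that sets detection_mode to "mean" or "variance" (and optionally static_noise), call teach() with a LabeledSetVector whose labels are all 1, and pass the kernel values of a test example together with its self-kernel value to classifyKernel().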