- /**
- * @file KCGPApproxOneClass.cpp
- * @brief One-class classification via Gaussian process regression: the inverse of the regularized kernel matrix is approximated by a diagonal matrix
- * @author Alexander Lütz
- * @date 22-05-2012 (dd-mm-yyyy)
- */
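- /**
-  * Sketch of the approximation implemented below (read off the code, not taken from an
-  * external reference): instead of inverting the regularized kernel matrix K + staticNoise * I,
-  * we build a diagonal matrix D with
-  *
-  *   D_ii = staticNoise + sum_j K(x_i, x_j)
-  *
-  * and use D^{-1} as a cheap stand-in for the exact inverse. For a test point x* with
-  * kernel vector k* this gives the approximate GP predictions
-  *
-  *   mean:     k*^T D^{-1} y                  (MEAN_DETECTION_MODE)
-  *   variance: k(x*, x*) - k*^T D^{-1} k*     (VARIANCE_DETECTION_MODE)
-  */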
- #include <iostream>
- #include <typeinfo>
- #include <cstring>
- #include "core/vector/Algorithms.h"
- #include "core/vector/VVector.h"
- #include "KCGPApproxOneClass.h"
- using namespace std;
- using namespace NICE;
- using namespace OBJREC;
- KCGPApproxOneClass::KCGPApproxOneClass( const Config *conf, Kernel *kernel, const string & section ) : KernelClassifier ( conf, kernel )
- {
- // this->kernelFunction = kernel;
- //overwrite the default optimization options, since we do not perform the standard leave-one-out or marginal likelihood optimization here
- Config config(*conf);
- string modestr = config.gS(section, "detection_mode");
- if (strcmp("mean", modestr.c_str()) == 0)
- {
- this->mode = MEAN_DETECTION_MODE;
- cerr << "One-class classification via GP predictive _mean_ !!!" << endl;
- }
- else if (strcmp("variance", modestr.c_str()) == 0)
- {
- this->mode = VARIANCE_DETECTION_MODE;
- cerr << "One-class classification via GP predictive _variance_ !!!" << endl;
- }
- this->staticNoise = conf->gD(section, "static_noise", 0.0);
- }
- KCGPApproxOneClass::KCGPApproxOneClass( const KCGPApproxOneClass & src ) : KernelClassifier ( src )
- {
- this->matrixDInv = src.matrixDInv;
- this->InvDY = src.InvDY;
- this->mode = src.mode;
- this->staticNoise = src.staticNoise;
- }
- KCGPApproxOneClass::~KCGPApproxOneClass()
- {
- }
- void KCGPApproxOneClass::teach ( KernelData *kernelData, const NICE::Vector & y )
- {
- fthrow( Exception, "KCGPApproxOneClass::teach: this method is not implemented for this specific type of classifier. Please use the teach method that takes a LabeledSetVector." );
- }
- void KCGPApproxOneClass::teach (const LabeledSetVector &teachSet)
- {
- if ( this->kernelFunction == NULL )
- fthrow( Exception, "KernelClassifier::teach: To use this function, you have to specify a kernel function using the constructor" );
-
- //no new storage has to be allocated here: vecSet and vecSetLabels are inherited from the base class KernelClassifier
- // NICE::VVector vecSet;
- teachSet.getFlatRepresentation (this->vecSet, this->vecSetLabels);
-
- if ( (this->vecSetLabels.Min() != 1) || (this->vecSetLabels.Max() != 1) ) {
- fthrow(Exception, "This classifier is suitable only for one-class classification problems, i.e. max(y) = min(y) = 1");
- }
- this->matrixDInv.resize(this->vecSetLabels.size());
-
- //compute D
- //start with adding some noise, if necessary
- if (this->staticNoise != 0.0)
- this->matrixDInv.set(this->staticNoise);
- else
- this->matrixDInv.set(0.0);
-
- //now sum up all entries of each row in the original kernel matrix
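- // together with the noise initialization above this yields D_ii = staticNoise + sum_j K(x_i, x_j);
- // the inner loop starts at j = i and exploits the symmetry K(x_i, x_j) = K(x_j, x_i) to fill row i and row j at once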
- double kernelScore(0.0);
- for (int i = 0; i < (int)this->vecSetLabels.size(); i++)
- {
- for (int j = i; j < (int)this->vecSetLabels.size(); j++)
- {
- kernelScore = this->kernelFunction->K(vecSet[i],vecSet[j]);
- this->matrixDInv[i] += kernelScore;
- if (i != j)
- this->matrixDInv[j] += kernelScore;
- }
- }
-
- //compute its inverse
- for (int i = 0; i < (int)this->vecSetLabels.size(); i++)
- {
- this->matrixDInv[i] = 1.0 / this->matrixDInv[i];
- }
-
- //and multiply it from right with the label vector (precalculation for mean computation)
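- // i.e. InvDY_i = y_i / D_ii; since the teach set only contains the label 1, this simply copies the diagonal of D^{-1}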
- if(this->mode==MEAN_DETECTION_MODE)
- {
- this->InvDY.resize ( this->vecSetLabels.size() );
- for (int i = 0; i < (int)this->vecSetLabels.size(); i++)
- {
- this->InvDY[i] = this->vecSetLabels[i] * this->matrixDInv[i];
- }
- }
- }
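- /*
-  * Hypothetical usage sketch (the config file name, section name and the way the kernel and
-  * the training set are obtained are assumptions for illustration only; the calls on this
-  * class itself match the interface defined in this file):
-  *
-  *   Config conf ( "kcgpapproxoneclass.conf" );   // must provide "detection_mode" ("mean" or "variance") and optionally "static_noise"
-  *   Kernel *kernel = ...;                        // any kernel implementation usable with KernelClassifier
-  *   KCGPApproxOneClass classifier ( &conf, kernel, "main" );
-  *
-  *   LabeledSetVector trainSet;                   // fill with positive examples only, all carrying label 1
-  *   ...
-  *   classifier.teach ( trainSet );
-  *
-  *   // at test time, compute the kernel vector k* between the test example and the
-  *   // training examples as well as k(x*, x*), then call
-  *   // classifier.classifyKernel ( kStar, kSelf );
-  */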
- ClassificationResult KCGPApproxOneClass::classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const
- {
- FullVector scores ( 2 );
- scores[0] = 0.0;
- if(this->mode==MEAN_DETECTION_MODE)
- {
- // kernelSelf is not needed for the GP predictive mean
- if ( kernelVector.size() != this->vecSetLabels.size() )
- fthrow(Exception, "KCGPApproxOneClass::classifyKernel: size of kernel value vector " <<
- kernelVector.size() << " does not match number of training points " << this->vecSetLabels.size() );
-
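- // approximate predictive mean: k*^T D^{-1} y, with InvDY = D^{-1} y precomputed in teach()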
- double yEstimate = kernelVector.scalarProduct ( InvDY );
- scores[1] = yEstimate;
- }
- if(this->mode==VARIANCE_DETECTION_MODE)
- {
- if ( kernelVector.size() != this->vecSetLabels.size() )
- fthrow(Exception, "KCGPApproxOneClass::classifyKernel: size of kernel value vector " <<
- kernelVector.size() << " does not match number of training points " << this->vecSetLabels.size() );
-
- NICE::Vector rightPart (this->vecSetLabels.size());
- for (int i = 0; i < (int)this->vecSetLabels.size(); i++)
- {
- rightPart[i] = kernelVector[i] * this->matrixDInv[i];
- }
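- // rightPart now holds D^{-1} k*, so the following is the approximate predictive variance k(x*,x*) - k*^T D^{-1} k*;
- // the score is 1 minus this uncertainty: small variance (a point well covered by the training data) gives a high one-class score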
- double uncertainty = kernelSelf - kernelVector.scalarProduct ( rightPart );
- scores[1] = 1.0 - uncertainty;
- }
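- // scores[1] holds the predictive mean (or 1 - variance); values of at least 0.5 are accepted as the target class (class index 1)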
- ClassificationResult r ( scores[1]<0.5 ? 0 : 1, scores );
- return r;
- }
- KCGPApproxOneClass *KCGPApproxOneClass::clone() const
- {
- return new KCGPApproxOneClass ( *this );
- }
- void KCGPApproxOneClass::store(std::ostream& ofs, int type) const
- {
- ofs << this->matrixDInv << std::endl;
- ofs << this->InvDY << std::endl;
- ofs << this->mode << std::endl;
- ofs << this->staticNoise << std::endl;
- }
- void KCGPApproxOneClass::restore(std::istream& ifs, int type)
- {
- ifs >> this->matrixDInv;
- ifs >> this->InvDY;
- ifs >> this->mode;
- ifs >> this->staticNoise;
- }
- void KCGPApproxOneClass::clear()
- {
- }