/**
 * @file KCGPOneClass.cpp
 * @brief One-Class Gaussian Process Regression for Classification
 * @author Erik Rodner + Mi.Ke.
 * @date 12/03/2010
 */
#include <iostream>
#include <typeinfo>

#include "core/vector/Algorithms.h"

#include "vislearning/regression/gpregression/RegGaussianProcess.h"
#include "KCGPOneClass.h"


using namespace std;
using namespace NICE;
using namespace OBJREC;


KCGPOneClass::KCGPOneClass( const Config *conf, Kernel *kernel, const string & section ) 
	: KernelClassifier ( conf, kernel )
{
	//overwrite the default optimization options, since we don't want to perform standard loo or marginal likelihood optimization
	Config config(*conf);
	string modestr = config.gS(section, "detection_mode");
	//enforce not to use the optimization routines (which would lead to a disaster in the 1-class case)
	config.sB(section, "optimize_parameters", false);
	regressionAlgorithm = new RegGaussianProcess ( &config, kernel, section );

	if ( modestr == "mean" ) {
		mode = MEAN_DETECTION_MODE;
		cerr << "One-class classification via GP predictive _mean_ !!!" << endl;
	} else if ( modestr == "variance" ) {
		mode = VARIANCE_DETECTION_MODE;
		cerr << "One-class classification via GP predictive _variance_ !!!" << endl;
	} else {
		fthrow(Exception, "detection_mode has to be either 'mean' or 'variance'!");
	}

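	//if compute_inverse is enabled, K^{-1} is computed once after training and each
	//test-time variance reduces to a matrix-vector product; otherwise a linear system
	//with K is solved for every test point (see classifyKernel below)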
	//should be beneficial when the amount of training data is small compared to test data (given a well-conditioned kernel)
	computeInverse = config.gB(section,"compute_inverse",false); 

	staticNoise = config.gD ( section, "static_noise", 0.0 );
}



KCGPOneClass::KCGPOneClass( const KCGPOneClass & src ) : KernelClassifier ( src )
{
	regressionAlgorithm = src.regressionAlgorithm->clone();
	y = src.y;
	mode = src.mode;
	computeInverse = src.computeInverse;
	staticNoise = src.staticNoise;
	InverseKernelMatrix = src.InverseKernelMatrix;
	kernelData = src.kernelData; //not owned, copying the pointer is sufficient
}

KCGPOneClass::~KCGPOneClass()
{
	delete regressionAlgorithm;
}


void KCGPOneClass::teach ( KernelData *kernelData, const NICE::Vector & y )
{
	if ( y.size() == 0 ) {
		fthrow(Exception, "Number of training vectors is zero!");
	}
	//if even the largest kernel value is (almost) zero, the kernel matrix is degenerate
	if ( almostZero(kernelData->getKernelMatrix().Max()) ) {
		fthrow(Exception, "Kernel matrix is numerically zero and therefore not a valid positive definite kernel matrix!");
	}
	//cerr << kernelData->getKernelMatrix() << endl;

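	//map the labels from {0,1} to {-1,+1}; since only samples of the positive class
	//are given, all transformed labels have to be +1 (checked below)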
	this->y.resize ( y.size() );
	this->y = y;
	this->y = 2*this->y;
	this->y += -1.0;
	
	if ( (this->y.Min() != 1) || (this->y.Max() != 1) ) {
		fthrow(Exception, "This classifier is suitable only for one-class classification problems, i.e. max(y) = min(y) = 1");
	}

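	//add static noise to the diagonal of the kernel matrix, K <- K + sigma^2 * I;
	//this corresponds to an additional observation noise term and improves the
	//condition number of K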
	if ( staticNoise != 0.0 )
		kernelData->getKernelMatrix().addIdentity ( staticNoise );

	regressionAlgorithm->teach ( kernelData, this->y );
	if ( mode == VARIANCE_DETECTION_MODE ) {
		if ( computeInverse ) {
			//precompute and store the inverse kernel matrix for fast test-time evaluation
			kernelData->updateInverseKernelMatrix();
			InverseKernelMatrix = kernelData->getInverseKernelMatrix();
		} else {
			//store a non-owning pointer; kernelData has to stay alive during classification
			this->kernelData = kernelData;
		}
	}
}

ClassificationResult KCGPOneClass::classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const
{
	FullVector scores ( 2 );
	//scores[0] is the fixed score of the negative (outlier) class, scores[1] the one-class score
	scores[0] = 0.0;

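	//mean mode: the score is the GP predictive mean mu_* = k_*^T (K + sigma^2 I)^{-1} y,
	//which is large for test points similar to the training data (all labels are +1)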
	if ( mode == MEAN_DETECTION_MODE ) {
		double yEstimate = regressionAlgorithm->predictKernel ( kernelVector, kernelSelf );
		scores[1] = yEstimate;
	}
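	//variance mode: the score is 1 - sigma_*^2 with the GP predictive variance
	//sigma_*^2 = k(x_*,x_*) - k_*^T K^{-1} k_*; for a normalized kernel (k(x,x) = 1),
	//the variance is within [0,1] and the score is large close to the training data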
	if ( mode == VARIANCE_DETECTION_MODE ) {
		NICE::Vector kInvkstar;
		if ( computeInverse ) {
			//use the precomputed inverse: K^{-1} * k_*
			kInvkstar = InverseKernelMatrix * kernelVector;
		} else {
			//solve the linear system K * x = k_* without an explicit inverse
			kInvkstar.resize ( kernelVector.size() );
			kernelData->computeInverseKernelMultiply ( kernelVector, kInvkstar );
		}
		double sigmaEstimate = kernelSelf - kernelVector.scalarProduct ( kInvkstar );
		scores[1] = 1.0 - sigmaEstimate;
	}
	//threshold the score at 0.5 to obtain the binary decision
	ClassificationResult r ( scores[1] < 0.5 ? 0 : 1, scores );

	return r;
}

KCGPOneClass *KCGPOneClass::clone() const
{
	return new KCGPOneClass ( *this );
}