/**
 * @file KCGPRegOneVsAll.h
 * @author Erik Rodner
 * @date 12/10/2009
 */
#ifndef _NICE_OBJREC_KCGPREGONEVSALLINCLUDE
#define _NICE_OBJREC_KCGPREGONEVSALLINCLUDE

#include "vislearning/classifier/classifierbase/KernelClassifier.h"
#include "vislearning/regression/gpregression/RegGaussianProcess.h"
#include "vislearning/math/kernels/TraceApproximation.h"
#include "vislearning/regression/gpregression/modelselcrit/genericGPModelSelection.h"

#include <vector>

namespace OBJREC
{

#undef ROADWORKS
#define ROADWORKS fthrow(NICE::Exception, "Persistent interface not implemented!");

/** @class KCGPRegOneVsAll
 * One vs. all GP regression classifier with joint optimization
 * of the kernel parameters.
 * A usage sketch is given below the class declaration.
 *
 * @author Erik Rodner
 */
class KCGPRegOneVsAll : public KernelClassifier
{
  protected:
    /** set of binary classifiers together with the corresponding class index */
    std::vector< std::pair<int, RegGaussianProcess *> > classifiers;

    /** prototype which is cloned to generate new regression objects */
    const RegGaussianProcess *prototype;

    /** whether to optimize the hyper-parameters */
    bool optimizeParameters;

    /** whether to print verbose status information */
    bool verbose;

    /** maximum number of iterations of the hyper-parameter estimation */
    int maxIterations;

    /** trace approximation used during hyper-parameter optimization */
    TraceApproximation *traceApproximation;

    /** model selection criterion (leave-one-out likelihood) */
    GPMSCLooLikelihoodRegression *modelselcrit;

    /** use the hyper-parameters which lead to the best leave-one-out criterion */
    bool useLooParameters;

    /** whether to invest some computation time to estimate the uncertainty of the prediction */
    bool computeUncertainty;

    /** for computing uncertainties we need the Cholesky decomposition of the kernel matrix */
    NICE::Matrix choleskyMatrix;

    /** whether to calibrate the probabilities using uncertainty estimates */
    bool calibrateProbabilities;

    /** number of samples drawn to estimate the calibrated probabilities */
    uint numSamplesCalibration;

  public:

    /** default constructor */
    KCGPRegOneVsAll() {}

    /** simple constructor */
    KCGPRegOneVsAll ( const NICE::Config *conf, Kernel *kernelFunction = NULL, const std::string & section = "KCGPRegOneVsAll" );

    /** copy constructor */
    KCGPRegOneVsAll ( const KCGPRegOneVsAll &vcova );

    /** simple destructor */
    virtual ~KCGPRegOneVsAll();

    /** teach the classifier with a kernel matrix and the corresponding class labels
     * @param kernelData kernel values of the training set
     * @param y class labels of the training examples
     */
    void teach ( KernelData *kernelData, const NICE::Vector & y );
    void teach ( KernelData *kernelData, const std::vector<double> & y );

    /** classify an example given its kernel values with respect to the training set;
     * note that the entries of @param kernelVector have to follow the order of the training examples
     * @param kernelSelf kernel value of the test example with itself
     */
    ClassificationResult classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const;
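
    /** persistent interface (restore/store/clear); cf. the ROADWORKS macro above */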
    void restore ( std::istream&, int );
    void store ( std::ostream&, int ) const;
    void clear();

    /** clone this object */
    virtual KCGPRegOneVsAll *clone ( void ) const;
};
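
/* A minimal usage sketch (illustrative only): the construction of the
 * NICE::Config and of the KernelData object below follows assumed signatures
 * of the surrounding library and is not part of this header.
 *
 *   NICE::Config conf ( "classifier.conf" );   // hypothetical config file
 *   KCGPRegOneVsAll classifier ( &conf );      // default kernel function (NULL)
 *
 *   // kernelData is assumed to wrap the precomputed kernel matrix of the training set
 *   classifier.teach ( &kernelData, trainingLabels );
 *
 *   // kernelVector: kernel values between the test example and all training examples
 *   // kernelSelf:   kernel value of the test example with itself
 *   ClassificationResult result = classifier.classifyKernel ( kernelVector, kernelSelf );
 */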

#undef ROADWORKS

}

#endif