// KCGPRegOneVsAll.h — one-vs-all Gaussian process regression classifier (declaration header)
/**
* @file KCGPRegOneVsAll.h
* @brief One vs. all GP regression classifier with joint kernel hyper-parameter optimization
* @author Erik Rodner
* @date 12/10/2009
*/
  6. #ifndef _NICE_OBJREC_KCGPREGONEVSALLINCLUDE
  7. #define _NICE_OBJREC_KCGPREGONEVSALLINCLUDE
#include <cstddef>
#include <vector>

#include "vislearning/classifier/classifierbase/KernelClassifier.h"
#include "vislearning/math/kernels/TraceApproximation.h"
#include "vislearning/regression/gpregression/RegGaussianProcess.h"
#include "vislearning/regression/gpregression/modelselcrit/genericGPModelSelection.h"
  13. namespace OBJREC
  14. {
  15. #undef ROADWORKS
  16. #define ROADWORKS fthrow(NICE::Exception, "Persistent interface not implemented!");
  17. /** @class KCGPRegOneVsAll
  18. * One vs. All GP regression classifier with joint optimization
  19. * of kernel parameters
  20. *
  21. * @author Erik Rodner
  22. */
  23. class KCGPRegOneVsAll : public KernelClassifier
  24. {
  25. protected:
  26. /** set of classifiers with the corresponding class */
  27. std::vector< std::pair<int, RegGaussianProcess *> > classifiers;
  28. /** clone from prototype to generate new classifiers */
  29. const RegGaussianProcess *prototype;
  30. /** whether to optimize hyper-parameters */
  31. bool optimizeParameters;
  32. /** tell us something about what you are doing */
  33. bool verbose;
  34. /** maximum number of iterations of the hyper-parameter estimation */
  35. int maxIterations;
  36. TraceApproximation *traceApproximation;
  37. GPMSCLooLikelihoodRegression *modelselcrit;
  38. /** use the hyperparameters which lead to the best leave-one-out criterion */
  39. bool useLooParameters;
  40. /** whether to invest some computation time to estimate the uncertainty of the prediction */
  41. bool computeUncertainty;
  42. /** for computing uncertainties we need the cholesky decomposition of the kernel matrix */
  43. NICE::Matrix choleskyMatrix;
  44. /** whether to calibrate the probabilities using uncertainty estimates */
  45. bool calibrateProbabilities;
  46. /** how many samples should we draw to estimate the probabilities */
  47. uint numSamplesCalibration;
  48. public:
  49. /** simplest constructor */
  50. KCGPRegOneVsAll() {};
  51. /** simple constructor */
  52. KCGPRegOneVsAll ( const NICE::Config *conf, Kernel *kernelFunction = NULL, const std::string & section = "KCGPRegOneVsAll" );
  53. /** copy constructor */
  54. KCGPRegOneVsAll ( const KCGPRegOneVsAll &vcova );
  55. /** simple destructor */
  56. virtual ~KCGPRegOneVsAll();
  57. /** teach the classifier with a kernel matrix and the corresponding class labels @param y ! */
  58. void teach ( KernelData *kernelData, const NICE::Vector & y );
  59. void teach ( KernelData *kernelData, const std::vector<double> & y );
  60. /** classify an example by using its kernel values with the training set,
  61. be careful with the order in @param kernelVector */
  62. ClassificationResult classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const;
  63. void restore ( std::istream&, int );
  64. void store ( std::ostream&, int ) const;
  65. void clear();
  66. /** clone this object */
  67. virtual KCGPRegOneVsAll *clone ( void ) const;
  68. };
  69. #undef ROADWORKS
  70. }
  71. #endif