/**
* @file KCGPLaplace.cpp
* @brief Gaussian process classification using the Laplace approximation
* @author Erik Rodner
* @date 12/03/2009
*/
#include "KCGPLaplace.h"

#include <cmath>
#include <iostream>
#include <typeinfo>

#include "core/vector/Algorithms.h"
#include "core/optimization/limun/FirstOrderTrustRegion.h"
#include "core/optimization/limun/FirstOrderRasmussen.h"
#include "vislearning/classifier/kernelclassifier/GPLaplaceOptimizationProblem.h"
#include "LHCumulativeGauss.h"

using namespace std;
using namespace NICE;
using namespace OBJREC;
  18. KCGPLaplace::KCGPLaplace( const Config *conf, Kernel *kernel, const string & section )
  19. : KernelClassifier ( conf, kernel ), laplaceApproximation(conf, section)
  20. {
  21. optimizeParameters = conf->gB(section, "optimize_parameters", true );
  22. string optimizationMethod_s = conf->gS(section, "optimization_method", "rasmussen" );
  23. if ( optimizationMethod_s == "rasmussen" )
  24. optimizationMethod = OPTIMIZATION_METHOD_RASMUSSEN;
  25. else if ( optimizationMethod_s == "trustregion" )
  26. optimizationMethod = OPTIMIZATION_METHOD_TRUSTREGION;
  27. else
  28. fthrow(Exception, "Optimization method " << optimizationMethod_s << " is unknown.");
  29. ParameterizedKernel *pkernelFunction = dynamic_cast< ParameterizedKernel * > ( kernelFunction );
  30. if ( optimizeParameters && (pkernelFunction == NULL) )
  31. {
  32. cerr << "KCGPLaplace: Unable to optimize hyperparameters with no specified kernel function" << endl;
  33. cerr << "KCGPLaplace: Switching to non-optimization mode" << endl;
  34. optimizeParameters = false;
  35. }
  36. // the only one supported by now
  37. likelihoodFunction = new LHCumulativeGauss( conf->gD(section, "likelihood_lengthscale", sqrt(2)) );
  38. verbose = conf->gB(section, "verbose", true );
  39. }
  40. KCGPLaplace::KCGPLaplace( const KCGPLaplace & src ) : KernelClassifier ( src ), laplaceApproximation ( src.laplaceApproximation )
  41. {
  42. laplaceApproximation = src.laplaceApproximation;
  43. verbose = src.verbose;
  44. optimizeParameters = src.optimizeParameters;
  45. optimizationMethod = src.optimizationMethod;
  46. // we loose the length scale in this case
  47. likelihoodFunction = new LHCumulativeGauss(); // FIXME: clone() for likelihoods
  48. }
  49. KCGPLaplace::~KCGPLaplace()
  50. {
  51. if ( likelihoodFunction != NULL )
  52. delete likelihoodFunction;
  53. }
  54. void KCGPLaplace::teach ( KernelData *kernelData, const NICE::Vector & _y )
  55. {
  56. if ( _y.size() <= 0 ) {
  57. fthrow(Exception, "Number of training vectors is zero!");
  58. }
  59. this->y.resize ( _y.size() );
  60. this->y = _y;
  61. this->y = 2*this->y;
  62. this->y += -1.0;
  63. if ( (this->y.Min() != -1) || (this->y.Max() != 1) ) {
  64. fthrow(Exception, "This classifier is suitable only for binary classification problems" );
  65. }
  66. if ( optimizeParameters )
  67. {
  68. if ( (kernelFunction != NULL) )
  69. {
  70. ParameterizedKernel *kernelPara = dynamic_cast< ParameterizedKernel * > ( kernelFunction );
  71. if ( kernelPara == NULL ) {
  72. fthrow(Exception, "KCGPLaplace: you have to specify a parameterized kernel !");
  73. }
  74. GPLaplaceOptimizationProblem gpopt ( kernelData, this->y, kernelPara, likelihoodFunction, &laplaceApproximation, verbose );
  75. cout << "KCGPLaplace: Hyperparameter optimization ..." << endl;
  76. if ( optimizationMethod == OPTIMIZATION_METHOD_TRUSTREGION )
  77. {
  78. if ( verbose )
  79. cerr << "KCGPLaplace: using trust region optimizer" << endl;
  80. FirstOrderTrustRegion *optimizer = new FirstOrderTrustRegion();
  81. optimizer->setEpsilonG ( 0.01 );
  82. optimizer->setMaxIterations ( 200 );
  83. optimizer->optimizeFirst ( gpopt );
  84. delete optimizer;
  85. } else if ( optimizationMethod == OPTIMIZATION_METHOD_RASMUSSEN ) {
  86. if ( verbose )
  87. cerr << "KCGPLaplace: using conjugate gradient optimizer" << endl;
  88. FirstOrderRasmussen *optimizer = new FirstOrderRasmussen();
  89. optimizer->setEpsilonG ( 0.01 );
  90. optimizer->setMaxIterations ( -200 );
  91. optimizer->optimizeFirst ( gpopt );
  92. delete optimizer;
  93. } else {
  94. fthrow(Exception, "Unknown optimization method " << optimizationMethod );
  95. }
  96. cout << "KCGPLaplace: Hyperparameter optimization ...done" << endl;
  97. gpopt.update();
  98. Vector parameters;
  99. kernelPara->getParameters ( parameters );
  100. cout << "KCGPLaplace: Optimization finished: " << parameters << endl << endl;
  101. } else {
  102. fthrow(Exception, "KCGPRegression: you have to specify a kernel function !" );
  103. }
  104. } else {
  105. laplaceApproximation.approximate ( kernelData, this->y, likelihoodFunction );
  106. }
  107. }
  108. ClassificationResult KCGPLaplace::classifyKernel ( const NICE::Vector & kernelVector, double kernelSelf ) const
  109. {
  110. double yEstimate = laplaceApproximation.predict ( kernelVector, kernelSelf, y, likelihoodFunction );
  111. FullVector scores ( 2 );
  112. scores[0] = 1.0 - yEstimate;
  113. scores[1] = yEstimate;
  114. ClassificationResult r ( (yEstimate < 0.5) ? 0 : 1, scores );
  115. return r;
  116. }
  117. KCGPLaplace *KCGPLaplace::clone() const
  118. {
  119. return new KCGPLaplace ( *this );
  120. }