GPRegressionOptimizationProblem.h

/**
 * @file GPRegressionOptimizationProblem.h
 * @author Erik Rodner
 * @date 12/09/2009
 */
#ifndef _NICE_OBJREC_GPREGRESSIONOPTIMIZATIONPROBLEMINCLUDE
#define _NICE_OBJREC_GPREGRESSIONOPTIMIZATIONPROBLEMINCLUDE

#include "core/vector/VVector.h"
#include "vislearning/math/kernels/ParameterizedKernel.h"
#include "core/optimization/limun/OptimizationProblemFirst.h"
#include "vislearning/math/kernels/KernelData.h"
#include "vislearning/math/kernels/TraceApproximation.h"
#include "modelselcrit/GPMSCLooEstimates.h"
namespace OBJREC {

/** @class GPRegressionOptimizationProblem
 * Hyperparameter Optimization Problem for GP Regression
 *
 * @author Erik Rodner
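 *
 * A minimal usage sketch (illustrative only): kernelData, y, kernel and
 * looCriterion are assumed to be prepared by the caller, and the optimizer
 * FirstOrderRasmussen together with its optimizeFirst() call is an assumption
 * about the NICE optimization module that may have to be adapted.
 * @code
 * // looCriterion may also be NULL if no additional model selection is wanted
 * OBJREC::GPRegressionOptimizationProblem problem ( kernelData, y, kernel,
 *                                                   true, looCriterion );
 *
 * NICE::FirstOrderRasmussen optimizer;  // assumed gradient-based optimizer
 * optimizer.optimizeFirst ( problem );  // assumed call: minimizes the objective
 *
 * // optionally switch to the hyperparameters with the best leave-one-out
 * // estimate observed during optimization and refresh dependent quantities
 * problem.useLooParameters ();
 * problem.update ();
 * @endcode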
 */
class GPRegressionOptimizationProblem : public NICE::OptimizationProblemFirst
{
  protected:
    /** kernel matrix data used during the optimization */
    KernelData *kernelData;
    /** regression targets */
    NICE::VVector y;
    /** best average leave-one-out error observed during optimization */
    double bestAvgLooError;
    /** hyperparameters corresponding to bestAvgLooError */
    NICE::Vector bestLooParameters;
    /** parameterized kernel whose hyperparameters are optimized */
    ParameterizedKernel *kernel;
    /** print progress information */
    bool verbose;
    /** An *additional* model selection criterion (e.g. leave-one-out), which is
     * used to select parameters from the set of parameters generated during
     * optimization. We restrict ourselves to loo-based model selection criteria,
     * because they are cheap to compute from the matrices and vectors this
     * optimization has to precompute anyway (inverse kernel matrix, Cholesky
     * decomposition, etc.).
     * Recommendation: give it a try! */
    const GPMSCLooLikelihoodRegression *modelselcrit;
    /** Stochastic approximation used for the trace term. This approximation is
     * highly unstable, but you can also skip the trace term completely by
     * setting numTraceSamples to zero, which amounts to assuming a kernel
     * matrix of constant volume.
     * Recommendation: do not use this option, unless you like a kind of random
     * search for hyperparameters. */
    const TraceApproximation *traceApproximation;
  public:
    /** constructor using a single vector of regression targets */
    GPRegressionOptimizationProblem ( KernelData *kernelData, const NICE::Vector & y,
                                      ParameterizedKernel *kernel, bool verbose = false,
                                      const GPMSCLooLikelihoodRegression *modelselcrit = NULL,
                                      const TraceApproximation *traceApproximation = NULL );

    /** constructor using multiple vectors of regression targets */
    GPRegressionOptimizationProblem ( KernelData *kernelData, const NICE::VVector & y,
                                      ParameterizedKernel *kernel, bool verbose = false,
                                      const GPMSCLooLikelihoodRegression *modelselcrit = NULL,
                                      const TraceApproximation *traceApproximation = NULL );
    /** evaluate the objective for the current hyperparameters */
    double computeObjective();
    /** compute the gradient of the objective w.r.t. the hyperparameters */
    void computeGradient( NICE::Vector& newGradient );
    /** overwrite the current hyperparameter vector */
    void setParameters ( const NICE::Vector & newParameters ) { parameters() = newParameters; }
    /** switch to the hyperparameters with the best leave-one-out estimate observed so far */
    void useLooParameters ();
    /** update all quantities that depend on the current hyperparameters */
    void update();
};
}

#endif