DerivativeBasedOptimizer.cpp 2.6 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101
  1. //////////////////////////////////////////////////////////////////////
  2. //
  3. // DerivativeBasedOptimizer.cpp: Implementation of the DerivativeBased
  4. // Optimizer class.
  5. //
  6. // Written by Matthias Wacker
  7. // edited by Johannes Ruehle, 2012-10-11
  8. //////////////////////////////////////////////////////////////////////
  9. #include "optimization/DerivativeBasedOptimizer.h"
  10. using namespace optimization;
  11. DerivativeBasedOptimizer::DerivativeBasedOptimizer( OptLogBase *loger): SuperClass(loger)
  12. {
  13. m_analyticalGradients = true;
  14. m_gradientTolActive = false;
  15. m_gradientTol = 1.0e-3;
  16. }
  17. DerivativeBasedOptimizer::DerivativeBasedOptimizer( const DerivativeBasedOptimizer &opt) : SuperClass(opt)
  18. {
  19. m_analyticalGradients = opt.m_analyticalGradients;
  20. m_gradientTolActive = opt.m_gradientTolActive;
  21. m_gradientTol = opt.m_gradientTol;
  22. m_gradient = opt.m_gradient;
  23. }
/// Destructor: nothing to release explicitly; members clean up themselves.
DerivativeBasedOptimizer::~DerivativeBasedOptimizer()
{
}
  27. void DerivativeBasedOptimizer::setGradientTol(bool active, double norm)
  28. {
  29. m_gradientTol = norm;
  30. m_gradientTolActive = active;
  31. }
  32. inline double DerivativeBasedOptimizer::getGradientTol()
  33. {
  34. return m_gradientTol;
  35. }
/// (Re)initialize the optimizer before a run.
/// NOTE(review): SuperClass::init() must run first — it presumably
/// establishes m_numberOfParameters, which sizes the gradient below
/// (TODO confirm against the base class).
void DerivativeBasedOptimizer::init()
{
	SuperClass::init();
	// Allocate the gradient as an m_numberOfParameters x 1 column vector.
	m_gradient = matrix_type(m_numberOfParameters,1);
}
  41. void DerivativeBasedOptimizer::useAnalyticalGradients(bool useAnalyticalGradients)
  42. {
  43. m_analyticalGradients = useAnalyticalGradients;
  44. }
  45. const matrix_type DerivativeBasedOptimizer::getNumericalGradient(const matrix_type & x , const matrix_type & maskWidth)
  46. {
  47. matrix_type grad(m_numberOfParameters,1);
  48. matrix_type grid(m_numberOfParameters, 2 * m_numberOfParameters);
  49. for(int i=0; i < static_cast<int>(m_numberOfParameters);i++)
  50. {
  51. for(int j = 0 ; j< 2 * static_cast<int>(m_numberOfParameters);j++)
  52. {
  53. grid[i][j] = x[i][0] + (( j == i*2 )? +maskWidth[i][0] : 0.0)
  54. + (( j == i*2+1 )? -maskWidth[i][0] : 0.0);
  55. }
  56. }
  57. matrix_type values = evaluateSetCostFunction(grid);
  58. for(int i=0; i < static_cast<int>(m_numberOfParameters);i++)
  59. {
  60. if(m_scales[i][0] == 0 )
  61. {
  62. grad[i][0] = 0;
  63. continue;
  64. }
  65. grad[i][0] = ( values[2*i][0] - values[2*i+1][0] )/( 2 * maskWidth[i][0]);
  66. }
  67. return grad;
  68. }
  69. const matrix_type DerivativeBasedOptimizer::getAnalyticalGradient(const matrix_type & x)
  70. {
  71. return (m_maximize == true) ? (m_costFunction->getAnalyticGradient(x) * (-1.0))
  72. : (m_costFunction->getAnalyticGradient(x)) ;
  73. }
  74. const matrix_type DerivativeBasedOptimizer::getAnalyticalHessian(const matrix_type & x)
  75. {
  76. return (m_maximize == true) ? (m_costFunction->getAnalyticHessian(x) * (-1.0))
  77. : (m_costFunction->getAnalyticHessian(x)) ;
  78. }