//////////////////////////////////////////////////////////////////////
//
// DerivativeBasedOptimizer.cpp: Implementation of the
// DerivativeBasedOptimizer class.
//
// Written by Matthias Wacker
//
//////////////////////////////////////////////////////////////////////

#include "optimization/DerivativeBasedOptimizer.h"

using namespace optimization;

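// Default construction: analytical gradients enabled, gradient-norm
// stopping criterion disabled, tolerance preset to 1.0e-3.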
DerivativeBasedOptimizer::DerivativeBasedOptimizer(OptLogBase *logger) : SuperClass(logger)
{
    m_analyticalGradients = true;   // use gradients supplied by the cost function by default
    m_gradientTolActive   = false;  // gradient-norm stopping criterion off by default
    m_gradientTol         = 1.0e-3;
}

DerivativeBasedOptimizer::DerivativeBasedOptimizer(const DerivativeBasedOptimizer &opt) : SuperClass(opt)
{
    m_analyticalGradients = opt.m_analyticalGradients;
    m_gradientTolActive   = opt.m_gradientTolActive;
    m_gradientTol         = opt.m_gradientTol;
    m_gradient            = opt.m_gradient;
}

DerivativeBasedOptimizer::~DerivativeBasedOptimizer()
{
}

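// Enable or disable the gradient-norm stopping criterion and set its
// threshold: when active, optimization can stop once the gradient norm
// falls below 'norm'.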
void DerivativeBasedOptimizer::setGradientTol(bool active, double norm)
{
    m_gradientTol       = norm;
    m_gradientTolActive = active;
}

double DerivativeBasedOptimizer::getGradientTol()
{
    return m_gradientTol;
}

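// Calls the base-class initialization and allocates the gradient vector
// with one entry per parameter.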
void DerivativeBasedOptimizer::init()
{
    SuperClass::init();

    m_gradient = matrix_type(m_numberOfParameters, 1);
}

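// Switch between analytical gradients provided by the cost function and
// a numerical (finite-difference) approximation.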
void DerivativeBasedOptimizer::useAnalyticalGradients(bool useAnalyticalGradients)
{
    m_analyticalGradients = useAnalyticalGradients;
}

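// Approximates the gradient of the cost function at x by central differences.
// For each parameter i, two perturbed copies of x are stored as columns of
// 'grid' (x + maskWidth[i] * e_i in column 2*i, x - maskWidth[i] * e_i in
// column 2*i+1), all points are evaluated in a single call to
// evaluateSetCostFunction, and the i-th gradient entry is
// (f(x + h e_i) - f(x - h e_i)) / (2 * h) with h = maskWidth[i].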
const matrix_type DerivativeBasedOptimizer::getNumericalGradient(const matrix_type &x, const matrix_type &maskWidth)
{
    matrix_type grad(m_numberOfParameters, 1);
    matrix_type grid(m_numberOfParameters, 2 * m_numberOfParameters);

    // column 2*i holds x + maskWidth[i] * e_i, column 2*i+1 holds x - maskWidth[i] * e_i
    for(int i = 0; i < static_cast<int>(m_numberOfParameters); i++)
    {
        for(int j = 0; j < 2 * static_cast<int>(m_numberOfParameters); j++)
        {
            grid[i][j] = x[i][0] + ((j == 2 * i)     ?  maskWidth[i][0] : 0.0)
                                 + ((j == 2 * i + 1) ? -maskWidth[i][0] : 0.0);
        }
    }

    // evaluate the cost function at all grid points in one call
    matrix_type values = evaluateSetCostFunction(grid);

    for(int i = 0; i < static_cast<int>(m_numberOfParameters); i++)
    {
        // parameters with scale 0 are held fixed; their gradient entry stays 0
        if(m_scales[i][0] == 0)
        {
            grad[i][0] = 0;
            continue;
        }

        // central difference: (f(x + h e_i) - f(x - h e_i)) / (2 h)
        grad[i][0] = (values[2 * i][0] - values[2 * i + 1][0]) / (2 * maskWidth[i][0]);
    }

    return grad;
}

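// Returns the analytic gradient of the cost function; when maximizing, the
// gradient (and the Hessian below) is negated so that the optimizer core can
// always treat the problem as a minimization.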
const matrix_type DerivativeBasedOptimizer::getAnalyticalGradient(const matrix_type &x)
{
    return m_maximize ? (m_costFunction->getAnalyticGradient(x) * (-1.0))
                      : (m_costFunction->getAnalyticGradient(x));
}

const matrix_type DerivativeBasedOptimizer::getAnalyticalHessian(const matrix_type &x)
{
    return m_maximize ? (m_costFunction->getAnalyticHessian(x) * (-1.0))
                      : (m_costFunction->getAnalyticHessian(x));
}