- //////////////////////////////////////////////////////////////////////
- //
- // DerivativeBasedOptimizer.h: interface of DerivativeBasedOptimizer class.
- //
- // Written by: Matthias Wacker
- //
- //////////////////////////////////////////////////////////////////////
- #ifndef _DERIVATIVE_BASED_OPTIMIZER_H_
- #define _DERIVATIVE_BASED_OPTIMIZER_H_
- #include "core/optimization/blackbox/SimpleOptimizer.h"
- #include "AdaptiveDirectionRandomSearchOptimizer.h"
- namespace OPTIMIZATION
- {
- /*!
- Abstract base class of all derivative-based optimizers.
- */
- class DerivativeBasedOptimizer : public SimpleOptimizer
- {
- public:
- typedef SimpleOptimizer SuperClass;
- ///
- /// Constructor.
- ///
- /// \param loger : OptLogBase* pointer to an existing log class
- ///
- DerivativeBasedOptimizer(OptLogBase *loger=NULL);
- ///
- /// Copy constructor
- /// \param opt .. optimizer to copy
- ///
- DerivativeBasedOptimizer( const DerivativeBasedOptimizer &opt);
- ///
- /// Destructor.
- ///
- virtual ~DerivativeBasedOptimizer();
- ///
- /// Enumeration of the return reasons of this optimizer; it extends the
- /// enumeration of the SuperClass, so all SuperClass return reasons remain valid.
- ///
- enum { SUCCESS_GRADIENTTOL = _to_continue_,
- _to_continue_
- };
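- // A hedged note on the pattern above: it extends the return-reason enum of the
- // SuperClass. SUCCESS_GRADIENTTOL takes over the old value of the inherited
- // _to_continue_, and the re-declared _to_continue_ shadows the inherited one,
- // so a further subclass could keep extending the chain the same way, e.g.
- // (illustrative only, SUCCESS_SOMETHING_ELSE is not a real member):
- //
- //   enum { SUCCESS_SOMETHING_ELSE = _to_continue_,
- //          _to_continue_
- //   };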
- ///
- /// \brief Set the gradient tolerance abort criterion
- ///
- /// If 'active' is true and the gradient norm drops below the given threshold
- /// while iterating, the optimization stops and returns SUCCESS_GRADIENTTOL.
- ///
- /// \param active : bool to activate the criterion (true == active)
- /// \param norm : double representing the threshold
- ///
- void setGradientTol(bool active, double norm);
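- // A minimal sketch of how a derived optimizer's iteration loop might apply this
- // criterion; euclideanNorm is an illustrative helper, not part of this interface:
- //
- //   if (m_gradientTolActive && euclideanNorm(m_gradient) < m_gradientTol)
- //   {
- //     return SUCCESS_GRADIENTTOL;   // gradient norm fell below the threshold
- //   }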
-
- ///
- /// Get the gradient tolerance abort criterion threshold
- /// \return double representing the threshold
- ///
- inline double getGradientTol();
- ///
- /// Get the numerical gradient at position x, using central differences with the step widths given in maskWidth.
- ///
- /// grad(f(x))_i  \approx  [ f( x + maskWidth(i,0) * e_i ) - f( x - maskWidth(i,0) * e_i ) ]
- ///                        / ( 2 * maskWidth(i,0) )
- ///
- /// for all i in [1, ... , m_numberOfParameters], where e_i is the i-th unit vector.
- ///
- const OPTIMIZATION::matrix_type getNumericalGradient(const OPTIMIZATION::matrix_type & x , const OPTIMIZATION::matrix_type & maskWidth);
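- // A self-contained sketch of the central-difference rule documented above, kept
- // outside the library types on purpose: it assumes a scalar cost function given
- // as a std::function over std::vector<double>; all names here are illustrative.
- //
- //   #include <cstddef>
- //   #include <functional>
- //   #include <vector>
- //
- //   std::vector<double> centralDifferenceGradient(
- //       const std::function<double(const std::vector<double>&)> &f,
- //       std::vector<double> x,                     // taken by value: modified in place
- //       const std::vector<double> &maskWidth)      // one step width per parameter
- //   {
- //     std::vector<double> grad(x.size(), 0.0);
- //     for (std::size_t i = 0; i < x.size(); ++i)
- //     {
- //       const double h = maskWidth[i];
- //       x[i] += h;        const double fPlus  = f(x);  // f(x + h * e_i)
- //       x[i] -= 2.0 * h;  const double fMinus = f(x);  // f(x - h * e_i)
- //       x[i] += h;                                     // restore x_i
- //       grad[i] = (fPlus - fMinus) / (2.0 * h);        // central difference
- //     }
- //     return grad;
- //   }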
- ///
- /// Get the analytical gradient of the cost function (if available); the sign is already inverted for maximization.
- ///
- const OPTIMIZATION::matrix_type getAnalyticalGradient(const OPTIMIZATION::matrix_type & x);
-
- ///
- /// Use analytical gradients, if possible
- ///
- void useAnalyticalGradients(bool useAnalyticalGradients);
- ///
- /// Get the analytical Hessian of the cost function (if available);
- /// the sign is already inverted for maximization.
- ///
- const OPTIMIZATION::matrix_type getAnalyticalHessian(const OPTIMIZATION::matrix_type & x);
-
- protected:
- ///
- /// initialize
- ///
- void init();
- ///
- /// the gradient
- ///
- OPTIMIZATION::matrix_type m_gradient;
-
- ///
- /// gradient tolerance threshold
- ///
- double m_gradientTol;
- ///
- /// gradient tolerance active
- ///
- bool m_gradientTolActive;
- ///
- /// use analytical (true) or numerical (false) gradient computation
- ///
- bool m_analyticalGradients;
-
- };//class
- }//namespace
- #endif