DerivativeBasedOptimizer.h

//////////////////////////////////////////////////////////////////////
//
// DerivativeBasedOptimizer.h: interface of DerivativeBasedOptimizer class.
//
// Written by: Matthias Wacker
//
//////////////////////////////////////////////////////////////////////

#ifndef _DERIVATIVE_BASED_OPTIMIZER_H_
#define _DERIVATIVE_BASED_OPTIMIZER_H_

#include "optimization/SimpleOptimizer.h"
/*!
    \class DerivativeBasedOptimizer
    \brief Abstract base class of all derivative-based optimizers.
*/
class DerivativeBasedOptimizer : public SimpleOptimizer
{
public:

    typedef SimpleOptimizer SuperClass;

    ///
    /// Constructor.
    ///
    /// \param loger : OptLogBase * to an existing log class
    ///
    DerivativeBasedOptimizer(OptLogBase *loger = NULL);

    ///
    /// Copy constructor.
    /// \param opt : the optimizer to copy
    ///
    DerivativeBasedOptimizer(const DerivativeBasedOptimizer &opt);

    ///
    /// Destructor.
    ///
    virtual ~DerivativeBasedOptimizer();
    ///
    /// Enumeration of the return reasons of this optimizer;
    /// it contains all return reasons of the SuperClass optimizer.
    ///
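    /// Note: SUCCESS_GRADIENTTOL takes the value of the SuperClass'
    /// _to_continue_, and _to_continue_ is then re-declared as the next value,
    /// so the numbering of return reasons continues seamlessly.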
    enum {  SUCCESS_GRADIENTTOL = _to_continue_,
            _to_continue_
    };
    ///
    /// \brief Set the gradient tolerance abort criterion.
    ///
    /// While iterating, if the gradient norm falls below the given threshold
    /// and 'active' is true, the optimization stops and returns
    /// SUCCESS_GRADIENTTOL.
    ///
    /// \param active : bool to activate the criterion (true == active)
    /// \param norm : the threshold for the gradient norm
    ///
    void setGradientTol(bool active, double norm);
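    // A minimal usage sketch (MyGradientOptimizer is a hypothetical subclass of
    // this abstract class; shown for illustration only):
    //
    //   MyGradientOptimizer opt;
    //   opt.setGradientTol(true, 1e-6);      // stop once the gradient norm < 1e-6
    //   double tol = opt.getGradientTol();   // returns 1e-6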
    ///
    /// Get the gradient tolerance abort criterion.
    /// \return double representing the threshold
    ///
    inline double getGradientTol();
    ///
    /// Get the numerical gradient at position x, using central differences
    /// with a mask width of maskWidth:
    ///
    /// grad(f(x))_i \approx
    ///     [ f( x + (0, ... ,0, maskWidth(i,0), 0, ... ,0) )
    ///     - f( x - (0, ... ,0, maskWidth(i,0), 0, ... ,0) ) ]
    ///     / (2 * maskWidth(i,0))
    ///
    /// \forall i \in [1, ... ,m_numberOfParameters]
    ///
    const optimization::matrix_type getNumericalGradient(const optimization::matrix_type &x, const optimization::matrix_type &maskWidth);
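    // A minimal sketch of the central-difference scheme documented above
    // (illustration only, not necessarily how this class implements it;
    // evaluateCostFunction stands for a single evaluation of the cost function,
    // assumed to be available through the SuperClass):
    //
    //   optimization::matrix_type grad(m_numberOfParameters, 1);
    //   for (unsigned int i = 0; i < m_numberOfParameters; ++i)
    //   {
    //       optimization::matrix_type xPlus(x), xMinus(x);
    //       xPlus(i, 0)  += maskWidth(i, 0);
    //       xMinus(i, 0) -= maskWidth(i, 0);
    //       grad(i, 0) = ( evaluateCostFunction(xPlus) - evaluateCostFunction(xMinus) )
    //                    / (2.0 * maskWidth(i, 0));
    //   }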
    ///
    /// Get the analytical gradient of the cost function (if available);
    /// the sign is already inverted for maximization.
    ///
    const optimization::matrix_type getAnalyticalGradient(const optimization::matrix_type &x);
    ///
    /// Use analytical gradients, if available.
    ///
    void useAnalyticalGradients(bool useAnalyticalGradients);
    ///
    /// Get the analytical Hessian of the cost function (if available);
    /// the sign is already inverted for maximization.
    ///
    const optimization::matrix_type getAnalyticalHessian(const optimization::matrix_type &x);
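    // General background (not a statement about this class' internals): a
    // Newton-type subclass would typically combine the two accessors above
    // into a step of the form
    //
    //   delta = - getAnalyticalHessian(x)^{-1} * getAnalyticalGradient(x)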
protected:

    ///
    /// initialize
    ///
    void init();

    ///
    /// the gradient
    ///
    optimization::matrix_type m_gradient;

    ///
    /// gradient tolerance threshold
    ///
    double m_gradientTol;

    ///
    /// gradient tolerance active
    ///
    bool m_gradientTolActive;

    ///
    /// use numerical or analytical gradient computation
    ///
    bool m_analyticalGradients;
};

#endif