
//////////////////////////////////////////////////////////////////////
//
// DerivativeBasedOptimizer.h: interface of DerivativeBasedOptimizer class.
//
// Written by: Matthias Wacker
//
//////////////////////////////////////////////////////////////////////

#ifndef _DERIVATIVE_BASED_OPTIMIZER_H_
#define _DERIVATIVE_BASED_OPTIMIZER_H_

#include "core/optimization/blackbox/SimpleOptimizer.h"
#include "AdaptiveDirectionRandomSearchOptimizer.h"

namespace OPTIMIZATION
{
  /*!
      Abstract base class of all derivative-based optimizers.
  */
  class DerivativeBasedOptimizer : public SimpleOptimizer
  {
  public:
    typedef SimpleOptimizer SuperClass;

    ///
    /// Constructor.
    ///
    /// \param loger : OptLogBase * to an existing log class
    ///
    DerivativeBasedOptimizer(OptLogBase *loger = NULL);

    ///
    /// Copy constructor.
    /// \param opt .. optimizer to copy
    ///
    DerivativeBasedOptimizer(const DerivativeBasedOptimizer &opt);

    ///
    /// Destructor.
    ///
    virtual ~DerivativeBasedOptimizer();

    ///
    /// Enumeration of the return reasons of this optimizer; it contains all
    /// elements of the SuperClass enumeration and extends it by
    /// SUCCESS_GRADIENTTOL.
    ///
    enum
    {
      SUCCESS_GRADIENTTOL = _to_continue_,
      _to_continue_
    };
    ///
    /// \brief Set the gradient tolerance abort criterion.
    ///
    /// While iterating, if the gradient norm drops below the given threshold,
    /// the optimization stops and returns SUCCESS_GRADIENTTOL, provided the
    /// criterion is active.
    ///
    /// \param active : bool to activate the criterion (true == active)
    /// \param norm : double representing the threshold
    ///
    void setGradientTol(bool active, double norm);
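    ///
    /// Illustration (not part of the original interface): a derived optimizer
    /// would typically evaluate this criterion inside its iteration loop
    /// roughly as sketched below; the Euclidean-norm computation shown here is
    /// an assumption, not code taken from this class.
    ///
    /// \code
    /// // sketch only: check the gradient tolerance abort criterion
    /// if (m_gradientTolActive)
    /// {
    ///   double squaredNorm = 0.0;
    ///   for (unsigned int i = 0; i < m_numberOfParameters; ++i)
    ///   {
    ///     squaredNorm += m_gradient(i, 0) * m_gradient(i, 0);
    ///   }
    ///   if (sqrt(squaredNorm) < m_gradientTol)
    ///   {
    ///     return SUCCESS_GRADIENTTOL; // gradient norm below threshold
    ///   }
    /// }
    /// \endcode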
    ///
    /// Get the gradient tolerance abort criterion.
    /// \return double representing the threshold
    ///
    inline double getGradientTol();

    ///
    /// Get the numerical gradient at position x, using central differences
    /// with a mask width of maskWidth:
    ///
    /// grad(f(x))_i \approx
    ///    [ f( x + (0, ... ,0, maskWidth(i,0), 0, ... ,0) )
    ///    - f( x - (0, ... ,0, maskWidth(i,0), 0, ... ,0) ) ]
    ///    / (2 * maskWidth(i,0))
    ///
    /// \forall i \in [1, ... , m_numberOfParameters]
    ///
    const OPTIMIZATION::matrix_type getNumericalGradient(const OPTIMIZATION::matrix_type &x, const OPTIMIZATION::matrix_type &maskWidth);
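    ///
    /// Illustration (sketch, not the actual implementation): the central
    /// difference above corresponds roughly to the loop below. The helper
    /// evaluateCostFunction() is an assumed SuperClass call and is used here
    /// for illustration only.
    ///
    /// \code
    /// // sketch only: central-difference gradient, one coordinate at a time
    /// OPTIMIZATION::matrix_type grad(m_numberOfParameters, 1);
    /// for (unsigned int i = 0; i < m_numberOfParameters; ++i)
    /// {
    ///   OPTIMIZATION::matrix_type xPlus(x), xMinus(x);
    ///   xPlus(i, 0)  += maskWidth(i, 0);
    ///   xMinus(i, 0) -= maskWidth(i, 0);
    ///   grad(i, 0) = (evaluateCostFunction(xPlus) - evaluateCostFunction(xMinus))
    ///                / (2.0 * maskWidth(i, 0));
    /// }
    /// \endcode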
    ///
    /// Get the analytical gradient of the cost function (if available), with
    /// the sign already inverted for maximization.
    ///
    const OPTIMIZATION::matrix_type getAnalyticalGradient(const OPTIMIZATION::matrix_type &x);

    ///
    /// Use analytical gradients, if possible.
    ///
    void useAnalyticalGradients(bool useAnalyticalGradients);

    ///
    /// Get the analytical Hessian of the cost function (if available), with
    /// the sign already inverted for maximization.
    ///
    const OPTIMIZATION::matrix_type getAnalyticalHessian(const OPTIMIZATION::matrix_type &x);
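    ///
    /// Illustration (conceptual sketch, not part of this interface): a
    /// Newton-type subclass could combine the analytical gradient and Hessian
    /// into an update step as sketched below; the invert() method on
    /// matrix_type is assumed here for illustration only.
    ///
    /// \code
    /// // sketch only: one Newton step on the returned gradient and Hessian
    /// OPTIMIZATION::matrix_type g = getAnalyticalGradient(x);
    /// OPTIMIZATION::matrix_type H = getAnalyticalHessian(x);
    /// x = x - H.invert() * g; // invert() is an assumption for this sketch
    /// \endcode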
  protected:
    ///
    /// initialize
    ///
    void init();

    ///
    /// the gradient
    ///
    OPTIMIZATION::matrix_type m_gradient;

    ///
    /// gradient tolerance threshold
    ///
    double m_gradientTol;

    ///
    /// gradient tolerance active
    ///
    bool m_gradientTolActive;

    ///
    /// use numerical or analytical gradient computation
    ///
    bool m_analyticalGradients;

  }; // class
} // namespace

#endif