//////////////////////////////////////////////////////////////////////
//
// MatrixIterativeOptimizer.cpp: Implementation of the
// MatrixIterativeOptimizer class.
//
// Written by Matthias Wacker
//
//////////////////////////////////////////////////////////////////////

#include "optimization/MatrixIterativeOptimizer.h"
#include <iostream>
#include <cmath>  // fabs
#include <ctime>  // clock(), CLOCKS_PER_SEC
#include "optimization/AdditionalIceUtils.h" // for linSolve
#include "optimization/DownhillSimplexOptimizer.h" // FIXME WACKER: include DownhillSimplexOptimizer for the getRank function -> export it
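/*
  Illustrative usage sketch (not from the original sources): how a concrete
  subclass of MatrixIterativeOptimizer might be driven. The subclass and
  cost-function names below are hypothetical placeholders, and the setters
  are assumed to be the usual Optimizer base-class interface of this library:

      MyCostFunction costFct;              // hypothetical CostFunction implementation
      MyMatrixOptimizer opt(&logger);      // hypothetical subclass implementing updateIterationMatrix()
      opt.setCostFunction(&costFct);
      opt.setParameters(initialParams);    // assumed setter for the start point
      opt.setGradientTol(true, 1.0e-6);    // assumed tolerance setter
      opt.setMaxNumIter(true, 100);        // assumed iteration-limit setter

      int returnReason = opt.optimize();   // one of the SUCCESS_/ERROR_ codes used below
*/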
MatrixIterativeOptimizer::MatrixIterativeOptimizer(OptLogBase *loger) : SuperClass(loger)
{
    m_IterationMatrix = matrix_type(m_numberOfParameters, m_numberOfParameters);
    m_hk              = matrix_type(m_numberOfParameters, 1);
    m_stepSize        = matrix_type(m_numberOfParameters, 1);
    m_oldGradient     = matrix_type(m_numberOfParameters, 1);
    m_oldParams       = matrix_type(m_numberOfParameters, 1);
    m_lambdak         = 1.0;
}

MatrixIterativeOptimizer::MatrixIterativeOptimizer(const MatrixIterativeOptimizer &opt) : SuperClass(opt)
{
    m_IterationMatrix = opt.m_IterationMatrix;
    m_hk              = opt.m_hk;
    m_lin             = opt.m_lin;
    m_stepSize        = opt.m_stepSize;
    m_lambdak         = opt.m_lambdak;
    m_oldParams       = opt.m_oldParams;
    m_oldGradient     = opt.m_oldGradient;
}

MatrixIterativeOptimizer::~MatrixIterativeOptimizer()
{
}
void MatrixIterativeOptimizer::init()
{
    if(m_loger)
        m_loger->logTrace("entering MatrixIterativeOptimizer::init ...\n");

    SuperClass::init();

    m_lin = ArmijoLineSearcher(m_costFunction, m_loger);
    //if(m_verbose == true) // FIXME: commented out, as the output is not human readable
    //    m_lin.setVerbose(true);

    if(m_maximize == true)
    {
        m_lin.setMaximize(true);
    }

    // start with the identity matrix; the first descent direction
    // is then the plain negative gradient (steepest descent)
    m_IterationMatrix = matrix_type(m_numberOfParameters, m_numberOfParameters);
    for(int i = 0; i < static_cast<int>(m_numberOfParameters); i++)
    {
        m_IterationMatrix[i][i] = 1.0;
    }

    if(m_loger)
        m_loger->logTrace("leaving MatrixIterativeOptimizer::init ...\n");
}
MatrixIterativeOptimizer & MatrixIterativeOptimizer::operator=(const MatrixIterativeOptimizer &opt)
{
    /* avoid self-assignment */
    if(this != &opt)
    {
        /* operator= of SuperClass */
        SuperClass::operator=(opt);

        /* own members */
        m_IterationMatrix = opt.m_IterationMatrix;
        m_hk              = opt.m_hk;
        m_lin             = opt.m_lin;
        m_stepSize        = opt.m_stepSize;
        m_lambdak         = opt.m_lambdak;
        m_oldParams       = opt.m_oldParams;
        m_oldGradient     = opt.m_oldGradient;
    }

    return *this;
}
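/*
  Descent-direction computation: solves the linear system

      B_k * d_k = -gradf(x_k)

  for d_k (stored in m_hk), where B_k is m_IterationMatrix as maintained
  via updateIterationMatrix() -- presumably by the concrete subclass, e.g.
  the Hessian for a Newton step or a quasi-Newton approximation of it.
  For B_k = I this reduces to steepest descent, d_k = -gradf(x_k).
*/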
void MatrixIterativeOptimizer::computeDescentDirection()
{
    if(m_loger)
        m_loger->logTrace("entering MatrixIterativeOptimizer::computeDescentDirection ...\n");

    if(m_numberOfParameters == 1)
    {
        // scalar case: divide directly instead of calling the solver
        if(fabs(m_IterationMatrix[0][0]) >= 1.0e-20)
        {
            m_hk[0][0] = -m_gradient[0][0] / m_IterationMatrix[0][0];
        }
        else
        {
            m_hk = -m_gradient;
        }
    }
    else
    {
        //if(getRank(m_IterationMatrix, 1.0e-5) == m_numberOfParameters) // FIXME: rank check disabled
        if(true)
        {
            /* solve the system of equations for the descent direction */
            matrix_type tmp = (m_gradient * -1.0);
            linSolve(m_IterationMatrix, m_hk, tmp);

            /* direction (numerically) orthogonal to the gradient? -> use the gradient instead */
            // note: !m_hk * m_gradient is the scalar product h^T g
            if(fabs((!m_hk * m_gradient)[0][0]) <= 1.0e-3)
            {
                m_hk = -m_gradient;
            }
        }
        else
        {
            m_hk = -m_gradient;
        }
    }

    if(m_loger)
        m_loger->logTrace("leaving MatrixIterativeOptimizer::computeDescentDirection ...\n");
}
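/*
  Resetting m_IterationMatrix to the identity discards all curvature
  information gathered so far; the next call to computeDescentDirection()
  then yields the plain negative gradient again.
*/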
void MatrixIterativeOptimizer::resetMatrix()
{
    m_IterationMatrix = matrix_type(m_numberOfParameters, m_numberOfParameters);
    for(int i = 0; i < static_cast<int>(m_numberOfParameters); i++)
    {
        m_IterationMatrix[i][i] = 1.0;
    }
}

void MatrixIterativeOptimizer::setStepSize(const matrix_type & stepSize)
{
    m_stepSize = stepSize;
}
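/*
  Main optimization routine, roughly:

    1. evaluate f and gradf at the start point and take one
       steepest-descent step with an Armijo step length,
    2. loop until an abort criterion fires: update B_k, solve
       B_k * d_k = -gradf, optimize the step length, update the
       parameters, and check the gradient / parameter / function
       tolerances as well as the iteration and time limits.
*/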
int MatrixIterativeOptimizer::optimize()
{
    if(m_loger)
        m_loger->logTrace("entering MatrixIterativeOptimizer::optimize ...\n");

    init();

    bool abort = false;

    /******************************
        start time criterion
    ******************************/
    m_startTime = clock();

    /******************************
        compute start value
        and first gradient
    ******************************/
    m_currentCostFunctionValue = evaluateCostFunction(m_parameters);

    m_gradient = (m_analyticalGradients == true &&
                  (m_costFunction->hasAnalyticGradient() == true)) ?
                      getAnalyticalGradient(m_parameters) :
                      getNumericalGradient(m_parameters, m_stepSize);

    m_hk = m_gradient * -1.0;

    // normalize the descent direction and the gradient
    matrix_type hk_norm;
    double factor = m_hk.Norm(0);
    if(fabs(factor) > 1.0e-12)
    {
        hk_norm = m_hk * (1.0 / factor);
    }
    matrix_type grad_norm;
    double factor2 = m_gradient.Norm(0);
    if(fabs(factor2) > 1.0e-12)
    {
        grad_norm = m_gradient * (1.0 / factor2);
    }

    /*******************************
        optimize step length (Armijo)
    *******************************/
    //m_lin.setXkGkDk(m_parameters, m_gradient, hk_norm);
    m_lin.setXkGkDk(m_parameters, grad_norm, hk_norm);
    m_lambdak = m_lin.optimize();
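    /*
      Step-length background: the Armijo rule (backtracking line search)
      accepts a step length lambda that gives a sufficient decrease,

          f(x_k + lambda * d_k) <= f(x_k) + sigma * lambda * gradf(x_k)^T d_k

      for some sigma in (0,1); signs flip when maximizing. The exact
      schedule (start value, shrinking factor) is up to ArmijoLineSearcher;
      note that both the gradient and the direction are passed in
      normalized form here.
    */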
    if(m_verbose == true)
    {
        std::cout << "stepsize lambda: " << m_lambdak << std::endl;
        std::cout << "in direction: " << std::endl;
        for(int r = 0; r < static_cast<int>(m_numberOfParameters); r++)
        {
            std::cout << hk_norm[r][0] << " ";
        }
        std::cout << std::endl;
    }

    /*******************************
        update the parameters
    *******************************/
    m_oldParams = m_parameters;
    m_parameters = m_parameters + hk_norm * m_lambdak;

    /******************************
        check abort criterion
        for the gradient
    ******************************/
    if(m_gradientTolActive)
    {
        if(m_gradient.Norm(0) < m_gradientTol)
        {
            m_returnReason = SUCCESS_GRADIENTTOL;
            abort = true;
        }
    }
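    /*
      Note: the step above used B_0 = I, i.e. plain steepest descent.
      Inside the loop below, updateIterationMatrix() (implemented
      elsewhere, presumably by the concrete subclass) refines the
      iteration matrix, so later directions become Newton- or
      quasi-Newton-like.
    */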
    /* main optimization loop */
    while(abort == false)
    {
        /******************************
            increase the iteration counter
        ******************************/
        m_numIter++;

        /******************************
            print the current iteration
        ******************************/
        if(m_verbose == true)
        {
            std::cout << "MatrixIterativeOptimizer :: Iteration: " << m_numIter << " : current parameters : ";
            for(int r = 0; r < static_cast<int>(m_numberOfParameters); r++)
            {
                std::cout << m_parameters[r][0] << "\t ";
            }
            std::cout << " value: " << m_currentCostFunctionValue << std::endl;
        }

        /******************************
            check the iteration limit
        ******************************/
        if(m_maxNumIterActive)
        {
            if(m_numIter >= m_maxNumIter)
            {
                if(m_verbose == true)
                {
                    std::cout << "MatrixIterativeOptimizer :: aborting because of numIter" << std::endl;
                }

                /* set the according return status and return */
                m_returnReason = SUCCESS_MAXITER;
                abort = true;
                continue;
            }
        }
        /******************************
            compute the new gradient
        ******************************/
        m_oldGradient = m_gradient;
        m_gradient = (m_analyticalGradients == true &&
                      (m_costFunction->hasAnalyticGradient() == true)) ?
                          getAnalyticalGradient(m_parameters) :
                          getNumericalGradient(m_parameters, m_stepSize);

        /******************************
            check the gradient tolerance
        ******************************/
        if(m_gradientTolActive)
        {
            if(m_gradient.Norm(0) < m_gradientTol)
            {
                if(m_verbose == true)
                {
                    std::cout << "MatrixIterativeOptimizer :: aborting because of GradientTol" << std::endl;
                }
                m_returnReason = SUCCESS_GRADIENTTOL;
                abort = true;
                continue;
            }
        }

        /******************************
            update the iteration matrix
        ******************************/
        //matrix_type oldMatrix = m_IterationMatrix;
        updateIterationMatrix();

        /******************************
            compute a descent direction
        ******************************/
        computeDescentDirection();

        // check whether the computed direction really is a descent
        // direction; if not, reset the iteration matrix and take the
        // negative gradient as the iteration direction
        if((!m_hk * m_gradient)[0][0] > 0.0)
        {
            std::cout << " resetting matrix" << std::endl;
            resetMatrix();
            m_hk = m_gradient * -1.0;
        }
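        /*
          Background for the reset above: d is a descent direction iff
          gradf(x)^T d < 0. For a symmetric positive definite B_k, the
          solution of B_k d = -gradf satisfies
          gradf^T d = -gradf^T B_k^{-1} gradf < 0, so a positive scalar
          product indicates that B_k has lost (numerical) positive
          definiteness and its curvature information should be discarded.
        */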
        double factor = m_hk.Norm(0);
        if(fabs(factor) > 1.0e-12)
        {
            hk_norm = m_hk * (1.0 / factor);
        }
        matrix_type grad_norm;
        double factor2 = m_gradient.Norm(0);
        if(fabs(factor2) > 1.0e-12)
        {
            grad_norm = m_gradient * (1.0 / factor2);
        }

        /*******************************
            optimize step length (Armijo)
        *******************************/
        //m_lin.setXkGkDk(m_parameters, m_gradient, hk_norm);
        m_lin.setXkGkDk(m_parameters, grad_norm, hk_norm);
        m_lambdak = m_lin.optimize();
        /*******************************
            update the parameters
        *******************************/
        m_oldParams = m_parameters;
        m_parameters = m_parameters + hk_norm * m_lambdak;

        if(m_verbose == true)
        {
            std::cout << "MatrixIterativeOptimizer :: Iterating with lambda = " << m_lambdak << std::endl;

            std::cout << "MatrixIterativeOptimizer :: in direction : ";
            for(int mu = 0; mu < static_cast<int>(m_numberOfParameters); mu++)
            {
                std::cout << hk_norm[mu][0] << "\t ";
            }
            std::cout << std::endl;

            std::cout << "MatrixIterativeOptimizer :: Gradient was ";
            for(int nu = 0; nu < static_cast<int>(m_numberOfParameters); nu++)
            {
                std::cout << m_gradient[nu][0] << "\t ";
            }
            std::cout << std::endl;
        }
        /*******************************
            check whether the new parameters
            are within bounds; also check
            paramTol, funcTol and maxSeconds
        *******************************/
        if(!checkParameters(m_parameters))
        {
            if(m_verbose == true)
            {
                std::cout << "MatrixIterativeOptimizer :: aborting because of parameter bounds" << std::endl;
            }

            /* set the according return status, restore the last parameters and return */
            m_returnReason = ERROR_XOUTOFBOUNDS;
            m_parameters = m_oldParams;
            abort = true;
            continue;
        }

        /*
            get the new cost function value
        */
        double oldFuncValue = m_currentCostFunctionValue;
        m_currentCostFunctionValue = evaluateCostFunction(m_parameters);
        /*
            check paramTol
        */
        if(m_paramTolActive)
        {
            if((m_parameters - m_oldParams).Norm(0) < m_paramTol)
            {
                if(m_verbose == true)
                {
                    std::cout << "MatrixIterativeOptimizer :: aborting because of paramTol" << std::endl;
                }

                /* set the according return status and return */
                m_returnReason = SUCCESS_PARAMTOL;
                /*m_parameters = oldParams;*/
                abort = true;
                continue;
            }
        }

        /*
            check funcTol
        */
        if(m_funcTolActive)
        {
            if(fabs(m_currentCostFunctionValue - oldFuncValue) < m_funcTol)
            {
                if(m_verbose == true)
                {
                    std::cout << "MatrixIterativeOptimizer :: aborting because of funcTol" << std::endl;
                }

                /* set the according return status and return */
                m_returnReason = SUCCESS_FUNCTOL;
                abort = true;
                continue;
            }
        }
        /*
            check the optimization time limit, if active
        */
        if(m_maxSecondsActive)
        {
            m_currentTime = clock();

            /* time limit exceeded? */
            if(((float)(m_currentTime - m_startTime) / CLOCKS_PER_SEC) >= m_maxSeconds)
            {
                if(m_verbose == true)
                {
                    std::cout << "MatrixIterativeOptimizer :: aborting because of time limit" << std::endl;
                }

                /* set the according return status, restore the last parameters and return */
                m_returnReason = SUCCESS_TIMELIMIT;
                m_parameters = m_oldParams;
                abort = true;
                continue;
            }
        }
    }
    if(m_verbose)
    {
        std::cout << "MatrixIterativeOptimizer :: RESULT: ";
        for(int r = 0; r < static_cast<int>(m_numberOfParameters); r++)
        {
            std::cout << m_parameters[r][0] << " ";
        }
        std::cout << " value: " << m_currentCostFunctionValue << std::endl;
    }

    if(m_loger)
        m_loger->logTrace("leaving MatrixIterativeOptimizer::optimize ...\n");

    return m_returnReason;
}