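/**
 * Unit tests for the GradientDescentOptimizer of the OPTIMIZATION library,
 * implemented with CppUnit. Both tests minimize MyCostFunction and compare
 * the optimized parameters against known target values.
 */
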
#ifdef NICE_USELIB_CPPUNIT

#include <string>
#include <exception>
#include <map>

#include "TestGradientDescent.h"
#include "MyCostFunction.h"

using namespace std;
using namespace OPTIMIZATION;

const bool verboseStartEnd = true;
const bool verbose = true;
//const bool verbose = false;

CPPUNIT_TEST_SUITE_REGISTRATION( TestGradientDescent );

void TestGradientDescent::setUp() {
}

void TestGradientDescent::tearDown() {
}

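// Minimize MyCostFunction in one dimension, starting from 2.0, and check
// that gradient descent converges to the expected optimum at 4.2.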
void TestGradientDescent::testGD_1Dim()
{
  if (verboseStartEnd)
    std::cerr << "================== TestGradientDescent::testGD_1Dim ===================== " << std::endl;

  int dim (1);

  CostFunction *func = new MyCostFunction(dim, verbose);

  //initial guess: 2.0
  OPTIMIZATION::matrix_type initialParams (dim, 1);
  initialParams.set(2.0);

  //we use a dimension scale of 1.0
  OPTIMIZATION::matrix_type scales (dim, 1);
  scales.set(1.0);

  //setup the optimization problem
  SimpleOptProblem optProblem ( func, initialParams, scales );
  optProblem.setMaximize(false);

  GradientDescentOptimizer optimizer;

  //we search with step-width of 1.0
  OPTIMIZATION::matrix_type searchSteps (dim, 1);
  searchSteps(0,0) = 1.0;

  optimizer.setVerbose(true);
  optimizer.setStepSize( searchSteps );
  optimizer.setMaxNumIter(true, 1000);
  // optimizer.setFuncTol(true, 1e-8);

  optimizer.optimizeProb ( optProblem );

  OPTIMIZATION::matrix_type optimizedParams (optProblem.getAllCurrentParams());

  double goal(4.2);

  if (verbose)
    std::cerr << "1d optimization -- result " << optimizedParams(0,0) << " -- goal: " << goal << std::endl;
  // CPPUNIT_ASSERT_DOUBLES_EQUAL expects (expected, actual, tolerance)
  CPPUNIT_ASSERT_DOUBLES_EQUAL( goal, optimizedParams(0,0), 1e-5 /* tolerance */);

  if (verboseStartEnd)
    std::cerr << "================== TestGradientDescent::testGD_1Dim done ===================== " << std::endl;
}
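
// Minimize MyCostFunction in two dimensions, starting from (2.0, 2.0), and
// check that gradient descent converges to the expected optimum (4.7, 1.1).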
void TestGradientDescent::testGD_2Dim()
{
  if (verboseStartEnd)
    std::cerr << "================== TestGradientDescent::testGD_2Dim ===================== " << std::endl;

  int dim (2);

  CostFunction *func = new MyCostFunction(dim, verbose);

  //initial guess: 2.0
  OPTIMIZATION::matrix_type initialParams (dim, 1);
  initialParams.set(2.0);

  //we use a dimension scale of 1.0
  OPTIMIZATION::matrix_type scales (dim, 1);
  scales.set(1.0);

  //setup the optimization problem
  SimpleOptProblem optProblem ( func, initialParams, scales );
  optProblem.setMaximize(false);

  GradientDescentOptimizer optimizer;

  //we search with step-width of 1.0
  OPTIMIZATION::matrix_type searchSteps (dim, 1);
  searchSteps(0,0) = 1.0;
  searchSteps(1,0) = 1.0;

  optimizer.setVerbose(true);
  optimizer.setStepSize( searchSteps );
  optimizer.setMaxNumIter(true, 1000);
  //optimizer.setMinimalGradientMagnitude(1e-25);
  // optimizer.setFuncTol(true, 1e-8);

  optimizer.optimizeProb ( optProblem );

  OPTIMIZATION::matrix_type optimizedParams (optProblem.getAllCurrentParams());

  double goalFirstDim(4.7);
  double goalSecondDim(1.1);

  if (verbose)
  {
    std::cerr << "2d optimization 1st dim -- result " << optimizedParams(0,0) << " -- goal: " << goalFirstDim << std::endl;
- std::cerr << "2d optimization 1st dim-- result " << optimizedParams(1,0) << " -- goal: " << goalSecondDim << std::endl;
  }

  // CPPUNIT_ASSERT_DOUBLES_EQUAL expects (expected, actual, tolerance)
  CPPUNIT_ASSERT_DOUBLES_EQUAL( goalFirstDim, optimizedParams(0,0), 1e-5 /* tolerance */);
  CPPUNIT_ASSERT_DOUBLES_EQUAL( goalSecondDim, optimizedParams(1,0), 1e-5 /* tolerance */);

  if (verboseStartEnd)
    std::cerr << "================== TestGradientDescent::testGD_2Dim done ===================== " << std::endl;
}

#endif