/**
 * @file TestGPHIKRegression.cpp
 * @brief CppUnit test case to verify that GPHIKRegression works as desired.
 * @author Alexander Freytag
 * @date 16-01-2014 (dd-mm-yyyy)
 */
#ifdef NICE_USELIB_CPPUNIT

// STL includes
#include <cmath>
#include <fstream>
#include <iostream>
#include <vector>

// NICE-core includes
#include <core/basics/Config.h>
#include <core/basics/Timer.h>

// gp-hik-core includes
#include "gp-hik-core/GPHIKRegression.h"

#include "TestGPHIKRegression.h"
using namespace std;  // standard library
using namespace NICE; // nice-core

const bool verboseStartEnd = true;
const bool verbose = false;
const bool writeRegressionObjectsForVerification = false;

CPPUNIT_TEST_SUITE_REGISTRATION( TestGPHIKRegression );
void TestGPHIKRegression::setUp() {
}

void TestGPHIKRegression::tearDown() {
}
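
/**
 * @brief Read a toy-example data file: a dense feature matrix, a (here unused)
 *        binary label vector, and the real-valued regression targets.
 *        Fails the test via a CppUnit assertion if the file cannot be opened.
 */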
void readData ( const std::string & filename, NICE::Matrix & data, NICE::Vector & yValues )
{
  std::ifstream ifs ( filename.c_str() , ios::in );

  if ( ifs.good() )
  {
    NICE::Vector tmp;
    ifs >> data;
    ifs >> tmp;     // binary labels (yBin) -- not needed for regression
    ifs >> yValues;
    ifs.close();
  }
  else
  {
    std::cerr << "Unable to read data from file " << filename << " -- aborting." << std::endl;
    CPPUNIT_ASSERT ( ifs.good() );
  }
}
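
/**
 * @brief Run the given regression method on every row of data and accumulate
 *        the squared error (L2 loss) against the ground-truth targets yValues.
 */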
void evaluateRegressionMethod ( double & regressionLoss,
                                const NICE::GPHIKRegression * regressionMethod,
                                const NICE::Matrix & data,
                                const NICE::Vector & yValues
                              )
{
  regressionLoss = 0.0;

  int i_loopEnd ( (int)data.rows() );

  for (int i = 0; i < i_loopEnd ; i++)
  {
    NICE::Vector example ( data.getRow(i) );
    double result;

    // estimate with the previously trained regression method
    regressionMethod->estimate( &example, result );

    if ( verbose )
      std::cerr << "i: " << i << " gt: " << yValues[i] << " result: " << result << std::endl;

    // use the L2 loss (squared error) for evaluation
    regressionLoss += pow( yValues[i] - result, 2 );
  }
}
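
/**
 * @brief Train a GPHIKRegression object with very low built-in noise and verify
 *        that the L2 loss on the training (hold-in) data is numerically zero.
 */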
void TestGPHIKRegression::testRegressionHoldInData()
{
  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionHoldInData ===================== " << std::endl;

  NICE::Config conf;
  conf.sB ( "GPHIKRegression", "eig_verbose", false);
  conf.sS ( "GPHIKRegression", "optimization_method", "downhillsimplex");
  // set pretty low built-in noise for hold-in regression estimation
  conf.sD ( "GPHIKRegression", "noise", 1e-6 );

  std::string s_trainData = conf.gS( "main", "trainData", "toyExampleSmallScaleTrain.data" );

  //------------- read the training data --------------

  NICE::Matrix dataTrain;
  NICE::Vector yValues;

  readData ( s_trainData, dataTrain, yValues );

  //----------------- convert data to sparse data structures ---------

  std::vector< const NICE::SparseVector *> examplesTrain;
  examplesTrain.resize( dataTrain.rows() );

  std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin();
  for (int i = 0; i < (int)dataTrain.rows(); i++, exTrainIt++)
  {
    *exTrainIt = new NICE::SparseVector( dataTrain.getRow(i) );
  }

  // create regressionMethod object
  NICE::GPHIKRegression * regressionMethod;
  regressionMethod = new NICE::GPHIKRegression ( &conf );
  regressionMethod->train ( examplesTrain , yValues );

  if ( verbose )
  {
    std::cerr << " yValues used for training the regression object" << std::endl;
    std::cerr << yValues << std::endl;
  }

  double holdInLoss ( 0.0 );

  // ------------------------------------------
  // ------------- REGRESSION --------------
  // ------------------------------------------

  evaluateRegressionMethod ( holdInLoss, regressionMethod, dataTrain, yValues );

  if ( verbose )
  {
    std::cerr << " holdInLoss: " << holdInLoss << std::endl;
  }

  CPPUNIT_ASSERT_DOUBLES_EQUAL( 0.0, holdInLoss, 1e-8);

  // don't waste memory
  delete regressionMethod;

  for (std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin(); exTrainIt != examplesTrain.end(); exTrainIt++)
  {
    delete *exTrainIt;
  }

  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionHoldInData done ===================== " << std::endl;
}
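
/**
 * @brief Train a GPHIKRegression object and verify that the L2 loss on a separate
 *        hold-out test set stays below an acceptable per-example error bound.
 */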
void TestGPHIKRegression::testRegressionHoldOutData()
{
  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionHoldOutData ===================== " << std::endl;

  NICE::Config conf;
  conf.sB ( "GPHIKRegression", "eig_verbose", false);
  conf.sS ( "GPHIKRegression", "optimization_method", "downhillsimplex");
  // set higher built-in noise for hold-out regression estimation
  conf.sD ( "GPHIKRegression", "noise", 1e-4 );

  std::string s_trainData = conf.gS( "main", "trainData", "toyExampleSmallScaleTrain.data" );

  //------------- read the training data --------------

  NICE::Matrix dataTrain;
  NICE::Vector yValues;

  readData ( s_trainData, dataTrain, yValues );

  //----------------- convert data to sparse data structures ---------

  std::vector< const NICE::SparseVector *> examplesTrain;
  examplesTrain.resize( dataTrain.rows() );

  std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin();
  for (int i = 0; i < (int)dataTrain.rows(); i++, exTrainIt++)
  {
    *exTrainIt = new NICE::SparseVector( dataTrain.getRow(i) );
  }

  // create regressionMethod object
  NICE::GPHIKRegression * regressionMethod;
  regressionMethod = new NICE::GPHIKRegression ( &conf, "GPHIKRegression" );
  regressionMethod->train ( examplesTrain , yValues );

  //------------- read the test data --------------

  NICE::Matrix dataTest;
  NICE::Vector yValuesTest;

  std::string s_testData = conf.gS( "main", "testData", "toyExampleTest.data" );
  readData ( s_testData, dataTest, yValuesTest );

  double holdOutLoss ( 0.0 );

  // ------------------------------------------
  // ------------- REGRESSION --------------
  // ------------------------------------------

  evaluateRegressionMethod ( holdOutLoss, regressionMethod, dataTest, yValuesTest );

  // acceptable difference for every estimated y-value on average
  double diffOkay ( 0.4 );

  if ( verbose )
  {
    std::cerr << " holdOutLoss: " << holdOutLoss << " accepting: " << pow(diffOkay,2)*yValuesTest.size() << std::endl;
  }

  CPPUNIT_ASSERT( holdOutLoss < pow(diffOkay,2)*yValuesTest.size() );

  // don't waste memory
  delete regressionMethod;

  for (std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin(); exTrainIt != examplesTrain.end(); exTrainIt++)
  {
    delete *exTrainIt;
  }

  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionHoldOutData done ===================== " << std::endl;
}
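
/**
 * @brief Verify online learning with a single additional example: a regressor extended
 *        incrementally via addExample must behave like one trained from scratch on the
 *        same overall set of examples (compared via the hold-out L2 loss).
 */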
void TestGPHIKRegression::testRegressionOnlineLearnableAdd1Example()
{
  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearnableAdd1Example ===================== " << std::endl;

  NICE::Config conf;
  // note: the section name has to match the one passed to the GPHIKRegression constructor below
  conf.sB ( "GPHIKRegression", "eig_verbose", false);
  conf.sS ( "GPHIKRegression", "optimization_method", "downhillsimplex");//downhillsimplex greedy
  // set higher built-in noise for hold-out regression estimation
  conf.sD ( "GPHIKRegression", "noise", 1e-4 );

  std::string s_trainData = conf.gS( "main", "trainData", "toyExampleSmallScaleTrain.data" );

  //------------- read the training data --------------

  NICE::Matrix dataTrain;
  NICE::Vector yValuesTrain;

  readData ( s_trainData, dataTrain, yValuesTrain );

  //----------------- convert data to sparse data structures ---------

  std::vector< const NICE::SparseVector *> examplesTrain;
  examplesTrain.resize( dataTrain.rows()-1 );

  std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin();
  for (int i = 0; i < (int)dataTrain.rows()-1; i++, exTrainIt++)
  {
    *exTrainIt = new NICE::SparseVector( dataTrain.getRow(i) );
  }

  // TRAIN INITIAL REGRESSOR FROM SCRATCH
  NICE::GPHIKRegression * regressionMethod;
  regressionMethod = new NICE::GPHIKRegression ( &conf, "GPHIKRegression" );

  // use all but the last example for training and add the last one later on
  NICE::Vector yValuesRelevantTrain ( yValuesTrain.getRangeRef( 0, yValuesTrain.size()-2 ) );

  regressionMethod->train ( examplesTrain , yValuesRelevantTrain );

  // RUN INCREMENTAL LEARNING
  bool performOptimizationAfterIncrement ( true );

  NICE::SparseVector * exampleToAdd = new NICE::SparseVector ( dataTrain.getRow( (int)dataTrain.rows()-1 ) );
  // the label has to belong to the example that is actually added, i.e., the last row
  regressionMethod->addExample ( exampleToAdd, yValuesTrain[ (int)dataTrain.rows()-1 ], performOptimizationAfterIncrement );
  if ( verbose )
    std::cerr << "label of example to add: " << yValuesTrain[ (int)dataTrain.rows()-1 ] << std::endl;

  // TRAIN A SECOND REGRESSOR FROM SCRATCH USING THE SAME OVERALL SET OF EXAMPLES
  examplesTrain.push_back( exampleToAdd );

  NICE::GPHIKRegression * regressionMethodScratch = new NICE::GPHIKRegression ( &conf, "GPHIKRegression" );
  regressionMethodScratch->train ( examplesTrain, yValuesTrain );

  if ( verbose )
    std::cerr << "trained both regression methods - now start evaluating them" << std::endl;

  // TEST that both regression methods produce equal store-files
  if ( writeRegressionObjectsForVerification )
  {
    std::string s_destination_save_IL ( "myRegressionMethodIL.txt" );

    std::filebuf fbOut;
    fbOut.open ( s_destination_save_IL.c_str(), ios::out );
    std::ostream os (&fbOut);
    //
    regressionMethod->store( os );
    //
    fbOut.close();

    std::string s_destination_save_scratch ( "myRegressionMethodScratch.txt" );

    std::filebuf fbOutScratch;
    fbOutScratch.open ( s_destination_save_scratch.c_str(), ios::out );
    std::ostream osScratch (&fbOutScratch);
    //
    regressionMethodScratch->store( osScratch );
    //
    fbOutScratch.close();
  }

  // TEST that both regression methods produce equal results

  //------------- read the test data --------------

  NICE::Matrix dataTest;
  NICE::Vector yValuesTest;

  std::string s_testData = conf.gS( "main", "testData", "toyExampleTest.data" );
  readData ( s_testData, dataTest, yValuesTest );

  // ------------------------------------------
  // ------------- REGRESSION --------------
  // ------------------------------------------

  double holdOutLossIL ( 0.0 );
  double holdOutLossScratch ( 0.0 );

  evaluateRegressionMethod ( holdOutLossIL, regressionMethod, dataTest, yValuesTest );
  evaluateRegressionMethod ( holdOutLossScratch, regressionMethodScratch, dataTest, yValuesTest );

  if ( verbose )
  {
    std::cerr << "holdOutLossIL: " << holdOutLossIL << std::endl;
    std::cerr << "holdOutLossScratch: " << holdOutLossScratch << std::endl;
  }

  CPPUNIT_ASSERT_DOUBLES_EQUAL( holdOutLossIL, holdOutLossScratch, 1e-4);

  // don't waste memory
  delete regressionMethod;
  delete regressionMethodScratch;

  for (std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin(); exTrainIt != examplesTrain.end(); exTrainIt++)
  {
    delete *exTrainIt;
  }

  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearnableAdd1Example done ===================== " << std::endl;
}
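
/**
 * @brief Verify online learning with several additional examples: a regressor trained on a
 *        subset and extended via addMultipleExamples must behave like one trained from
 *        scratch on all examples (compared via the hold-out L2 loss).
 */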
void TestGPHIKRegression::testRegressionOnlineLearnableAddMultipleExamples()
{
  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearnableAddMultipleExamples ===================== " << std::endl;

  NICE::Config conf;
  // note: the section name has to match the one passed to the GPHIKRegression constructor below
  conf.sB ( "GPHIKRegression", "eig_verbose", false);
  conf.sS ( "GPHIKRegression", "optimization_method", "downhillsimplex");//downhillsimplex greedy
  // set higher built-in noise for hold-out regression estimation
  conf.sD ( "GPHIKRegression", "noise", 1e-4 );

  std::string s_trainData = conf.gS( "main", "trainData", "toyExampleSmallScaleTrain.data" );

  //------------- read the training data --------------

  NICE::Matrix dataTrain;
  NICE::Vector yValuesTrain;

  readData ( s_trainData, dataTrain, yValuesTrain );

  //----------------- convert data to sparse data structures ---------

  std::vector< const NICE::SparseVector *> examplesTrain;
  std::vector< const NICE::SparseVector *> examplesTrainPlus;
  std::vector< const NICE::SparseVector *> examplesTrainMinus;

  examplesTrain.resize( dataTrain.rows() );

  NICE::Vector yValuesPlus( dataTrain.rows() );
  NICE::Vector yValuesMinus( dataTrain.rows() );

  std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin();

  int cntPlus ( 0 );
  int cntMinus ( 0 );

  // note: compared to the scratch regressor, we also slightly shuffle the order in which
  // examples are added -- this should not result in any difference of behaviour
  for (int i = 0; i < (int)dataTrain.rows(); i++, exTrainIt++)
  {
    *exTrainIt = new NICE::SparseVector( dataTrain.getRow(i) );

    if ( ( yValuesTrain[i] == 1 ) || ( yValuesTrain[i] == 2 ) )
    {
      examplesTrainPlus.push_back ( *exTrainIt );
      yValuesPlus[cntPlus] = yValuesTrain[i];
      cntPlus++;
    }
    else
    {
      examplesTrainMinus.push_back ( *exTrainIt );
      yValuesMinus[cntMinus] = yValuesTrain[i];
      cntMinus++;
    }
  }

  yValuesPlus.resize ( examplesTrainPlus.size() );
  yValuesMinus.resize( examplesTrainMinus.size() );

  // TRAIN INITIAL REGRESSOR FROM SCRATCH
  NICE::GPHIKRegression * regressionMethod;
  regressionMethod = new NICE::GPHIKRegression ( &conf, "GPHIKRegression" );
  regressionMethod->train ( examplesTrainPlus , yValuesPlus );

  if ( verbose )
  {
    std::cerr << "Initial values: " << yValuesPlus << std::endl;
    std::cerr << "Values to add: " << yValuesMinus << std::endl;
  }

  // RUN INCREMENTAL LEARNING
  bool performOptimizationAfterIncrement ( true );

  regressionMethod->addMultipleExamples ( examplesTrainMinus, yValuesMinus, performOptimizationAfterIncrement );

  // TRAIN A SECOND REGRESSOR FROM SCRATCH USING THE SAME OVERALL SET OF EXAMPLES
  NICE::GPHIKRegression * regressionMethodScratch = new NICE::GPHIKRegression ( &conf, "GPHIKRegression" );
  regressionMethodScratch->train ( examplesTrain, yValuesTrain );

  if ( verbose )
    std::cerr << "trained both regression methods - now start evaluating them" << std::endl;

  // TEST that both regression methods produce equal store-files
  if ( writeRegressionObjectsForVerification )
  {
    std::string s_destination_save_IL ( "myRegressionMethodIL.txt" );

    std::filebuf fbOut;
    fbOut.open ( s_destination_save_IL.c_str(), ios::out );
    std::ostream os (&fbOut);
    //
    regressionMethod->store( os );
    //
    fbOut.close();

    std::string s_destination_save_scratch ( "myRegressionMethodScratch.txt" );

    std::filebuf fbOutScratch;
    fbOutScratch.open ( s_destination_save_scratch.c_str(), ios::out );
    std::ostream osScratch (&fbOutScratch);
    //
    regressionMethodScratch->store( osScratch );
    //
    fbOutScratch.close();
  }

  // TEST that both regression methods produce equal results

  //------------- read the test data --------------

  NICE::Matrix dataTest;
  NICE::Vector yValuesTest;

  std::string s_testData = conf.gS( "main", "testData", "toyExampleTest.data" );
  readData ( s_testData, dataTest, yValuesTest );

  // ------------------------------------------
  // ------------- REGRESSION --------------
  // ------------------------------------------

  double holdOutLossIL ( 0.0 );
  double holdOutLossScratch ( 0.0 );

  evaluateRegressionMethod ( holdOutLossIL, regressionMethod, dataTest, yValuesTest );
  evaluateRegressionMethod ( holdOutLossScratch, regressionMethodScratch, dataTest, yValuesTest );

  if ( verbose )
  {
    std::cerr << "holdOutLossIL: " << holdOutLossIL << std::endl;
    std::cerr << "holdOutLossScratch: " << holdOutLossScratch << std::endl;
  }

  CPPUNIT_ASSERT_DOUBLES_EQUAL( holdOutLossIL, holdOutLossScratch, 1e-4);

  // don't waste memory
  delete regressionMethod;
  delete regressionMethodScratch;

  for (std::vector< const NICE::SparseVector *>::iterator exTrainIt = examplesTrain.begin(); exTrainIt != examplesTrain.end(); exTrainIt++)
  {
    delete *exTrainIt;
  }

  if (verboseStartEnd)
    std::cerr << "================== TestGPHIKRegression::testRegressionOnlineLearnableAddMultipleExamples done ===================== " << std::endl;
}

#endif