// GPHIKRegression.cpp
  1. /**
  2. * @file GPHIKRegression.cpp
  3. * @brief Main interface for our GP HIK regression implementation (Implementation)
  4. * @author Alexander Freytag
  5. * @date 15-01-2014 (dd-mm-yyyy)
  6. */
  7. // STL includes
  8. #include <iostream>
  9. // NICE-core includes
  10. #include <core/basics/numerictools.h>
  11. #include <core/basics/Timer.h>
  12. // gp-hik-core includes
  13. #include "GPHIKRegression.h"
  14. #include "gp-hik-core/parameterizedFunctions/PFAbsExp.h"
  15. #include "gp-hik-core/parameterizedFunctions/PFExp.h"
  16. #include "gp-hik-core/parameterizedFunctions/PFMKL.h"
  17. using namespace std;
  18. using namespace NICE;
  19. /////////////////////////////////////////////////////
  20. /////////////////////////////////////////////////////
  21. // PROTECTED METHODS
  22. /////////////////////////////////////////////////////
  23. /////////////////////////////////////////////////////
  24. void GPHIKRegression::init(const Config *conf, const string & s_confSection)
  25. {
  26. //copy the given config to have it accessible lateron
  27. if ( this->confCopy != conf )
  28. {
  29. if ( this->confCopy != NULL )
  30. delete this->confCopy;
  31. this->confCopy = new Config ( *conf );
  32. //we do not want to read until end of file for restoring
  33. this->confCopy->setIoUntilEndOfFile(false);
  34. }
  35. double parameterUpperBound = confCopy->gD(confSection, "parameter_upper_bound", 5.0 );
  36. double parameterLowerBound = confCopy->gD(confSection, "parameter_lower_bound", 1.0 );
  37. this->noise = confCopy->gD(confSection, "noise", 0.01);
  38. string transform = confCopy->gS(confSection, "transform", "absexp" );
  39. if (pf == NULL)
  40. {
  41. if ( transform == "absexp" )
  42. {
  43. this->pf = new PFAbsExp( 1.0, parameterLowerBound, parameterUpperBound );
  44. } else if ( transform == "exp" ) {
  45. this->pf = new PFExp( 1.0, parameterLowerBound, parameterUpperBound );
  46. }else if ( transform == "MKL" ) {
  47. //TODO generic, please :) load from a separate file or something like this!
  48. std::set<int> steps; steps.insert(4000); steps.insert(6000); //specific for VISAPP
  49. this->pf = new PFMKL( steps, parameterLowerBound, parameterUpperBound );
  50. } else {
  51. fthrow(Exception, "Transformation type is unknown " << transform);
  52. }
  53. }
  54. else
  55. {
  56. //we already know the pf from the restore-function
  57. }
  58. this->confSection = confSection;
  59. this->verbose = confCopy->gB(confSection, "verbose", false);
  60. this->debug = confCopy->gB(confSection, "debug", false);
  61. this->uncertaintyPredictionForRegression = confCopy->gB( confSection, "uncertaintyPredictionForRegression", false );
  62. //how do we approximate the predictive variance for regression uncertainty?
  63. string s_varianceApproximation = confCopy->gS(confSection, "varianceApproximation", "approximate_fine"); //default: fine approximative uncertainty prediction
  64. if ( (s_varianceApproximation.compare("approximate_rough") == 0) || ((s_varianceApproximation.compare("1") == 0)) )
  65. {
  66. this->varianceApproximation = APPROXIMATE_ROUGH;
  67. //no additional eigenvalue is needed here at all.
  68. this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0 );
  69. }
  70. else if ( (s_varianceApproximation.compare("approximate_fine") == 0) || ((s_varianceApproximation.compare("2") == 0)) )
  71. {
  72. this->varianceApproximation = APPROXIMATE_FINE;
  73. //security check - compute at least one eigenvalue for this approximation strategy
  74. this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", std::max( confCopy->gI(confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 ), 1) );
  75. }
  76. else if ( (s_varianceApproximation.compare("exact") == 0) || ((s_varianceApproximation.compare("3") == 0)) )
  77. {
  78. this->varianceApproximation = EXACT;
  79. //no additional eigenvalue is needed here at all.
  80. this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );
  81. }
  82. else
  83. {
  84. this->varianceApproximation = NONE;
  85. //no additional eigenvalue is needed here at all.
  86. this->confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 1 );
  87. }
  88. if ( this->verbose )
  89. std::cerr << "varianceApproximationStrategy: " << s_varianceApproximation << std::endl;
  90. }
  91. /////////////////////////////////////////////////////
  92. /////////////////////////////////////////////////////
  93. // PUBLIC METHODS
  94. /////////////////////////////////////////////////////
  95. /////////////////////////////////////////////////////
  96. GPHIKRegression::GPHIKRegression( const Config *conf, const string & s_confSection )
  97. {
  98. //default settings, may be overwritten lateron
  99. gphyper = NULL;
  100. pf = NULL;
  101. confCopy = NULL;
  102. //just a default value
  103. uncertaintyPredictionForRegression = false;
  104. this->confSection = s_confSection;
  105. // if no config file was given, we either restore the classifier from an external file, or run ::init with
  106. // an emtpy config (using default values thereby) when calling the train-method
  107. if ( conf != NULL )
  108. {
  109. this->init(conf, confSection);
  110. }
  111. }
  112. GPHIKRegression::~GPHIKRegression()
  113. {
  114. if ( gphyper != NULL )
  115. delete gphyper;
  116. if (pf != NULL)
  117. delete pf;
  118. if ( confCopy != NULL )
  119. delete confCopy;
  120. }
  121. ///////////////////// ///////////////////// /////////////////////
  122. // GET / SET
  123. ///////////////////// ///////////////////// /////////////////////
  124. ///////////////////// ///////////////////// /////////////////////
  125. // REGRESSION STUFF
  126. ///////////////////// ///////////////////// /////////////////////
  127. void GPHIKRegression::estimate ( const SparseVector * example, double & result ) const
  128. {
  129. double tmpUncertainty;
  130. this->estimate( example, result, tmpUncertainty );
  131. }
  132. void GPHIKRegression::estimate ( const NICE::Vector * example, double & result ) const
  133. {
  134. double tmpUncertainty;
  135. this->estimate( example, result, tmpUncertainty );
  136. }
  137. void GPHIKRegression::estimate ( const SparseVector * example, double & result, double & uncertainty ) const
  138. {
  139. if (gphyper == NULL)
  140. fthrow(Exception, "Regression object not trained yet -- aborting!" );
  141. NICE::SparseVector scores;
  142. scores.clear();
  143. gphyper->classify ( *example, scores );
  144. if ( scores.size() == 0 ) {
  145. fthrow(Exception, "Zero scores, something is likely to be wrong here: svec.size() = " << example->size() );
  146. }
  147. // the internal gphyper object returns for regression a sparse vector with a single entry only
  148. result = scores.begin()->second;
  149. if (uncertaintyPredictionForRegression)
  150. {
  151. if (varianceApproximation != NONE)
  152. {
  153. this->predictUncertainty( example, uncertainty );
  154. }
  155. else
  156. {
  157. //do nothing
  158. uncertainty = std::numeric_limits<double>::max();
  159. }
  160. }
  161. else
  162. {
  163. //do nothing
  164. uncertainty = std::numeric_limits<double>::max();
  165. }
  166. }
  167. void GPHIKRegression::estimate ( const NICE::Vector * example, double & result, double & uncertainty ) const
  168. {
  169. if (gphyper == NULL)
  170. fthrow(Exception, "Regression object not trained yet -- aborting!" );
  171. NICE::SparseVector scores;
  172. scores.clear();
  173. gphyper->classify ( *example, scores );
  174. if ( scores.size() == 0 ) {
  175. fthrow(Exception, "Zero scores, something is likely to be wrong here: svec.size() = " << example->size() );
  176. }
  177. // the internal gphyper object returns for regression a sparse vector with a single entry only
  178. result = scores.begin()->second;
  179. if (uncertaintyPredictionForRegression)
  180. {
  181. if (varianceApproximation != NONE)
  182. {
  183. this->predictUncertainty( example, uncertainty );
  184. }
  185. else
  186. {
  187. //do nothing
  188. uncertainty = std::numeric_limits<double>::max();
  189. }
  190. }
  191. else
  192. {
  193. //do nothing
  194. uncertainty = std::numeric_limits<double>::max();
  195. }
  196. }
/**
 * @brief Training process: build the fast min-kernel structure, optimize the
 *        hyperparameters, and prepare the configured variance approximation.
 *
 * A previously learned model (gphyper) is replaced completely. If init() was
 * never called, an empty config (default values) is used.
 *
 * @param examples training feature vectors (sparse)
 * @param labels   regression targets, one per example
 * @throws Exception if examples and labels differ in size
 */
void GPHIKRegression::train ( const std::vector< const NICE::SparseVector *> & examples, const NICE::Vector & labels )
{
  // security-check: examples and labels have to be of same size
  if ( examples.size() != labels.size() )
  {
    fthrow(Exception, "Given examples do not match label vector in size -- aborting!" );
  }

  if (verbose)
  {
    std::cerr << "GPHIKRegression::train" << std::endl;
  }

  // no config so far? fall back to default values via an empty config
  if ( this->confCopy == NULL )
  {
    std::cerr << "WARNING -- No config used so far, initialize values with empty config file now..." << std::endl;
    NICE::Config tmpConfEmpty ;
    this->init ( &tmpConfEmpty, this->confSection );
  }

  // build the fast min-kernel data structure from all training examples
  Timer t;
  t.start();
  FastMinKernel *fmk = new FastMinKernel ( examples, noise, this->debug );
  t.stop();
  if (verbose)
    std::cerr << "Time used for setting up the fmk object: " << t.getLast() << std::endl;

  // re-training replaces any previously learned model
  if (gphyper != NULL)
    delete gphyper;

  // only the fine approximation needs additional eigenvalues
  if ( ( varianceApproximation != APPROXIMATE_FINE) )
    confCopy->sI ( confSection, "nrOfEigenvaluesToConsiderForVarApprox", 0);

  // add flag for gphyper that only regression is performed
  // thereby, all the binary-label-stuff should be skipped :)
  confCopy->sB ( confSection, "b_performRegression", true );

  // NOTE(review): presumably gphyper takes ownership of fmk -- confirm in
  // FMKGPHyperparameterOptimization, otherwise fmk leaks here
  gphyper = new FMKGPHyperparameterOptimization ( confCopy, pf, fmk, confSection );

  if (verbose)
    cerr << "Learning ..." << endl;

  // go go go
  gphyper->optimize ( labels );
  if (verbose)
    std::cerr << "optimization done" << std::endl;

  // precompute whatever the chosen variance approximation strategy needs
  if ( ( varianceApproximation != NONE ) )
  {
    switch (varianceApproximation)
    {
      case APPROXIMATE_ROUGH:
      {
        gphyper->prepareVarianceApproximationRough();
        break;
      }
      case APPROXIMATE_FINE:
      {
        gphyper->prepareVarianceApproximationFine();
        break;
      }
      case EXACT:
      {
        //nothing to prepare
        break;
      }
      default:
      {
        //nothing to prepare
      }
    }
  }

  // clean up all examples ??
  if (verbose)
    std::cerr << "Learning finished" << std::endl;
}
/**
 * @brief Deep copy of this regression object -- not supported.
 * @return never returns normally
 * @throws Exception always (cloning is not implemented)
 */
GPHIKRegression *GPHIKRegression::clone () const
{
  fthrow(Exception, "GPHIKRegression: clone() not yet implemented" );
  // unreachable; keeps the compiler happy about the return type
  return NULL;
}
  269. void GPHIKRegression::predictUncertainty( const NICE::SparseVector * example, double & uncertainty ) const
  270. {
  271. if (gphyper == NULL)
  272. fthrow(Exception, "Regression object not trained yet -- aborting!" );
  273. switch (varianceApproximation)
  274. {
  275. case APPROXIMATE_ROUGH:
  276. {
  277. gphyper->computePredictiveVarianceApproximateRough( *example, uncertainty );
  278. break;
  279. }
  280. case APPROXIMATE_FINE:
  281. {
  282. gphyper->computePredictiveVarianceApproximateFine( *example, uncertainty );
  283. break;
  284. }
  285. case EXACT:
  286. {
  287. gphyper->computePredictiveVarianceExact( *example, uncertainty );
  288. break;
  289. }
  290. default:
  291. {
  292. fthrow(Exception, "GPHIKRegression - your settings disabled the variance approximation needed for uncertainty prediction.");
  293. }
  294. }
  295. }
  296. void GPHIKRegression::predictUncertainty( const NICE::Vector * example, double & uncertainty ) const
  297. {
  298. if (gphyper == NULL)
  299. fthrow(Exception, "Regression object not trained yet -- aborting!" );
  300. switch (varianceApproximation)
  301. {
  302. case APPROXIMATE_ROUGH:
  303. {
  304. gphyper->computePredictiveVarianceApproximateRough( *example, uncertainty );
  305. break;
  306. }
  307. case APPROXIMATE_FINE:
  308. {
  309. gphyper->computePredictiveVarianceApproximateFine( *example, uncertainty );
  310. break;
  311. }
  312. case EXACT:
  313. {
  314. gphyper->computePredictiveVarianceExact( *example, uncertainty );
  315. break;
  316. }
  317. default:
  318. {
  319. fthrow(Exception, "GPHIKRegression - your settings disabled the variance approximation needed for uncertainty prediction.");
  320. }
  321. }
  322. }
  323. ///////////////////// INTERFACE PERSISTENT /////////////////////
  324. // interface specific methods for store and restore
  325. ///////////////////// INTERFACE PERSISTENT /////////////////////
/**
 * @brief Restore the object from a stream previously written by store().
 *
 * Parses the tag-delimited sections (confSection, pf, ConfigCopy, gphyper),
 * rebuilds the corresponding members, and finally re-runs init() and
 * gphyper->initialize() so that all derived settings are consistent.
 *
 * @param is     input stream positioned at a "GPHIKRegression" start tag
 * @param format format identifier forwarded to nested restore() calls
 * @throws Exception on an unknown transformation type
 *
 * NOTE(review): the bare "throw;" statements below rethrow with no active
 * exception, which calls std::terminate -- consider fthrow instead.
 */
void GPHIKRegression::restore ( std::istream & is, int format )
{
  //delete everything we knew so far...
  this->clear();

  bool b_restoreVerbose ( false );
#ifdef B_RESTOREVERBOSE
  b_restoreVerbose = true;
#endif

  if ( is.good() )
  {
    if ( b_restoreVerbose )
      std::cerr << " restore GPHIKRegression" << std::endl;

    std::string tmp;
    is >> tmp; //class name

    if ( ! this->isStartTag( tmp, "GPHIKRegression" ) )
    {
      std::cerr << " WARNING - attempt to restore GPHIKRegression, but start flag " << tmp << " does not match! Aborting... " << std::endl;
      throw;
    }

    // clear() above already freed these; the guards keep this robust anyway
    if (pf != NULL)
    {
      delete pf;
      pf = NULL;
    }
    if ( confCopy != NULL )
    {
      delete confCopy;
      confCopy = NULL;
    }
    if (gphyper != NULL)
    {
      delete gphyper;
      gphyper = NULL;
    }

    is.precision (numeric_limits<double>::digits10 + 1);

    bool b_endOfBlock ( false ) ;

    // read section by section until the matching end tag is found
    while ( !b_endOfBlock )
    {
      is >> tmp; // start of block

      if ( this->isEndTag( tmp, "GPHIKRegression" ) )
      {
        b_endOfBlock = true;
        continue;
      }

      tmp = this->removeStartTag ( tmp );

      if ( b_restoreVerbose )
        std::cerr << " currently restore section " << tmp << " in GPHIKRegression" << std::endl;

      if ( tmp.compare("confSection") == 0 )
      {
        is >> confSection;
        is >> tmp; // end of block
        tmp = this->removeEndTag ( tmp );
      }
      else if ( tmp.compare("pf") == 0 )
      {
        is >> tmp; // start of block
        if ( this->isEndTag( tmp, "pf" ) )
        {
          // empty pf section -- nothing we could instantiate
          std::cerr << " ParameterizedFunction object can not be restored. Aborting..." << std::endl;
          throw;
        }

        // the tag name encodes the concrete transformation class
        std::string transform = this->removeStartTag ( tmp );

        if ( transform == "PFAbsExp" )
        {
          this->pf = new PFAbsExp ();
        } else if ( transform == "PFExp" ) {
          this->pf = new PFExp ();
        } else {
          fthrow(Exception, "Transformation type is unknown " << transform);
        }

        pf->restore(is, format);

        is >> tmp; // end of block
        tmp = this->removeEndTag ( tmp );
      }
      else if ( tmp.compare("ConfigCopy") == 0 )
      {
        // possibly obsolete safety checks
        if ( confCopy == NULL )
          confCopy = new Config;
        confCopy->clear();

        //we do not want to read until the end of the file
        confCopy->setIoUntilEndOfFile( false );
        //load every options we determined explicitely
        confCopy->restore(is, format);

        is >> tmp; // end of block
        tmp = this->removeEndTag ( tmp );
      }
      else if ( tmp.compare("gphyper") == 0 )
      {
        if ( gphyper == NULL )
          gphyper = new NICE::FMKGPHyperparameterOptimization();

        //then, load everything that we stored explicitely,
        // including precomputed matrices, LUTs, eigenvalues, ... and all that stuff
        gphyper->restore(is, format);

        is >> tmp; // end of block
        tmp = this->removeEndTag ( tmp );
      }
      else
      {
        std::cerr << "WARNING -- unexpected GPHIKRegression object -- " << tmp << " -- for restoration... aborting" << std::endl;
        throw;
      }
    }

    //load every settings as well as default options
    std::cerr << "run this->init" << std::endl;
    this->init(confCopy, confSection);
    std::cerr << "run gphyper->initialize" << std::endl;
    gphyper->initialize ( confCopy, pf, NULL, confSection );
  }
  else
  {
    std::cerr << "GPHIKRegression::restore -- InStream not initialized - restoring not possible!" << std::endl;
    throw;
  }
}
/**
 * @brief Persist the object to a stream in the tag-delimited format that
 *        restore() parses: confSection, pf, ConfigCopy, gphyper.
 *
 * @param os     output stream to write to
 * @param format format identifier forwarded to nested store() calls
 * @throws Exception if the object was not trained yet
 */
void GPHIKRegression::store ( std::ostream & os, int format ) const
{
  if (gphyper == NULL)
    fthrow(Exception, "Regression object not trained yet -- aborting!" );

  if (os.good())
  {
    // show starting point
    os << this->createStartTag( "GPHIKRegression" ) << std::endl;

    // full double precision so restore() reproduces the exact values
    os.precision (numeric_limits<double>::digits10 + 1);

    os << this->createStartTag( "confSection" ) << std::endl;
    os << confSection << std::endl;
    os << this->createEndTag( "confSection" ) << std::endl;

    os << this->createStartTag( "pf" ) << std::endl;
    pf->store(os, format);
    os << this->createEndTag( "pf" ) << std::endl;

    os << this->createStartTag( "ConfigCopy" ) << std::endl;
    //we do not want to read until end of file for restoring
    confCopy->setIoUntilEndOfFile(false);
    confCopy->store(os,format);
    os << this->createEndTag( "ConfigCopy" ) << std::endl;

    os << this->createStartTag( "gphyper" ) << std::endl;
    //store the underlying data
    //will be done in gphyper->store(of,format)
    //store the optimized parameter values and all that stuff
    gphyper->store(os, format);
    os << this->createEndTag( "gphyper" ) << std::endl;

    // done
    os << this->createEndTag( "GPHIKRegression" ) << std::endl;
  }
  else
  {
    std::cerr << "OutStream not initialized - storing not possible!" << std::endl;
  }
}
  475. void GPHIKRegression::clear ()
  476. {
  477. if ( gphyper != NULL )
  478. {
  479. delete gphyper;
  480. gphyper = NULL;
  481. }
  482. if (pf != NULL)
  483. {
  484. delete pf;
  485. pf = NULL;
  486. }
  487. if ( confCopy != NULL )
  488. {
  489. delete confCopy;
  490. confCopy = NULL;
  491. }
  492. }
  493. ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////
  494. // interface specific methods for incremental extensions
  495. ///////////////////// INTERFACE ONLINE LEARNABLE /////////////////////
  496. void GPHIKRegression::addExample( const NICE::SparseVector * example,
  497. const double & label,
  498. const bool & performOptimizationAfterIncrement
  499. )
  500. {
  501. if ( this->gphyper == NULL )
  502. {
  503. //call train method instead
  504. std::cerr << "Regression object not initially trained yet -- run initial training instead of incremental extension!" << std::endl;
  505. std::vector< const NICE::SparseVector *> examplesVec;
  506. examplesVec.push_back ( example );
  507. NICE::Vector labelsVec ( 1 , label );
  508. this->train ( examplesVec, labelsVec );
  509. }
  510. else
  511. {
  512. this->gphyper->addExample( example, label, performOptimizationAfterIncrement );
  513. }
  514. }
  515. void GPHIKRegression::addMultipleExamples( const std::vector< const NICE::SparseVector * > & newExamples,
  516. const NICE::Vector & newLabels,
  517. const bool & performOptimizationAfterIncrement
  518. )
  519. {
  520. //are new examples available? If not, nothing has to be done
  521. if ( newExamples.size() < 1)
  522. return;
  523. if ( this->gphyper == NULL )
  524. {
  525. //call train method instead
  526. std::cerr << "Regression object not initially trained yet -- run initial training instead of incremental extension!" << std::endl;
  527. this->train ( newExamples, newLabels );
  528. }
  529. else
  530. {
  531. this->gphyper->addMultipleExamples( newExamples, newLabels, performOptimizationAfterIncrement );
  532. }
  533. }