/**
 * @file CodebookRandomForestMex.cpp
 * @author Alexander Freytag
 * @date 17-01-2014 (dd-mm-yyyy)
 * @brief Matlab interface of our CodebookRandomForest, allowing for creating, training, and deleting a random-forest-based codebook (estimation, evaluation, and store/restore code is currently commented out below).
 */
  7. // STL includes
  8. #include <math.h>
  9. #include <matrix.h>
  10. #include <mex.h>
  11. // NICE-core includes
  12. #include <core/basics/Config.h>
  13. #include <core/basics/Timer.h>
  14. #include <core/vector/MatrixT.h>
  15. #include <core/vector/VectorT.h>
  16. // CodebookRandomForest stuff
  17. #include "vislearning/features/simplefeatures/CodebookRandomForest.h"
  18. #include "vislearning/features/fpfeatures/VectorFeature.h"
  19. // Interface for conversion between Matlab and C objects
  20. #include "gp-hik-core/matlab/classHandleMtoC.h"
  21. #include "gp-hik-core/matlab/ConverterMatlabToNICE.h"
  22. #include "gp-hik-core/matlab/ConverterNICEToMatlab.h"
  23. #include "HelperDataConversionMex.h"
  24. using namespace std; //C basics
  25. using namespace NICE; // nice-core
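
// A minimal sketch of how this interface might be called from Matlab once compiled
// (hypothetical session; the mex name is assumed from the file name and the variable
//  names are placeholders -- only 'new', 'train', and 'delete' are handled below):
//
//   hRF = CodebookRandomForestMex ( 'new' );
//   CodebookRandomForestMex ( 'train', hRF, feaTrain, labelsTrain );
//   CodebookRandomForestMex ( 'delete', hRF );
//
// feaTrain is a double feature matrix (presumably one example per column, see the note
// in the 'train' branch) and labelsTrain holds the corresponding sample labels.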
NICE::Config parseParametersERC( const mxArray *prhs[], int nrhs )
{
    NICE::Config conf;

    // if the first argument is the keyword "conf", the following argument is
    // interpreted as the filename of an existing config file and read accordingly
    int i_start ( 0 );
    std::string variable = MatlabConversion::convertMatlabToString(prhs[i_start]);
    if ( variable == "conf" )
    {
        conf = NICE::Config ( MatlabConversion::convertMatlabToString( prhs[i_start+1] ) );
        i_start = i_start+2;
    }
    // now run over all given parameter specifications
    // and add them to the config
    for ( int i = i_start; i < nrhs; i += 2 )
    {
        std::string variable = MatlabConversion::convertMatlabToString(prhs[i]);

        /////////////
        // CodebookRandomForest( int maxDepth
        // number_of_trees      = conf->gI(section, "number_of_trees", 20 );
        // features_per_tree    = conf->gD(section, "features_per_tree", 1.0 );
        // samples_per_tree     = conf->gD(section, "samples_per_tree", 0.2 );
        // use_simple_balancing = conf->gB(section, "use_simple_balancing", false);
        // weight_examples      = conf->gB(section, "weight_examples", false);
        // memory_efficient     = conf->gB(section, "memory_efficient", false);
        // std::string builder_section = conf->gS(section, "builder_section", "DTBRandom");
        if ( variable == "number_of_trees" )
        {
            if ( mxIsInt32( prhs[i+1] ) )
            {
                int value = MatlabConversion::convertMatlabToInt32(prhs[i+1]);
                conf.sI("FPCRandomForests", variable, value);
            }
            else
            {
                std::string errorMsg = "Unexpected parameter value for \'" + variable + "\'. Int32 expected.";
                mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
            }
        }
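        // NOTE (sketch, not part of the original interface): the remaining keys listed above
        // could be parsed the same way; shown here only for 'features_per_tree' (a double value).
        // This assumes that NICE::Config::sD and MatlabConversion::convertMatlabToDouble exist,
        // analogous to the sI / convertMatlabToInt32 helpers used above -- adapt or drop this
        // branch if those helpers differ.
        else if ( variable == "features_per_tree" )
        {
            if ( mxIsDouble( prhs[i+1] ) )
            {
                double value = MatlabConversion::convertMatlabToDouble( prhs[i+1] );
                conf.sD( "FPCRandomForests", variable, value );
            }
            else
            {
                std::string errorMsg = "Unexpected parameter value for \'" + variable + "\'. Double expected.";
                mexErrMsgIdAndTxt( "mexnice:error", errorMsg.c_str() );
            }
        }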
    }

    return conf;
}

// MAIN MATLAB FUNCTION
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    // get the command string specifying what to do
    if (nrhs < 1)
        mexErrMsgTxt("No commands and options passed... Aborting!");

    if ( !mxIsChar( prhs[0] ) )
        mexErrMsgTxt("First argument needs to be the command, i.e., the class method to call... Aborting!");

    std::string cmd = MatlabConversion::convertMatlabToString( prhs[0] );

    // create object
    if ( !strcmp("new", cmd.c_str() ) )
    {
        // check output variable
        if (nlhs != 1)
            mexErrMsgTxt("New: One output expected.");

        // read config settings
        //NICE::Config conf = parseParametersGPHIKRegression(prhs+1,nrhs-1);
        int nMaxDepth = 10;

        // create class instance
        OBJREC::CodebookRandomForest *pRandomForest = new OBJREC::CodebookRandomForest(nMaxDepth);

        // handle to the C++ instance
        plhs[0] = MatlabConversion::convertPtr2Mat<OBJREC::CodebookRandomForest>( pRandomForest );
        return;
    }
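    // NOTE: the handle returned by 'new' presumably wraps the raw C++ pointer (cf. classHandleMtoC.h);
    // it has to be passed as the second input to every subsequent call and released again via 'delete'.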

    // in all other cases, there should be a second input,
    // which should be the class instance handle
    if (nrhs < 2)
        mexErrMsgTxt("Second input should be a class instance handle.");

    // delete object
    if ( !strcmp("delete", cmd.c_str() ) )
    {
        // Destroy the C++ object
        MatlabConversion::destroyObject<OBJREC::CodebookRandomForest>(prhs[1]);
        return;
    }

    // get the class instance pointer from the second input
    // every following function needs the object
    OBJREC::CodebookRandomForest *pCodebookClusterer = MatlabConversion::convertMat2Ptr<OBJREC::CodebookRandomForest>(prhs[1]);

    ////////////////////////////////////////
    //  Check which class method to call  //
    ////////////////////////////////////////

    // standard train - assumes an initialized object
    if (!strcmp("train", cmd.c_str() ))
    {
        // Check parameters
        if (nlhs < 0 || nrhs < 4)
        {
            mexErrMsgTxt("Train: Unexpected arguments.");
        }

        //------------- read the data --------------
        if (nrhs != 4)
        {
            mexErrMsgTxt("Train: needs two matrix inputs, first the training features, second the sample labels.");
            return;
        }

        const mxArray *t_pArrTrainData   = prhs[2];
        const mxArray *t_pArrTrainLabels = prhs[3];
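
        // NOTE: the feature matrix is presumably expected with one training example per column,
        // since the number of rows (mxGetM) is taken as the feature dimensionality below.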
        //std::vector< const NICE::SparseVector *> examplesTrain;

        int iNumFeatureDimension = mxGetM( t_pArrTrainData ); // feature dimensions

        OBJREC::Examples examplesTrain;
        bool bRet = MatlabConversion::convertDoubleRawPointersToExamples( t_pArrTrainData, t_pArrTrainLabels, examplesTrain );
        if ( !bRet )
        {
            mexErrMsgTxt("Train: Error creating Examples from raw feature matrix and labels.");
        }

        //----------------- train our random Forest -------------
        // read config settings
        OBJREC::FeaturePool fp;
        OBJREC::VectorFeature *pVecFeature = new OBJREC::VectorFeature(iNumFeatureDimension);
        pVecFeature->explode(fp);

        NICE::Config conf = parseParametersERC(prhs+1, nrhs-1);
        OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&conf, "FPCRandomForests");
        pRandForest->train(fp, examplesTrain);

        pCodebookClusterer->setClusterForest( pRandForest );
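        // NOTE: the forest pointer is handed over to the codebook and not deleted here;
        // presumably the CodebookRandomForest takes ownership of it.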

        //----------------- clean up -------------
        delete pVecFeature;
        pVecFeature = NULL;

        // delete all "exploded" features, they are internally cloned in the random trees anyway
        fp.destroy();

        for ( int i = 0; i < (int)examplesTrain.size(); i++ )
        {
            if ( examplesTrain[i].second.vec != NULL )
            {
                delete examplesTrain[i].second.vec;
                examplesTrain[i].second.vec = NULL;
            }
        }
        return;
    }

    /*
    // perform regression
    if ( !strcmp("estimate", cmd.c_str() ) )
    {
        // Check parameters
        if ( (nlhs < 0) || (nrhs < 2) )
        {
            mexErrMsgTxt("Test: Unexpected arguments.");
        }

        //------------- read the data --------------
        double result;
        double uncertainty;

        if ( mxIsSparse( prhs[2] ) )
        {
            NICE::SparseVector * example;
            example = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
            regressor->estimate ( example, result, uncertainty );

            //----------------- clean up -------------
            delete example;
        }
        else
        {
            NICE::Vector * example;
            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) );
            regressor->estimate ( example, result, uncertainty );

            //----------------- clean up -------------
            delete example;
        }

        // output
        plhs[0] = mxCreateDoubleScalar( result );
        if ( nlhs >= 2 )
        {
            plhs[1] = mxCreateDoubleScalar( uncertainty );
        }
        return;
    }

    // Uncertainty prediction
    if ( !strcmp("uncertainty", cmd.c_str() ) )
    {
        // Check parameters
        if ( (nlhs < 0) || (nrhs < 2) )
        {
            mexErrMsgTxt("Test: Unexpected arguments.");
        }

        double uncertainty;

        //------------- read the data --------------
        if ( mxIsSparse( prhs[2] ) )
        {
            NICE::SparseVector * example;
            example = new NICE::SparseVector ( converterMtoNICE.convertSparseVectorToNice( prhs[2] ) );
            regressor->predictUncertainty( example, uncertainty );

            //----------------- clean up -------------
            delete example;
        }
        else
        {
            NICE::Vector * example;
            example = new NICE::Vector ( converterMtoNICE.convertDoubleVectorToNice(prhs[2]) );
            regressor->predictUncertainty( example, uncertainty );

            //----------------- clean up -------------
            delete example;
        }

        // output
        plhs[0] = mxCreateDoubleScalar( uncertainty );
        return;
    }

    // Test - evaluate regressor on whole test set
    if ( !strcmp("testL2loss", cmd.c_str() ) )
    {
        // Check parameters
        if (nlhs < 0 || nrhs < 3)
            mexErrMsgTxt("Test: Unexpected arguments.");

        //------------- read the data --------------
        bool dataIsSparse ( mxIsSparse( prhs[2] ) );

        std::vector< const NICE::SparseVector *> dataTest_sparse;
        NICE::Matrix dataTest_dense;

        if ( dataIsSparse )
        {
            dataTest_sparse = converterMtoNICE.convertSparseMatrixToNice( prhs[2] );
        }
        else
        {
            dataTest_dense = converterMtoNICE.convertDoubleMatrixToNice(prhs[2]);
        }

        NICE::Vector yValuesTest;
        yValuesTest = converterMtoNICE.convertDoubleVectorToNice(prhs[3]);

        int i_numTestSamples ( yValuesTest.size() );
        double l2loss ( 0.0 );

        NICE::Vector scores;
        NICE::Vector::iterator itScores;
        if ( nlhs >= 2 )
        {
            scores.resize( i_numTestSamples );
            itScores = scores.begin();
        }

        // ------------------------------------------
        // -------------- REGRESSION ----------------
        // ------------------------------------------
        NICE::Timer t;
        double testTime (0.0);

        for (int i = 0; i < i_numTestSamples; i++)
        {
            //----------------- convert data to sparse data structures ---------
            double result;

            if ( dataIsSparse )
            {
                // and perform regression
                t.start();
                regressor->estimate( dataTest_sparse[ i ], result );
                t.stop();
                testTime += t.getLast();
            }
            else
            {
                NICE::Vector example ( dataTest_dense.getRow(i) );
                // and perform regression
                t.start();
                regressor->estimate( &example, result );
                t.stop();
                testTime += t.getLast();
            }

            l2loss += pow ( yValuesTest[i] - result, 2 );

            if ( nlhs >= 2 )
            {
                *itScores = result;
                itScores++;
            }
        }

        std::cerr << "Time for testing: " << testTime << std::endl;

        // clean up
        if ( dataIsSparse )
        {
            for ( std::vector<const NICE::SparseVector *>::iterator it = dataTest_sparse.begin(); it != dataTest_sparse.end(); it++ )
                delete *it;
        }

        plhs[0] = mxCreateDoubleScalar( l2loss );
        if ( nlhs >= 2 )
            plhs[1] = converterNICEtoM.convertVectorFromNice(scores);

        return;
    }

    ///////////////////// INTERFACE PERSISTENT /////////////////////
    // interface specific methods for store and restore
    ///////////////////// INTERFACE PERSISTENT /////////////////////

    // store the regressor to an external file
    if ( !strcmp("store", cmd.c_str() ) || !strcmp("save", cmd.c_str() ) )
    {
        // Check parameters
        if ( nrhs < 3 )
            mexErrMsgTxt("store: no destination given.");

        std::string s_destination = converterMtoNICE.convertMatlabToString( prhs[2] );

        std::filebuf fb;
        fb.open ( s_destination.c_str(), ios::out );
        std::ostream os(&fb);
        //
        regressor->store( os );
        //
        fb.close();

        return;
    }

    // load regressor from external file
    if ( !strcmp("restore", cmd.c_str() ) || !strcmp("load", cmd.c_str() ) )
    {
        // Check parameters
        if ( nrhs < 3 )
            mexErrMsgTxt("restore: no destination given.");

        std::string s_destination = converterMtoNICE.convertMatlabToString( prhs[2] );

        std::cerr << " aim at restoring the regressor from " << s_destination << std::endl;

        std::filebuf fbIn;
        fbIn.open ( s_destination.c_str(), ios::in );
        std::istream is (&fbIn);
        //
        regressor->restore( is );
        //
        fbIn.close();

        return;
    }
    */

    // Got here, so command not recognized
    std::string errorMsg (cmd.c_str() );
    errorMsg += " -- command not recognized.";
    mexErrMsgTxt( errorMsg.c_str() );
}