progCodebookRandomForest.cpp

/**
 * @brief Extremely randomized clustering forest program for Matlab input data.
 *
 * @author Johannes Ruehle
 * @date 10/05/2014
 */
#include <string>
#include <exception>
#include <iostream>
#include <fstream>

#include <QString> // command-line arguments are handled as QString below

//----------
#include "vislearning/features/simplefeatures/CodebookRandomForest.h"
#include "vislearning/features/fpfeatures/VectorFeature.h"
#include "vislearning/cbaselib/FeaturePool.h"

#ifdef NICE_USELIB_MATIO
#include <core/matlabAccess/MatFileIO.h>

const bool verbose = false;
const bool verboseStartEnd = true;

using namespace OBJREC;
using namespace NICE;
using namespace std;

#undef DEBUG_VERBOSE

// Collected command-line options, filled in main() and passed to the worker functions.
struct structCommands
{
    QString sFunction;
    QString sFileTrainData;
    QString sFileTrainDataLabels;
    QString sConfigFile;
    QString sFileStoreClassifier; // txt file storing the config of the trained codebook rdf
    QString sFileStoreResult;     // text file storing the generated histogram (see saveMatlabVector)
};
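
/* Illustrative usage (a sketch only; the binary name and the .mat/.txt file names are
 * placeholders, the flags are the ones parsed in main() below). The input mat files are
 * expected to contain "matFeatures" (one feature vector per column) and, for training,
 * "matLabels" (one label per sample); for histogram generation a scalar "fodID" is read
 * from the data file as well:
 *
 *   # train a codebook random forest and store it
 *   ./progCodebookRandomForest --function createAndTrain \
 *       --config rdfsettings.conf --traindata trainFeatures.mat \
 *       --traindatalabels trainLabels.mat --classifier codebookRDF.txt
 *
 *   # quantize a feature matrix into a bag-of-words histogram with the stored forest
 *   ./progCodebookRandomForest --function generateHistogram \
 *       --traindata queryFeatures.mat --classifier codebookRDF.txt --results histogram.txt
 */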
// Load a 2-D double matrix named matrix_name from the mat file sFilename into p_Matrix.
bool loadMatlabMatrix(const std::string &sFilename, const std::string &matrix_name, NICE::Matrix &p_Matrix)
{
    NICE::MatFileIO matlab_file(sFilename, MAT_ACC_RDONLY);

#ifdef DEBUG_VERBOSE
    // Show the number of variables in the file
    int vars_in_file = matlab_file.getNumberOfVariables();
    std::cout << vars_in_file << " variables in " << sFilename << "\n";
    // Load the matrix
    std::cout << "Loading matrix \"" << matrix_name << "\"...\n";
#endif

    // Check if the variable is a matrix
    matvar_t* matrix_variable = matlab_file.getVariableViaName(matrix_name);
    if(matrix_variable == NULL)
    {
        std::cout << "Variable \"" << matrix_name << "\" not found in mat file.\n";
        return false;
    }
    if(matrix_variable->rank != 2)
    {
        std::cout << "Variable is not a matrix. Rank: " << matrix_variable->rank << ".\n";
        Mat_VarFree(matrix_variable);
        return false;
    }

    // Read the dimensions
    int cols = matrix_variable->dims[1];
    int rows = matrix_variable->dims[0];
    std::cout << "Dimensions: " << rows << " x " << cols << "\n";
    Mat_VarFree(matrix_variable);

    // Read the matrix into a vector of vectors
    std::vector< std::vector<double> > matrix_vecvec(rows, std::vector<double>(cols));
    matlab_file.getFeatureMatrixViaName(matrix_vecvec, matrix_name);

    // Now, we want a NICE matrix
    //NICE::MatrixT<double> matrix(rows, cols);
    p_Matrix.resize(rows, cols);
    for(int i = 0; i < rows; i++)
    {
        for(int j = 0; j < cols; j++)
        {
            p_Matrix(i,j) = matrix_vecvec[i][j];
        }
    }
    return true;
}
// Load a double matrix named matrix_name from the mat file sFilename.
// Returns a newly allocated matrix (caller takes ownership) or NULL on failure.
NICE::Matrix* loadMatlabVec(const std::string &sFilename, const std::string &matrix_name)
{
    NICE::Matrix *pMatrix = NULL;
    NICE::MatFileIO *matFile = new NICE::MatFileIO(sFilename, MAT_ACC_RDONLY);

    matvar_t *t = matFile->getVariableViaName(matrix_name);
    if( t == NULL )
    {
        std::cerr << "variable \"" << matrix_name << "\" not found in " << sFilename << std::endl;
        delete matFile;
        return NULL;
    }
    if( t->class_type == MAT_C_DOUBLE )
    {
        double *pD = (double*)( t->data );
        pMatrix = new NICE::Matrix(pD, (int)t->dims[0], (int)t->dims[1], Matrix::copy);
    }
    else
    {
        std::cerr << "raw format of matlab matrix not supported" << std::endl;
    }
    Mat_VarFree(t);
    delete matFile;
    return pMatrix;
}
// Write the histogram vector and its fodID to a plain text file.
bool saveMatlabVector(const std::string &sFilename, const NICE::Vector &p_Vector, int p_iFodID)
{
    std::ofstream ofs;
    ofs.open(sFilename.c_str(), std::ofstream::out);
    if (!ofs.is_open())
        return false;

    ofs << p_iFodID << " #fodID" << std::endl;
    ofs << p_Vector.size() << std::endl;
    for(size_t i = 0; i < p_Vector.size(); i++)
        ofs << p_Vector[i] << std::endl;
    ofs.close();
    return true;
}
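
/* The result file written by saveMatlabVector() is plain text: the fodID, the number of
 * histogram bins, then one bin value per line (values below are made up for illustration):
 *
 *   7 #fodID
 *   100
 *   0.25
 *   0
 *   3
 *   ...
 */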
// Serialize the trained codebook random forest to the file given by sFileStoreClassifier.
bool storeClassifier(const structCommands &p_Command, const OBJREC::CodebookRandomForest *p_pCodebookRandomForest)
{
    if( p_Command.sFileStoreClassifier.isEmpty() )
        return false;

    std::string t_sDestinationSave = p_Command.sFileStoreClassifier.toStdString();

    std::ofstream ofs;
    ofs.open(t_sDestinationSave.c_str(), std::ofstream::out);
    if (!ofs.is_open())
        return false;

    p_pCodebookRandomForest->store( ofs );
    ofs.close();
    return true;
}

// Restore a previously stored codebook random forest from the file given by sFileStoreClassifier.
bool restoreClassifier(const structCommands &p_Command, OBJREC::CodebookRandomForest *p_pCodebookRandomForest)
{
    if( p_Command.sFileStoreClassifier.isEmpty() )
        return false;
    if( p_pCodebookRandomForest == NULL )
        return false;

    std::string t_sDestinationSave = p_Command.sFileStoreClassifier.toStdString();

    std::ifstream ifs2;
    ifs2.open( t_sDestinationSave.c_str() );
    if (!ifs2.is_open())
        return false;

    p_pCodebookRandomForest->restore( ifs2 );
    ifs2.close();
    return true;
}
// Train a random forest on the Matlab training data, wrap it into a codebook
// (extremely randomized clustering forest), and store it to disk.
bool createAndTrain( const structCommands &p_Command )
{
    if( p_Command.sConfigFile.isEmpty() )
    {
        std::cout << "no config file provided. Exiting" << std::endl;
        return false;
    }
    NICE::Config t_conf = NICE::Config( p_Command.sConfigFile.toStdString() );

    Matrix *t_pMatDataTrain = loadMatlabVec( p_Command.sFileTrainData.toStdString(), "matFeatures" );
    if( t_pMatDataTrain == NULL )
    {
        std::cout << "Training data matrix couldn't be loaded" << std::endl;
        return false;
    }

#ifdef DEBUG_VERBOSE
    for(int i = 0; i < 10; i++)
    {
        std::cerr << (*t_pMatDataTrain)(i,0) << " ## " << (*t_pMatDataTrain)(0,i) << std::endl;
    }
#endif

    Matrix *t_pMatDataTrainLabels = loadMatlabVec( p_Command.sFileTrainDataLabels.toStdString(), "matLabels" );
    if( t_pMatDataTrainLabels == NULL )
    {
        std::cout << "Training data label matrix couldn't be loaded" << std::endl;
        return false;
    }

    int iNumFeatureDimension = t_pMatDataTrain->rows();
    NICE::Vector t_vecLabelsTrain( t_pMatDataTrainLabels->getDataPointer(), t_pMatDataTrainLabels->rows(), Vector::external );

    OBJREC::Examples examplesTrain;
    bool bRet = OBJREC::Examples::wrapExamplesAroundFeatureMatrix( *t_pMatDataTrain, t_vecLabelsTrain, examplesTrain );
    if( !bRet )
    {
        std::cout << "createAndTrain: Error creating Examples from raw feature matrix and labels." << std::endl;
        return false;
    }

    //----------------- create raw feature mapping -------------
    OBJREC::FeaturePool fp;
    OBJREC::VectorFeature *pVecFeature = new OBJREC::VectorFeature(iNumFeatureDimension);
    pVecFeature->explode(fp);

#ifdef DEBUG_VERBOSE
    //----------------- debug features -------------
    OBJREC::Example t_Exp = examplesTrain[0].second;
    NICE::Vector t_FeatVector;
    fp.calcFeatureVector(t_Exp, t_FeatVector);
    std::cerr << "first full feature vector: " << t_FeatVector << std::endl;
#endif

    //----------------- train our random forest -------------
    OBJREC::FPCRandomForests *pRandForest = new OBJREC::FPCRandomForests(&t_conf, "RandomForest");
    pRandForest->train(fp, examplesTrain);

    //----------------- create codebook ERC clusterer -------------
    int nMaxDepth = t_conf.gI("CodebookRandomForest", "maxDepthTree", 10);
    int nMaxCodebookSize = t_conf.gI("CodebookRandomForest", "maxCodebookSize", 100);
#ifdef DEBUG_VERBOSE
    std::cerr << "maxDepthTree " << nMaxDepth << std::endl;
    std::cerr << "nMaxCodebookSize " << nMaxCodebookSize << std::endl;
#endif
    OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(pRandForest, nMaxDepth, nMaxCodebookSize);

    //----------------- store classifier in file ---------------------
    bool bSuccess = storeClassifier(p_Command, pCodebookRandomForest);

    //----------------- clean up -------------
    delete pCodebookRandomForest;
    delete pVecFeature;
    pVecFeature = NULL;
    // delete all "exploded" features, they are internally cloned in the random trees anyway
    fp.destroy();
    examplesTrain.clean();
    delete t_pMatDataTrain;
    delete t_pMatDataTrainLabels;

    return bSuccess;
}
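
/* A minimal sketch of the config file read by createAndTrain(), assuming the usual
 * INI-style NICE::Config syntax; only the two keys queried in this program are shown,
 * the [RandomForest] section takes whatever options FPCRandomForests understands:
 *
 *   [RandomForest]
 *   # options for the underlying FPCRandomForests training go here
 *
 *   [CodebookRandomForest]
 *   maxDepthTree = 10
 *   maxCodebookSize = 100
 */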
// Quantize the feature matrix from the input mat file into a bag-of-words histogram
// using a previously trained and stored codebook random forest.
bool generateHistogram( const structCommands &p_Command )
{
    Matrix *t_pMatFodID = loadMatlabVec( p_Command.sFileTrainData.toStdString(), "fodID" );
    if( t_pMatFodID == NULL )
    {
        std::cout << "Data matrix didn't include a fodID, so it couldn't be loaded" << std::endl;
        return false;
    }
    int iFodID = (*t_pMatFodID)(0,0);

    Matrix *t_pMatDataTrain = loadMatlabVec( p_Command.sFileTrainData.toStdString(), "matFeatures" );
    if( t_pMatDataTrain == NULL )
    {
        std::cout << "Data matrix couldn't be loaded" << std::endl;
        return false;
    }

    //----------------- restore trained codebook forest -------------
    OBJREC::CodebookRandomForest *pCodebookRandomForest = new OBJREC::CodebookRandomForest(-1, -1);
    if( !restoreClassifier(p_Command, pCodebookRandomForest) )
    {
        std::cout << "Error restoring codebook random forest" << std::endl;
        return false;
    }

    size_t numTrainSamples = t_pMatDataTrain->cols();
    size_t iNumFeatureDimension = t_pMatDataTrain->rows();
    size_t iNumCodewords = pCodebookRandomForest->getCodebookSize();
#ifdef DEBUG_VERBOSE
    std::cerr << "numTrainSamples " << numTrainSamples << std::endl;
    std::cerr << "iNumFeatureDimension " << iNumFeatureDimension << std::endl;
    std::cerr << "iNumCodewords " << iNumCodewords << std::endl;
#endif

    //----------------- parse config options -------------
    bool bVerboseOutput = false;
    // if( nrhs > 3)
    // {
    //     NICE::Config conf = parseParametersERC(prhs+3, nrhs-3 );
    //     bVerboseOutput = conf.gB("CodebookRandomForest", "verbose", false);
    // }

    //----------------- quantize samples into histogram -------------
    NICE::Vector histogram(iNumCodewords, 0.0f);

    const double *pDataPtr = t_pMatDataTrain->getDataPointer();
    int t_iCodebookEntry;
    double t_fWeight;
    double t_fDistance;
    for (size_t i = 0; i < numTrainSamples; i++, pDataPtr += iNumFeatureDimension)
    {
        const NICE::Vector t_VecTrainData( pDataPtr, iNumFeatureDimension );
        pCodebookRandomForest->voteVQ(t_VecTrainData, histogram, t_iCodebookEntry, t_fWeight, t_fDistance);
        if(bVerboseOutput)
            std::cerr << i << ": CBEntry " << t_iCodebookEntry << " Weight: " << t_fWeight << " Distance: " << t_fDistance << std::endl;
    }

    // store histogram
    bool bSuccess = saveMatlabVector(p_Command.sFileStoreResult.toStdString(), histogram, iFodID);

    //----------------- clean up -------------
    delete pCodebookRandomForest;
    delete t_pMatDataTrain;
    delete t_pMatFodID;

    return bSuccess;
}
#endif

int main(int argc, char **argv)
{
#ifdef NICE_USELIB_MATIO
#ifndef __clang__
#ifndef __llvm__
    std::set_terminate(__gnu_cxx::__verbose_terminate_handler);
#endif
#endif

    //----------------- parse command-line options into sCommand -------------
    structCommands sCommand;

    QString sCmdArg;
    int iCurrArgIdx = 1;
    while(iCurrArgIdx < argc)
    {
        sCmdArg = QString(argv[iCurrArgIdx]);
        if( sCmdArg == "--function" )
        {
            iCurrArgIdx++;
            sCommand.sFunction = QString(argv[iCurrArgIdx]);
        }
        else if( sCmdArg == "--config" )
        {
            iCurrArgIdx++;
            sCommand.sConfigFile = QString(argv[iCurrArgIdx]);
        }
        else if( sCmdArg == "--traindata" )
        {
            iCurrArgIdx++;
            sCommand.sFileTrainData = QString(argv[iCurrArgIdx]);
        }
        else if( sCmdArg == "--traindatalabels" )
        {
            iCurrArgIdx++;
            sCommand.sFileTrainDataLabels = QString(argv[iCurrArgIdx]);
        }
        else if( sCmdArg == "--results" )
        {
            iCurrArgIdx++;
            sCommand.sFileStoreResult = QString(argv[iCurrArgIdx]);
        }
        else if( sCmdArg == "--classifier" )
        {
            iCurrArgIdx++;
            sCommand.sFileStoreClassifier = QString(argv[iCurrArgIdx]);
        }
        else if( sCmdArg == "--help" )
        {
            // print_usage();
            return 0;
        }
        else
        {
            std::cout << "unknown command arg: " << sCmdArg.toStdString() << std::endl;
        }
        iCurrArgIdx++;
    }

    ///////////////////////////////////////////////////
    //----------------- dispatch to the selected function -------------
    try
    {
        bool bSuccess = true;
        if( sCommand.sFunction.compare("createAndTrain") == 0 )
        {
            bSuccess = createAndTrain(sCommand);
        }
        else if( sCommand.sFunction.compare("generateHistogram") == 0 )
        {
            bSuccess = generateHistogram(sCommand);
        }
        else
        {
            std::cout << "unknown function: " << sCommand.sFunction.toStdString() << std::endl;
        }
        if( !bSuccess )
        {
            std::cerr << "function " << sCommand.sFunction.toStdString() << " failed" << std::endl;
            return -1;
        }
    }
    catch(std::exception &e)
    {
        std::cerr << "exception occurred: " << e.what() << std::endl;
        return -1;
    }
#else
    return -1;
#endif
    return 0;
}