liblinear_train.m

function svmmodel = liblinear_train ( labels, feat, settings )
%
% BRIEF
%   A simple wrapper providing training of 1-vs-all classification with
%   LIBLINEAR. No further settings are currently adjustable.
%
% INPUT
%   labels   -- multi-class labels ( #samples x 1 )
%   feat     -- features of the training images ( #samples x #dimensions )
%   settings -- struct for configuring the SVM model training, e.g., via
%               'b_verbose', 'f_svm_C', ...
%
% OUTPUT
%   svmmodel -- a single LIBLINEAR model (or, with 'b_weightBalancing',
%               a cell ( #classes x 1 ) where every entry is obtained via
%               LIBLINEAR's train on the corresponding 1-vs-all problem)
%
% date:          30-04-2014 ( dd-mm-yyyy )
% last modified: 22-10-2015
% author:        Alexander Freytag, Christoph Käding
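%
% EXAMPLE
%   ( illustrative sketch only; it assumes getFieldWithDefault and the
%     LIBLINEAR mex interface are on the path, and that feat is a sparse
%     matrix as expected by LIBLINEAR's train )
%
%     settings             = struct();
%     settings.f_svm_C     = 10;
%     settings.b_addOffset = true;
%     settings.i_svmSolver = 2;
%     svmmodel = liblinear_train ( labels, feat, settings );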

if ( nargin < 3 )
    settings = [];
end

liblinear_options = '';

% suppress the solver's training output unless verbose mode is requested
if ( ~ getFieldWithDefault ( settings, 'b_verbose', false ) )
    liblinear_options = sprintf('%s -q', liblinear_options);
end

% cost parameter
f_svm_C           = getFieldWithDefault ( settings, 'f_svm_C', 1);
liblinear_options = sprintf('%s -c %f', liblinear_options, f_svm_C);

% do we want to use an offset for the hyperplane?
if ( getFieldWithDefault ( settings, 'b_addOffset', false) )
    liblinear_options = sprintf('%s -B 1', liblinear_options);
end
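% NOTE: with '-B 1', LIBLINEAR appends a constant feature of value 1 to
%       every instance; its learned weight serves as the bias/offset term.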
% add multithreading
% NOTE: - requires liblinear-multicore
%       - supports only -s 0, -s 2, or -s 11 (so far)
i_numThreads = getFieldWithDefault ( settings, 'i_numThreads', 1);
if ( i_numThreads > 1 )
    liblinear_options = sprintf('%s -n %d', liblinear_options, i_numThreads);
end
% which solver to use
% copied from the LIBLINEAR manual:
% for multi-class classification
%   0 -- L2-regularized logistic regression (primal)
%   1 -- L2-regularized L2-loss support vector classification (dual)
%   2 -- L2-regularized L2-loss support vector classification (primal)
%   3 -- L2-regularized L1-loss support vector classification (dual)
%   4 -- support vector classification by Crammer and Singer
%   5 -- L1-regularized L2-loss support vector classification
%   6 -- L1-regularized logistic regression
%   7 -- L2-regularized logistic regression (dual)
i_svmSolver       = getFieldWithDefault ( settings, 'i_svmSolver', 1);
liblinear_options = sprintf('%s -s %d', liblinear_options, i_svmSolver);
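% NOTE: with all default settings the option string assembled up to this
%       point reads roughly ' -q -c 1.000000 -s 1', i.e., quiet mode,
%       C = 1, and the L2-regularized L2-loss SVC (dual) solver.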
% increase the penalty for positive samples according to the inverse ratio
% of their number, i.e., if the ratio of positive to negative samples is
% 1/3, then the impact of each positive sample is 3 times that of a
% negative one
%
b_weightBalancing = getFieldWithDefault ( settings, 'b_weightBalancing', false);

% perform cross validation instead of training a single model?
%
b_cross_val       = getFieldWithDefault ( settings, 'b_cross_val', false);
if ( b_cross_val && (length(unique(labels)) ~= 2 ) )
    i_num_folds       = getFieldWithDefault ( settings, 'i_num_folds', 10);
    liblinear_options = sprintf('%s -v %d', liblinear_options, i_num_folds );
end
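% NOTE: when '-v' is passed, LIBLINEAR's train performs cross validation
%       and returns only the cross-validation accuracy (a scalar) instead
%       of a model struct.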
uniqueLabels = unique ( labels );
i_numClasses = size ( uniqueLabels, 1 );

% train one-against-all models
if ( ~b_weightBalancing )
    if ( b_cross_val && (length(unique(labels)) == 2 ) )
        % measure of accuracy during cross validation is AUC
        svmmodel = do_binary_cross_validation( labels, feat, liblinear_options, ...
                       getFieldWithDefault ( settings, 'i_num_folds', 10) );
    else
        svmmodel = train( labels, feat, liblinear_options );
    end
else
    svmmodel = cell( i_numClasses, 1 );
    for k=1:i_numClasses
        % map the k-th class to +1 and all remaining classes to -1
        yBin = 2*double( labels == uniqueLabels( k ) ) - 1;
        % up-weight the positive class by the inverse of its fraction
        fraction = double(sum(yBin==1))/double(numel(yBin));
        liblinear_optionsLocal = sprintf('%s -w1 %f', liblinear_options, 1.0/fraction);
        svmmodel{ k } = train( yBin, feat, liblinear_optionsLocal );
        % store the unique class label for later evaluations
        svmmodel{ k }.uniqueLabel = uniqueLabels( k );
    end
end
end
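
% EXAMPLE for the weight-balanced 1-vs-all training (illustrative sketch,
% again assuming getFieldWithDefault and LIBLINEAR are available):
%
%   settings                   = struct();
%   settings.b_weightBalancing = true;
%   svmmodel = liblinear_train ( labels, feat, settings );
%   % svmmodel is now a cell array with one binary model per class;
%   % svmmodel{k}.uniqueLabel holds the class that the k-th model
%   % separates from all others.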