% liblinear_train.m
  1. function svmmodel = liblinear_train ( labels, feat, settings )
  2. %
  3. % BRIEF
  4. % A simple wrapper to provide training of 1-vs-all-classification for LIBLINEAR. No
  5. % further settings are adjustable currently.
  6. %
  7. % INPUT
  8. % labels -- multi-class labels (#sample x 1)
  9. % feat -- features for training images (#samples x # dimensions)
  10. % settings -- struct for configuring the svm model training, e.g., via
  11. % 'b_verbose', 'f_svm_C', ...
  12. %
  13. % OUTPUT:
  14. % svmmodel -- cell ( #classes x 1 ), every model entry is obtained via
  15. % svmtrain of the corresponding 1-vs-all-problem
  16. %
  17. % date: 30-04-2014 ( dd-mm-yyyy )
  18. % author: Alexander Freytag
  19. if ( nargin < 3 )
  20. settings = [];
  21. end
  22. liblinear_options = '';
  23. % outputs for training
  24. if ( ~ getFieldWithDefault ( settings, 'b_verbose', false ) )
  25. liblinear_options = sprintf('%s -q', liblinear_options);
  26. end
  27. % cost parameter
  28. f_svm_C = getFieldWithDefault ( settings, 'f_svm_C', 1);
  29. liblinear_options = sprintf('%s -c %f', liblinear_options, f_svm_C);
  30. % do we want to use an offset for the hyperplane?
  31. if ( getFieldWithDefault ( settings, 'b_addOffset', false) )
  32. liblinear_options = sprintf('%s -B 1', liblinear_options);
  33. end
  34. % which solver to use
  35. % copied from the liblinear manual:
  36. % for multi-class classification
  37. % 0 -- L2-regularized logistic regression (primal)
  38. % 1 -- L2-regularized L2-loss support vector classification (dual)
  39. % 2 -- L2-regularized L2-loss support vector classification (primal)
  40. % 3 -- L2-regularized L1-loss support vector classification (dual)
  41. % 4 -- support vector classification by Crammer and Singer
  42. % 5 -- L1-regularized L2-loss support vector classification
  43. % 6 -- L1-regularized logistic regression
  44. % 7 -- L2-regularized logistic regression (dual)
  45. i_svmSolver = getFieldWithDefault ( settings, 'i_svmSolver', 1);
  46. liblinear_options = sprintf('%s -s %d', liblinear_options, i_svmSolver);
  47. % increase penalty for positive samples according to invers ratio of
  48. % their number, i.e., if 1/3 is ratio of positive to negative samples, then
  49. % impact of positives is 3 the times of negatives
  50. %
  51. b_weightBalancing = getFieldWithDefault ( settings, 'b_weightBalancing', false);
  52. % increase penalty for positive samples according to invers ratio of
  53. % their number, i.e., if 1/3 is ratio of positive to negative samples, then
  54. % impact of positives is 3 the times of negatives
  55. %
  56. b_cross_val = getFieldWithDefault ( settings, 'b_cross_val', false);
  57. if ( b_cross_val && (length(unique(labels)) ~=2 ) )
  58. i_num_folds = getFieldWithDefault ( settings, 'i_num_folds', 10);
  59. liblinear_options = sprintf('%s -v %d', liblinear_options, i_num_folds );
  60. end
  61. uniqueLabels = unique ( labels );
  62. i_numClasses = size ( uniqueLabels,1);
  63. %# train one-against-all models
  64. if ( ~b_weightBalancing)
  65. if ( b_cross_val && (length(unique(labels)) ==2 ) )
  66. % measure of accuracy during cross validation is auc
  67. svmmodel = do_binary_cross_validation( labels, feat, liblinear_options, getFieldWithDefault ( settings, 'i_num_folds', 10) );
  68. else
  69. svmmodel = train( labels, feat, liblinear_options );
  70. end
  71. else
  72. svmmodel = cell( i_numClasses,1);
  73. for k=1:length(i_classesToRun)
  74. yBin = 2*double( labels == uniqueLabels( k ) )-1;
  75. fraction = double(sum(yBin==1))/double(numel(yBin));
  76. liblinear_optionsLocal = sprintf('%s -w1 %f', liblinear_options, 1.0/fraction);
  77. svmmodel{ k } = train( yBin, feat, liblinear_optionsLocal );
  78. %store the unique class label for later evaluations.
  79. svmmodel{ k }.uniqueLabel = uniqueLabels( k );
  80. end
  81. end
  82. end