% liblinear_train_multicore.m
  1. function svmmodel = liblinear_train_multicore ( labels, feat, settings )
  2. %
  3. % BRIEF
  4. % A simple wrapper to provide training of 1-vs-all-classification for LIBLINEAR. No
  5. % further settings are adjustable currently.
  6. %
  7. % INPUT
  8. % labels -- multi-class labels (#sample x 1)
  9. % feat -- features for training images (#samples x # dimensions)
  10. % settings -- struct for configuring the svm model training, e.g., via
  11. % 'b_verbose', 'f_svm_C', ...
  12. %
  13. % OUTPUT:
  14. % svmmodel -- cell ( #classes x 1 ), every model entry is obtained via
  15. % svmtrain of the corresponding 1-vs-all-problem
  16. %
  17. % date: 30-04-2014 ( dd-mm-yyyy )
  18. % author: Alexander Freytag
  19. if ( nargin < 3 )
  20. settings = [];
  21. end
  22. libsvm_options = '';
  23. % outputs for training
  24. if ( ~ getFieldWithDefault ( settings, 'b_verbose', false ) )
  25. libsvm_options = sprintf('%s -q', libsvm_options);
  26. end
  27. % cost parameter
  28. f_svm_C = getFieldWithDefault ( settings, 'f_svm_C', 1);
  29. libsvm_options = sprintf('%s -c %f', libsvm_options, f_svm_C);
  30. % do we want to use an offset for the hyperplane?
  31. if ( getFieldWithDefault ( settings, 'b_addOffset', false) )
  32. libsvm_options = sprintf('%s -B 1', libsvm_options);
  33. end
  34. % which solver to use
  35. % copied from the liblinear manual:
  36. % for multi-class classification
  37. % 0 -- L2-regularized logistic regression (primal)
  38. % 2 -- L2-regularized L2-loss support vector classification (primal)
  39. % 11 -- l2-loss SVR
  40. i_svmSolver = getFieldWithDefault ( settings, 'i_svmSolver', 2);
  41. i_numThreads = getFieldWithDefault ( settings, 'i_numThreads', 2);
  42. libsvm_options = sprintf('%s -s %d -n %d', libsvm_options, i_svmSolver, i_numThreads);
  43. % increase penalty for positive samples according to invers ratio of
  44. % their number, i.e., if 1/3 is ratio of positive to negative samples, then
  45. % impact of positives is 3 the times of negatives
  46. %
  47. b_weightBalancing = getFieldWithDefault ( settings, 'b_weightBalancing', false);
  48. uniqueLabels = unique ( labels );
  49. i_numClasses = size ( uniqueLabels,1);
  50. %# train one-against-all models
  51. if ( ~b_weightBalancing)
  52. svmmodel = train( labels, feat, libsvm_options );
  53. else
  54. svmmodel = cell( i_numClasses,1);
  55. for k=1:i_numClasses
  56. yBin = 2*double( labels == uniqueLabels( k ) )-1;
  57. fraction = double(sum(yBin==1))/double(numel(yBin));
  58. libsvm_optionsLocal = sprintf('%s -w1 %f', libsvm_options, 1.0/fraction);
  59. svmmodel{ k } = train( yBin, feat, libsvm_optionsLocal );
  60. %store the unique class label for later evaluations.
  61. svmmodel{ k }.uniqueLabel = uniqueLabels( k );
  62. end
  63. end
  64. end