|
@@ -22,20 +22,20 @@ function svmmodel = liblinear_train ( labels, feat, settings )
|
|
|
end
|
|
|
|
|
|
|
|
|
- libsvm_options = '';
|
|
|
+ liblinear_options = '';
|
|
|
|
|
|
% outputs for training
|
|
|
if ( ~ getFieldWithDefault ( settings, 'b_verbose', false ) )
|
|
|
- libsvm_options = sprintf('%s -q', libsvm_options);
|
|
|
+ liblinear_options = sprintf('%s -q', liblinear_options);
|
|
|
end
|
|
|
|
|
|
% cost parameter
|
|
|
f_svm_C = getFieldWithDefault ( settings, 'f_svm_C', 1);
|
|
|
- libsvm_options = sprintf('%s -c %f', libsvm_options, f_svm_C);
|
|
|
+ liblinear_options = sprintf('%s -c %f', liblinear_options, f_svm_C);
|
|
|
|
|
|
% do we want to use an offset for the hyperplane?
|
|
|
if ( getFieldWithDefault ( settings, 'b_addOffset', false) )
|
|
|
- libsvm_options = sprintf('%s -B 1', libsvm_options);
|
|
|
+ liblinear_options = sprintf('%s -B 1', liblinear_options);
|
|
|
end
|
|
|
|
|
|
% which solver to use
|
|
@@ -50,7 +50,7 @@ function svmmodel = liblinear_train ( labels, feat, settings )
|
|
|
% 6 -- L1-regularized logistic regression
|
|
|
% 7 -- L2-regularized logistic regression (dual)
|
|
|
i_svmSolver = getFieldWithDefault ( settings, 'i_svmSolver', 1);
|
|
|
- libsvm_options = sprintf('%s -s %d', libsvm_options, i_svmSolver);
|
|
|
+ liblinear_options = sprintf('%s -s %d', liblinear_options, i_svmSolver);
|
|
|
|
|
|
|
|
|
% increase penalty for positive samples according to invers ratio of
|
|
@@ -59,6 +59,16 @@ function svmmodel = liblinear_train ( labels, feat, settings )
|
|
|
%
|
|
|
b_weightBalancing = getFieldWithDefault ( settings, 'b_weightBalancing', false);
|
|
|
|
|
|
+ % optionally run k-fold cross validation instead of training a single
|


|


|
+ % model on the entire data set; accuracy is then estimated on the
|


|


|
+ % held-out folds
|


|


|
+ %
|
|
|
+ b_cross_val = getFieldWithDefault ( settings, 'b_cross_val', false);
|
|
|
+ if ( b_cross_val && (length(unique(labels)) ~=2 ) )
|
|
|
+ i_num_folds = getFieldWithDefault ( settings, 'i_num_folds', 10);
|
|
|
+ liblinear_options = sprintf('%s -v %d', liblinear_options, i_num_folds );
|
|
|
+ end
|
|
|
+
|
|
|
|
|
|
|
|
|
uniqueLabels = unique ( labels );
|
|
@@ -68,19 +78,25 @@ function svmmodel = liblinear_train ( labels, feat, settings )
|
|
|
%# train one-against-all models
|
|
|
|
|
|
if ( ~b_weightBalancing)
|
|
|
- svmmodel = train( labels, feat, libsvm_options );
|
|
|
+ if ( b_cross_val && (length(unique(labels)) ==2 ) )
|
|
|
+
|
|
|
+ % the accuracy measure used during cross validation is the AUC
|
|
|
+ svmmodel = do_binary_cross_validation( labels, feat, liblinear_options, getFieldWithDefault ( settings, 'i_num_folds', 10) );
|
|
|
+ else
|
|
|
+ svmmodel = train( labels, feat, liblinear_options );
|
|
|
+ end
|
|
|
else
|
|
|
svmmodel = cell( i_numClasses,1);
|
|
|
for k=1:length(i_classesToRun)
|
|
|
yBin = 2*double( labels == uniqueLabels( k ) )-1;
|
|
|
|
|
|
fraction = double(sum(yBin==1))/double(numel(yBin));
|
|
|
- libsvm_optionsLocal = sprintf('%s -w1 %f', libsvm_options, 1.0/fraction);
|
|
|
- svmmodel{ k } = train( yBin, feat, libsvm_optionsLocal );
|
|
|
+ liblinear_optionsLocal = sprintf('%s -w1 %f', liblinear_options, 1.0/fraction);
|
|
|
+ svmmodel{ k } = train( yBin, feat, liblinear_optionsLocal );
|
|
|
|
|
|
%store the unique class label for later evaluations.
|
|
|
svmmodel{ k }.uniqueLabel = uniqueLabels( k );
|
|
|
end
|
|
|
end
|
|
|
-
|
|
|
+
|
|
|
end
|