libsvm_test.m

function [predicted_label, accuracy, scores] = libsvm_test ( labels_test, feat_test, svmmodel, settings )
% BRIEF
%   A simple wrapper providing testing of 1-vs-all classification for LIBSVM.
%   No further settings are adjustable currently.
%
% INPUT
%   labels_test -- multi-class labels ( #samples x 1 )
%   feat_test   -- features of the test images ( #samples x #dimensions )
%   svmmodel    -- cell ( #classes x 1 ); every model entry is obtained via
%                  svmtrain on the corresponding 1-vs-all problem
%   settings    -- struct configuring the svm classification, e.g., via
%                  'b_verbose' ...
%
% OUTPUT
%   predicted_label -- note: index of the winning 1-vs-all model, i.e., in
%                      range [1, #classes], consecutively ordered. Don't
%                      forget to map it back to the original labels!
%   accuracy        -- fraction of correctly classified test samples
%   scores          -- 1-vs-all decision values ( #samples x #classes )
%
% date:   28-04-2014 ( dd-mm-yyyy )
% author: Alexander Freytag

    if ( nargin < 4 )
        settings = [];
    end

    libsvm_options = '';

    % suppress LIBSVM's per-model output unless verbose mode is requested
    if ( ~ getFieldWithDefault ( settings, 'b_verbose', false ) )
        libsvm_options = sprintf( '%s -q', libsvm_options );
    end

    i_numClasses = size ( svmmodel, 1 );
    i_numSamples = size ( labels_test, 1 );

    scores = zeros ( i_numSamples, i_numClasses );

    % classify with every one-against-all model
    for k = 1:i_numClasses
        % binary ground truth for class k: +1 for the current class, -1 otherwise
        yBin = 2*double ( labels_test == svmmodel{k}.uniqueLabel ) - 1;
        [~, ~, scores(:,k)] = svmpredict ( yBin, feat_test, svmmodel{k}.model, libsvm_options );
    end

    % predict the class with the highest 1-vs-all score
    [~, predicted_label] = max ( scores, [], 2 );

    % accuracy -- note: this comparison assumes the original labels are
    % 1:#classes; otherwise map predicted_label back to the corresponding
    % svmmodel{k}.uniqueLabel before comparing
    accuracy = sum ( predicted_label == labels_test ) ./ numel ( labels_test );
end
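% Example usage ( a minimal sketch, not part of the original file ): it
% assumes training data labels_train / feat_train and a hypothetical
% companion training loop that stores, per class k, a struct with the
% fields 'model' ( from svmtrain on the 1-vs-all problem ) and
% 'uniqueLabel':
%
%   uniqueLabels = unique ( labels_train );
%   svmmodel     = cell ( length ( uniqueLabels ), 1 );
%   for k = 1:length ( uniqueLabels )
%       yBin                    = 2*double ( labels_train == uniqueLabels(k) ) - 1;
%       svmmodel{k}.model       = svmtrain ( yBin, feat_train, '-t 0 -q' );
%       svmmodel{k}.uniqueLabel = uniqueLabels(k);
%   end
%
%   settings = struct ( 'b_verbose', false );
%   [predicted_label, accuracy, scores] = ...
%       libsvm_test ( labels_test, feat_test, svmmodel, settings );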
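% The wrapper relies on the helper getFieldWithDefault, which is not part of
% LIBSVM but ships separately with the author's accompanying toolbox. The
% local function below is only an assumed fallback, reconstructed from the
% call site above: return the requested field of the settings struct if it
% exists, otherwise the supplied default.
function val = getFieldWithDefault ( s, fieldname, default )
    if ( isstruct ( s ) && isfield ( s, fieldname ) )
        val = s.(fieldname);
    else
        val = default;
    end
end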