function multiclass_gp_1D_example()
% plot GP model outputs for 1D artificial data
% play around with param settings to become familiar with GP models
%
% Copyright (c) by Alexander Freytag, 2013-11-13.

    % get some artificial 1D data
    train_data   = [1; 2.5; 4.5];
    train_labels = [1; 2; 4];
    test_data    = (0.1:0.01:5)';

    % some default settings of hyperparameters
    loghyper = [-1 0];
    gpnoise  = 0.01;
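    % note: covSEiso follows the GPML toolbox convention, i.e.
    % loghyper = [log(length-scale), log(signal std)]; gpnoise is the noise
    % level handed to learn_multiclass_gp below -- how it is used there
    % (e.g., added to the kernel diagonal) is not shown in this file.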

    % learn multi-class GP model
    K     = feval('covSEiso', loghyper, train_data);            % kernel matrix of the training data
    model = learn_multiclass_gp(K, train_labels, gpnoise);

    % evaluate model on test data
    Ks  = feval('covSEiso', loghyper, train_data, test_data);   % train-test cross covariances
    Kss = feval('covSEiso', loghyper, test_data, 'diag');       % test self-variances
    [mu, pred_labels, variance] = test_multiclass_gp(model, Ks, Kss');
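    % mu:          (numTest x numClasses) mean of each binary one-vs-all task
    % pred_labels: (numTest x 1) predicted label, presumably the class whose task responds strongest
    % variance:    (numTest x 1) predictive variance at the test points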

    % visualize everything nicely
    f1 = figure(1);
    % mark the training inputs (at height 1), each in the color of its class
    plot(train_data(1), 1, 'bx');
    title('Mean curve of each binary task');
    hold on
    plot(train_data(2), 1, 'gx');
    plot(train_data(3), 1, 'rx');
    % mean curve of every binary one-vs-all task
    plot(test_data, mu(:,1), 'b');
    plot(test_data, mu(:,2), 'g');
    plot(test_data, mu(:,3), 'r');
    hold off

    f2 = figure(2);
    plot(test_data, pred_labels, 'kx');
    title('Predicted multi-class labels');

    f3 = figure(3);
    title('Multi-class posterior mean and variance');
    colors = {'b', 'g', 'r'};
    hold on
    % shaded band: mean of the winning task +/- one predictive std deviation
    max_mean = max(mu, [], 2);
    lower    = max_mean - sqrt(variance);
    upper    = max_mean + sqrt(variance);
    p = [test_data, lower; flipud(test_data), flipud(upper)];
    fill(p(:,1), p(:,2), 'y');
    % overlay the mean curve of the predicted class, color-coded per class
    for k = 1:length(model.unique_labels)
        tmp_ID = (pred_labels == model.unique_labels(k));
        plot(test_data(tmp_ID), mu(tmp_ID,k), colors{k}, 'LineWidth', 2);
    end
    hold off
end
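
% ---------------------------------------------------------------------
% The helpers learn_multiclass_gp and test_multiclass_gp used above are
% not part of this file. The two local functions below are only a minimal
% sketch of what such a one-vs-all GP regression scheme could look like
% (one binary regression task per class with a shared kernel); the
% *_sketch names are hypothetical and the actual implementations may differ.
% ---------------------------------------------------------------------

function model = learn_multiclass_gp_sketch(K, labels, noise)
    % regularized kernel matrix, factorized once and shared by all tasks
    model.L             = chol(K + noise*eye(size(K,1)), 'lower');
    model.unique_labels = unique(labels);

    nClasses    = length(model.unique_labels);
    model.alpha = zeros(size(K,1), nClasses);
    for k = 1:nClasses
        % one-vs-all targets: +1 for the current class, -1 for all others
        y                = 2*double(labels == model.unique_labels(k)) - 1;
        model.alpha(:,k) = model.L' \ (model.L \ y);
    end
end

function [mu, pred_labels, variance] = test_multiclass_gp_sketch(model, Ks, Kss)
    % posterior mean of every binary task (one column per class)
    mu = Ks' * model.alpha;

    % predicted class = task with the largest posterior mean
    [~, maxID]  = max(mu, [], 2);
    pred_labels = model.unique_labels(maxID);

    % predictive variance, identical for all tasks since the kernel is shared
    v        = model.L \ Ks;
    variance = Kss(:) - sum(v.*v, 1)';
end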