@@ -6,7 +6,7 @@ function multiclass_gp_1D_example()
 
   % get some artificial 1D data
   train_data = [1;2.5;4.5]';
-  train_labels = [1; 2; 4];
+  train_labels = [1; 2; 4]';
   test_data = (0.1:0.01:5);
 
   % some default settings of hyperparameters
@@ -32,9 +32,9 @@ function multiclass_gp_1D_example()
   plot(train_data(2), 1, 'gx');
   plot(train_data(3), 1, 'rx');
 
-  plot(test_data, mu(:,1), 'b');
-  plot(test_data, mu(:,2), 'g');
-  plot(test_data, mu(:,3), 'r');
+  plot(test_data, mu(1,:), 'b');
+  plot(test_data, mu(2,:), 'g');
+  plot(test_data, mu(3,:), 'r');
   hold off
 
   hfigPred = figure(2);
@@ -45,17 +45,19 @@ function multiclass_gp_1D_example()
   title('Multi-class posterior mean and variance');
   colors = {'b', 'g', 'r'};
   hold on
-  max_mean = max(mu,[],2);
+  max_mean = max(mu,[],1);
   lower = max_mean-sqrt(variance);
   upper = max_mean+sqrt(variance);
-  p = [test_data', lower; flipdim(test_data',1),flipdim(upper,1)];
-  fill(p(:,1), p(:,2), 'y');
+
+  px = [test_data, flipdim(test_data,2)];
+  py = [lower, flipdim(upper,2)];
+  fill(px, py, 'y');
 
   for k=1:length(model.unique_labels)
 
     tmp_ID = pred_labels == model.unique_labels(k);
-    plot( test_data(tmp_ID)', ...
-          mu(tmp_ID,k),...
+    plot( test_data(tmp_ID), ...
+          mu(k,tmp_ID),...
           colors{k}, ...
           'LineWidth', 2 ...
         );
@@ -98,29 +100,32 @@ function multiclass_gp_1D_example()
   plot(train_data(2), 1, 'gx');
   plot(train_data(3), 1, 'rx');
 
-  plot(test_data, mu(:,1), 'b');
-  plot(test_data, mu(:,2), 'g');
-  plot(test_data, mu(:,3), 'r');
+  plot(test_data, mu(1,:), 'b');
+  plot(test_data, mu(2,:), 'g');
+  plot(test_data, mu(3,:), 'r');
   hold off
 
   hfigPredUpd = figure(5);
-  plot(test_data', pred_labels, 'kx');
+  plot(test_data, pred_labels, 'kx');
   title('Predicted multi-class labels');
 
   hfigPosteriorUpd = figure(6);
   title('Multi-class posterior mean and variance');
   colors = {'b', 'g', 'r'};
   hold on
-  max_mean = max(mu,[],2);
+  max_mean = max(mu,[],1);
   lower = max_mean-sqrt(variance);
   upper = max_mean+sqrt(variance);
-  p = [test_data', lower; flipdim(test_data',1),flipdim(upper,1)];
-  fill(p(:,1), p(:,2), 'y');
+
+  px = [test_data, flipdim(test_data,2)];
+  py = [lower, flipdim(upper,2)];
+  fill(px, py, 'y');
+
  for k=1:length(model.unique_labels)
 
     tmp_ID = pred_labels == model.unique_labels(k);
-    plot( test_data(tmp_ID)', ...
-          mu(tmp_ID,k), ...
+    plot( test_data(tmp_ID), ...
+          mu(k,tmp_ID), ...
           colors{k}, ...
          'LineWidth', 2 ...
        );
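
Note on the shape convention this patch moves to: after the change, mu is indexed as mu(class, test_point), i.e. one row per class and one column per test point, variance is a row vector over test points, and the uncertainty band is built from row vectors instead of a stacked column matrix. The snippet below is a minimal standalone MATLAB sketch of that plotting convention only; the mu and variance values are synthetic placeholders, not output of the actual GP model, and fliplr is used where the patch uses flipdim(x,2) (equivalent for row vectors).

% Sketch of the post-patch convention: mu is (numClasses x numTestPoints),
% variance is 1 x numTestPoints. Placeholder data, not real GP predictions.
test_data = (0.1:0.01:5);
mu = [exp(-(test_data-1).^2); exp(-(test_data-2.5).^2); exp(-(test_data-4.5).^2)];
variance = 0.05*ones(1, numel(test_data));

max_mean = max(mu, [], 1);          % max over classes -> 1 x numTestPoints
lower = max_mean - sqrt(variance);
upper = max_mean + sqrt(variance);

figure; hold on
% shaded band from row vectors, as in the patched code
px = [test_data, fliplr(test_data)];
py = [lower, fliplr(upper)];
fill(px, py, 'y');

colors = {'b', 'g', 'r'};
for k = 1:size(mu, 1)
  plot(test_data, mu(k, :), colors{k}, 'LineWidth', 2);  % k-th class mean is a row
end
hold off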