발표 자료
그림
코드
%% 1-Dimensional Gaussian Process Regression
clc;
clear;      % 'clear' is enough here; 'clear all' also purges cached functions/breakpoints
close all;
rng(1);     % fix the random seed so every run produces the same figures
%% 1. Gaussian process PRIOR realizations
% Draw 10 sample paths from a zero-mean GP prior with a squared-exponential kernel.
gp_realization.nr_data = 100;
gp_realization.input = linspace(0, 10, gp_realization.nr_data)';
gp_realization.hyp.sig2f = 1E+0;   % signal variance
gp_realization.hyp.sig2w = 1E-6;   % (near-zero) observation-noise variance
gp_realization.hyp.sig2x = 1E+0;   % length-scale parameter of the SE kernel
fig = figure(1); set(fig, 'Position', [300 100 900 700]); hold on;
% The kernel matrix does not depend on the loop index: build it once, not 10 times.
K = gp_kernel_se_speedup(gp_realization.input, gp_realization.input, gp_realization.hyp);
for i = 1:10
    % Use nr_data instead of the hard-coded 100 so the grid size can be changed in one place.
    gp_realization.output = mvnrnd(zeros(gp_realization.nr_data, 1), K);
    plot(gp_realization.input, gp_realization.output, 'Color', rand(1, 3), 'LineWidth', 2);
end
hold off;
title(sprintf('Gaussian Process Realizations sig2x: %.1f', gp_realization.hyp.sig2x), 'FontSize', 20);
%% 2. Gaussian process POSTERIOR realizations
rng(1);
% Reference (ground-truth) function: a single draw from the GP prior.
ref_data.nr_data = 100;
ref_data.input = linspace(0, 10, ref_data.nr_data)';
ref_data.hyp.sig2f = 1E+0;   % signal variance
ref_data.hyp.sig2w = 1E-6;   % observation-noise variance
ref_data.hyp.sig2x = 1E-0;   % length-scale
K = gp_kernel_se_speedup(ref_data.input, ref_data.input, ref_data.hyp);
% ref_data.output = K*randn(ref_data.nr_data, 1);
ref_data.output = mvnrnd(zeros(ref_data.nr_data, 1), K)';
% Training data: 8 evenly-spaced samples of the reference curve.
gp_data.nr_data = 8;
idx = 1+round(linspace(0, ref_data.nr_data-1, gp_data.nr_data))';
gp_data.input = ref_data.input(idx);
gp_data.output = ref_data.output(idx);
gp_data.hyp.sig2f = 1E+0;
gp_data.hyp.sig2w = 1E-2;
gp_data.hyp.sig2x = 1E-0;
[gp_struct] = initGP(gp_data.input, gp_data.output, @gp_kernel_se_speedup, gp_data.hyp);
gp_result = gpr(gp_struct, ref_data.input);
% Plot the reference curve, training data, and GP mean/variance band.
fig = figure(2); set(fig, 'Position', [400 100 900 700]);
hold on;
hGPV = fillBtwLines(ref_data.input ...
    , gp_result.mean-3*gp_result.var, gp_result.mean+3*gp_result.var, [0.8 0.9 0.8]);
hRef = plot(ref_data.input, ref_data.output, 'r', 'LineWidth', 3);
hData = plot(gp_data.input, gp_data.output, 'ko', 'LineWidth', 2);
hGPR = plot(ref_data.input, gp_result.mean, 'b', 'LineWidth', 3);
% Posterior covariance K** - K*x Kxx^{-1} Kx*, built from three whole kernel
% matrices instead of the original O(n^2) double loop of scalar kernel calls
% (each of which redid an 8x8 solve). The kernel is already called with full
% vectors above, so it supports matrix-valued cross-covariances.
kfun = @(x1, x2)(gp_kernel_se_speedup(x1, x2, gp_data.hyp));
Kss = kfun(ref_data.input, ref_data.input);   % test-test covariance
Ksx = kfun(ref_data.input, gp_data.input);    % test-train covariance
Kxx = kfun(gp_data.input, gp_data.input);     % train-train covariance
Kpost = Kss - Ksx/Kxx*Ksx';
% Draw 10 sample paths from the posterior, centered on the predictive mean.
for i = 1:10
    % sample_path = Kpost*randn(ref_data.nr_data, 1);
    sample_path = mvnrnd(zeros(ref_data.nr_data, 1), Kpost)';
    hPost = plot(ref_data.input, gp_result.mean + sample_path, '--', 'Color', rand(1, 3), 'LineWidth', 2);
end
hold off; grid on;
hleg = legend([hRef hGPR hGPV hData hPost], 'Reference', 'GP mean', 'GP variance', 'Train Data', 'Samples from Posterior', 0);
set(hleg, 'FontSize', 15);
title(sprintf('Gaussian Process Regression sig2x: %.1f', gp_data.hyp.sig2x), 'FontSize', 20);
%% 3. GPR with different kernels
% Fit the same 8 training points with four kernels and compare the means.
ref_data.nr_data = 100;
ref_data.input = linspace(0, 10, ref_data.nr_data)';
ref_data.hyp.sig2f = 1E+0;ref_data.hyp.sig2w = 1E-6;ref_data.hyp.sig2x = 1E-0;
K = gp_kernel_se_speedup(ref_data.input, ref_data.input, ref_data.hyp);
% Use nr_data instead of the hard-coded 100 so the grid size can be changed in one place.
ref_data.output = mvnrnd(zeros(ref_data.nr_data, 1), K)';
gp_data.nr_data = 8;
idx = 1+round(linspace(0, ref_data.nr_data-1, gp_data.nr_data))';
gp_data.input = ref_data.input(idx);
gp_data.output = ref_data.output(idx);
% Squared-exponential (SE) kernel
seKernelStruct.hyp.sig2f = 1E+0; seKernelStruct.hyp.sig2w = 1E-6; seKernelStruct.hyp.sig2x = 2E-0;
[gp_struct] = initGP(gp_data.input, gp_data.output, @gp_kernel_se_speedup, seKernelStruct.hyp);
seKernelStruct.gp_result = gpr(gp_struct, ref_data.input);
% Periodic kernel
peKernelStruct.hyp.sig2f = 1E+0; peKernelStruct.hyp.sig2w = 1E-6;
peKernelStruct.hyp.sig2x1 = 1E+0; peKernelStruct.hyp.sig2x2 = 2E+0; peKernelStruct.hyp.sig2p = 2E+0;
[gp_struct] = initGP(gp_data.input, gp_data.output, @gp_kernel_period, peKernelStruct.hyp);
peKernelStruct.gp_result = gpr(gp_struct, ref_data.input);
% Product (polynomial, degree p) kernel
prKernelStruct.hyp.sig2f = 1E+0; prKernelStruct.hyp.sig2w = 1E-6; prKernelStruct.hyp.p = 3;
[gp_struct] = initGP(gp_data.input, gp_data.output, @gp_kernel_product, prKernelStruct.hyp);
prKernelStruct.gp_result = gpr(gp_struct, ref_data.input);
% Neural-network kernel
nnKernelStruct.hyp.sig2f = 1E+0; nnKernelStruct.hyp.sig2w = 1E-6; nnKernelStruct.hyp.sig2x = 1E-0;
[gp_struct] = initGP(gp_data.input, gp_data.output, @gp_kernel_nn, nnKernelStruct.hyp);
nnKernelStruct.gp_result = gpr(gp_struct, ref_data.input);
fig = figure(3); set(fig, 'Position', [500 100 900 700]);
hold on;
hRef = plot(ref_data.input, ref_data.output, 'r', 'LineWidth', 3);
hData = plot(gp_data.input, gp_data.output, 'ko', 'LineWidth', 2, 'MarkerSize', 15);
hSE = plot(ref_data.input, seKernelStruct.gp_result.mean, 'b', 'LineWidth', 3);
hPE = plot(ref_data.input, peKernelStruct.gp_result.mean, 'g', 'LineWidth', 3);
hPR = plot(ref_data.input, prKernelStruct.gp_result.mean, 'c', 'LineWidth', 3);
hNN = plot(ref_data.input, nnKernelStruct.gp_result.mean, 'y', 'LineWidth', 3);
hleg = legend([hRef hData hSE hPE hPR, hNN] ...
    , 'Reference', 'Data', 'SE kernel', 'Periodic', 'Polynomial', 'Neural Network', 0);
set(hleg, 'FontSize', 15);
title(sprintf('GPR using Different Kernels'), 'FontSize', 20);
%% 4. Nonstationary GP
rng(1);
% Target function: one draw from a stationary GP prior.
ref_data.nr_data = 100;
ref_data.input = linspace(0, 10, ref_data.nr_data)';
ref_data.hyp.sig2f = 1E+0;ref_data.hyp.sig2w = 1E-6;ref_data.hyp.sig2x = 1E-0;
K = gp_kernel_se_speedup(ref_data.input, ref_data.input, ref_data.hyp);
% Use nr_data instead of the hard-coded 100 so the grid size can be changed in one place.
ref_data.output = mvnrnd(zeros(ref_data.nr_data, 1), K)';
% Initialize the nonstationary GP (NGP): each data point gets its own random length-scale.
ngp_data.nr_data = 8;
idx = 1+round(linspace(0, ref_data.nr_data-1, ngp_data.nr_data))';
ngp_data.input = ref_data.input(idx);
ngp_data.output = ref_data.output(idx);
ngp_data.sigma = cell(ngp_data.nr_data, 1);
for i = 1:ngp_data.nr_data, ngp_data.sigma{i} = rand; end;
ngp_data.hyp.sig2f = 1; ngp_data.hyp.sig2w = 1E-2;
ngp_struct = initNGP(ngp_data.input, ngp_data.output, ngp_data.sigma, @gp_kernel_ns, ngp_data.hyp);
ngp_result = ngpr(ngp_struct, ref_data.input);
% Initialize the stationary GP baseline (SGP): same data, all length-scales equal to 1.
% BUGFIX: this assignment had been fused onto the comment line above it, so
% sgp_data.nr_data was never set and the section errored.
sgp_data.nr_data = 8;
idx = 1+round(linspace(0, ref_data.nr_data-1, sgp_data.nr_data))';
sgp_data.input = ref_data.input(idx);
sgp_data.output = ref_data.output(idx);
sgp_data.sigma = cell(sgp_data.nr_data, 1);
for i = 1:sgp_data.nr_data, sgp_data.sigma{i} = 1; end;
sgp_data.hyp.sig2f = 1; sgp_data.hyp.sig2w = 1E-2;
sgp_struct = initNGP(sgp_data.input, sgp_data.output, sgp_data.sigma, @gp_kernel_ns, sgp_data.hyp);
sgp_result = ngpr(sgp_struct, ref_data.input);
% Plot both fits; annotate each training point with its NGP length-scale.
fig = figure(4); set(fig, 'Position', [600 100 900 700]);
hold on;
hRef = plot(ref_data.input, ref_data.output, 'r', 'LineWidth', 3);
hData = plot(ngp_data.input, ngp_data.output, 'ko', 'LineWidth', 2, 'MarkerSize', 15);
hNGP = plot(ref_data.input, ngp_result.mean, 'b-', 'LineWidth', 2, 'MarkerSize', 15);
hSGP = plot(ref_data.input, sgp_result.mean, 'g-', 'LineWidth', 2, 'MarkerSize', 15);
for i = 1:ngp_data.nr_data
    hText = text(ngp_data.input(i), ngp_data.output(i), sprintf(' %.2f', ngp_data.sigma{i}), 'FontSize', 15);
end
hold off;
hleg = legend([hRef hData hSGP hNGP], 'Reference', 'Data', 'Stationary GPR', 'Nonstationary GPR', 0);
set(hleg, 'FontSize', 15);
title(sprintf('Stationary and Nonstationary GPR'), 'FontSize', 20);
추가 코드들
'Enginius > Machine Learning' 카테고리의 다른 글
Sparse Kernel Methods (0) | 2014.07.31 |
---|---|
Determinantal Point Process (DPP) (1) | 2014.07.28 |
ICML에 나온 Gaussian process (0) | 2014.07.23 |
Stochastic process and Gaussian process (2) | 2014.07.16 |
공간 속에 있는 점들 사이의 거리에 대한 성질 (0) | 2014.06.19 |