bayesianLSvb.m
% ---------------------------------------------
%
% Bayesian linear regression example
% using variational Bayes
%
% Johan Dahlin ([email protected])
% 2013-03-19
%
% ---------------------------------------------
% Variational inference in the Bayesian linear regression model
%   Likelihood:  prod_n N( y_n | w'*Phi_n, Beta^{-1} )
%   Prior:       N( w | 0, alpha^{-1} I )
%   Hyperprior:  Gam( alpha | a0, b0 )
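%
% For reference, the loop below iterates the standard mean-field updates for
% the factorisation q(w,alpha) = q(w) q(alpha) (cf. Bishop, "Pattern
% Recognition and Machine Learning", Sec. 10.3):
%   q(w)     = N( w | mN, SN ),  SN = ( E[alpha]*I + Beta*Phi'*Phi )^{-1},
%                                mN = Beta*SN*Phi'*y
%   q(alpha) = Gam( alpha | aN, bN ),  aN = a0 + M/2,
%                                      bN = b0 + ( mN'*mN + tr(SN) )/2,
%   with E[alpha] = aN/bN.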
clear all;
% Parameters
Beta=5;     % Noise precision (the observation noise variance is 1/Beta)
N=1; M=1;   % Dimension of the regressor/parameter vector (must be 1 in this script)
Ndata=20;   % Number of data points

% Generate data from y = x^3 + e, with e ~ N(0,1/Beta)
x=2*randn(1,Ndata);
y=x.^3+sqrt(1/Beta)*randn(1,Ndata);
% Priors
a0=1; b0=1;
% Build the regressor matrix Phi (cubic basis)
Phi=(x.^3)';
y=y';
% Initialise the variational parameters
aN(1)=a0;
bN(1)=b0;
SN(:,:,1)=inv(aN(1)/bN(1)*eye(N)+Beta*(Phi'*Phi));
mN(1,:)=Beta*SN(:,:,1)*Phi'*y;
% Estimate the parameters by iterating the variational updates
for ii=2:10
  SN(:,:,ii)=inv(aN(ii-1)/bN(ii-1)*eye(N)+Beta*(Phi'*Phi));  % q(w) covariance
  mN(ii,:)=Beta*SN(:,:,ii)*Phi'*y;                           % q(w) mean
  aN(ii)=a0+M/2;                                             % q(alpha) shape
  bN(ii)=b0+(mN(ii,:)*mN(ii,:)'+trace(SN(:,:,ii)))/2;        % q(alpha) rate
end
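
% Optional sanity check (not required for the rest of the script): report the
% posterior mean of the prior precision alpha and the final weight estimate.
Ealpha=aN(end)/bN(end);   % E[alpha] under q(alpha)=Gam(alpha|aN,bN)
fprintf('E[alpha] = %.3f, E[w] = %.3f\n',Ealpha,mN(end,:));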
% Predict on a grid and estimate the variance of the predictions
xx=-5:0.01:5;
for jj=1:length(xx)
  phiStar=xx(jj).^3;                            % regressor at the test input
  yhat(jj)=mN(end,:)*phiStar;                   % predictive mean
  Shat(jj)=1/Beta+phiStar'*SN(:,:,end)*phiStar; % predictive variance
  ylimUpp(jj)=yhat(jj)+1.96*sqrt(Shat(jj));     % upper 95% prediction limit
  ylimLow(jj)=yhat(jj)-1.96*sqrt(Shat(jj));     % lower 95% prediction limit
end
% Plot the data, the predictive mean, the 95% prediction bands and the true function
plot(x,y,'o',xx,yhat,'b',xx,ylimUpp,':',xx,ylimLow,':',xx,xx.^3,'k')
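
% Optional: axis labels and a legend for readability (the entries follow the
% order of the series in the plot call above).
xlabel('x'); ylabel('y');
legend('data','predictive mean','upper 95% limit','lower 95% limit','true y=x^3', ...
       'Location','northwest');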