
Commit df71e74

Add: gibbs sampling
1 parent eaaa32c commit df71e74

3 files changed: +78 -7 lines changed

19.MCMC/GibbsSampling.py (+71)
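The new file implements a systematic-scan Gibbs sampler: each sweep redraws every coordinate from its full conditional given the current values of the others. In the usual notation (added here for context, not taken from the commit):

x_j^{(t+1)} \sim p\left(x_j \mid x_1^{(t+1)}, \dots, x_{j-1}^{(t+1)}, x_{j+1}^{(t)}, \dots, x_n^{(t)}\right), \qquad j = 1, \dots, n.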
@@ -0,0 +1,71 @@
import numpy as np
from matplotlib import pyplot as plt
from scipy.stats import gaussian_kde


def gibbs_sampling(dim, conditional_sampler, x0=None, burning_steps=1000, max_steps=10000, epsilon=1e-8, verbose=False):
    """
    Given a conditional sampler which samples from p(x_j | x_1, x_2, ... x_n),
    return an array of samples x ~ p, where p is the joint distribution underlying the conditionals.
    x0 is the initial value of x. If not specified, it's set as a zero vector.
    conditional_sampler takes (x, j) as parameters.
    """
    x = np.zeros(dim) if x0 is None else x0
    samples = np.zeros([max_steps - burning_steps, dim])
    for i in range(max_steps):
        for j in range(dim):
            x[j] = conditional_sampler(x, j)
        if verbose:
            print("New value of x is", x)
        if i >= burning_steps:
            samples[i - burning_steps] = x
    return samples


if __name__ == '__main__':
    def demonstrate(dim, p, desc, **args):
        samples = gibbs_sampling(dim, p, **args)
        z = gaussian_kde(samples.T)(samples.T)
        plt.scatter(samples[:, 0], samples[:, 1], c=z, marker='.')
        plt.plot(samples[: 100, 0], samples[: 100, 1], 'r-')
        plt.title(desc)
        plt.show()

    # example 1: both coordinates drawn independently from N(0, 1)
    # (mean, covariance_inv and det_covariance are defined but unused by this sampler)
    mean = np.array([2, 3])
    covariance = np.array([[1, 0],
                           [0, 1]])
    covariance_inv = np.linalg.inv(covariance)
    det_covariance = 1

    def gaussian_sampler1(x, j):
        return np.random.normal()
    demonstrate(2, gaussian_sampler1, "Gaussian distribution with mean of 0 and 0")

    # example 2: coordinates drawn independently from N(2, 1) and N(3, 1)
    mean = np.array([2, 3])
    covariance = np.array([[1, 0],
                           [0, 1]])
    covariance_inv = np.linalg.inv(covariance)
    det_covariance = 1

    def gaussian_sampler2(x, j):
        if j == 0:
            return np.random.normal(2)
        else:
            return np.random.normal(3)
    demonstrate(2, gaussian_sampler2, "Gaussian distribution with mean of 2 and 3")

    # example 3: each coordinate lands in [0, 0.5] or (1.5, 2), giving four clusters
    def blocks_sampler(x, j):
        sample = np.random.random()
        if sample > .5:
            sample += 1.
        return sample
    demonstrate(2, blocks_sampler, "Four blocks")

    # example 4: as example 3, but the upper interval is shifted to (100.5, 101),
    # so the clusters are separated by a large gap
    def blocks_sampler(x, j):
        sample = np.random.random()
        if sample > .5:
            sample += 100.
        return sample
    demonstrate(2, blocks_sampler, "Four blocks with large gap.")
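
The toy samplers above draw each coordinate independently, so the chain never actually uses the conditioning values (covariance_inv and det_covariance are computed but unused). For a conditional that does depend on the other coordinate, a correlated 2-D Gaussian is a natural test case. The following sketch is not part of the commit; mean, sigmas and rho are illustrative values.

    # Hypothetical example (not in the commit): Gibbs sampling a correlated
    # 2-D Gaussian via the closed-form conditional of one coordinate given
    # the other: p(x_j | x_k) = N(mu_j + rho*s_j/s_k*(x_k - mu_k), s_j^2*(1 - rho^2)).
    mean = np.array([2., 3.])      # illustrative mean
    sigmas = np.array([1., 1.])    # illustrative marginal standard deviations
    rho = 0.8                      # illustrative correlation

    def correlated_gaussian_sampler(x, j):
        k = 1 - j  # index of the other coordinate
        cond_mean = mean[j] + rho * sigmas[j] / sigmas[k] * (x[k] - mean[k])
        cond_std = sigmas[j] * np.sqrt(1. - rho ** 2)
        return np.random.normal(cond_mean, cond_std)

    # demonstrate(2, correlated_gaussian_sampler, "Correlated Gaussian, rho = 0.8")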

19.MCMC/MetropolisHasting.py (+4 -4)
@@ -9,7 +9,7 @@ def gaussian_kernel(x1, x2):
 def gaussian_sampler(x):
     return np.random.normal(x)
 
-def single_component_metropolis_hasting(dim, p, q=gaussian_kernel, q_sampler=gaussian_sampler, x0=None, burning_steps=1000, max_steps=10000, epsilon=1e-8, verbose=False):
+def metropolis_hasting(dim, p, q=gaussian_kernel, q_sampler=gaussian_sampler, x0=None, burning_steps=1000, max_steps=10000, epsilon=1e-8, verbose=False):
     """
     Given a distribution function p (it doesn't need to be a probability, a likelihood function is enough),
     and the recommended distribution q,
@@ -19,9 +19,8 @@ def single_component_metropolis_hasting(dim, p, q=gaussian_kernel, q_sampler=gau
     q is a distribution function representing q(x_new | x_old).
     q takes (x_old, x_new) as parameters.
     """
-    x = np.zeros(dim)
+    x = np.zeros(dim) if x0 is None else x0
     samples = np.zeros([max_steps - burning_steps, dim])
-    # Burning
     for i in range(max_steps):
         x_new = q_sampler(x)
         accept_prob = (p(x_new) + epsilon) / (p(x) + epsilon) * q(x, x_new) / q(x_new, x)
@@ -38,10 +37,11 @@ def single_component_metropolis_hasting(dim, p, q=gaussian_kernel, q_sampler=gau
 
 if __name__ == '__main__':
     def demonstrate(dim, p, desc, **args):
-        samples = single_component_metropolis_hasting(dim, p, **args)
+        samples = metropolis_hasting(dim, p, **args)
         z = gaussian_kde(samples.T)(samples.T)
         plt.scatter(samples[:, 0], samples[:, 1], c=z, marker='.')
         plt.plot(samples[: 100, 0], samples[: 100, 1], 'r-')
+        plt.title(desc)
         plt.show()
 
     # example 1:
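
Besides renaming single_component_metropolis_hasting to metropolis_hasting, this diff makes the starting state configurable through x0. A minimal usage sketch, with a hypothetical unnormalized target density p that is not part of the commit:

    # Hypothetical usage of the new x0 parameter (the density below is illustrative).
    def p(x):
        # unnormalized density of a 2-D Gaussian centered at (2, 3)
        return np.exp(-0.5 * np.sum((x - np.array([2., 3.])) ** 2))

    samples = metropolis_hasting(2, p, x0=np.array([2., 3.]), burning_steps=500, max_steps=5000)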

19.MCMC/SingleComponentMetropolisHasting.py (+3 -3)
@@ -23,10 +23,9 @@ def single_component_metropolis_hasting(dim, p, q=gaussian_kernel, q_sampler=gau
     xj_new is the new value of x_j.
     x0 is the initial value of x. If not specified, it's set as zero vector.
     """
-    x = np.zeros(dim)
+    x = np.zeros(dim) if x0 is None else x0
     samples = np.zeros([max_steps - burning_steps, dim])
-    # Burning
-    for i in range(max_steps):
+    for i in range(max_steps):
         for j in range(dim):
             xj_new = q_sampler(x, j)
             x_new = x.copy()
@@ -49,6 +48,7 @@ def demonstrate(dim, p, desc, **args):
         z = gaussian_kde(samples.T)(samples.T)
         plt.scatter(samples[:, 0], samples[:, 1], c=z, marker='.')
         plt.plot(samples[: 100, 0], samples[: 100, 1], 'r-')
+        plt.title(desc)
         plt.show()
 
     # example 1:
