Skip to content

Commit ef3d197

Browse files
authored
Merge pull request #611 from CUQI-DTU/add_rv_to_distribution
Add rv method to Distribution class and update artifact version
2 parents 0303b8f + 907b9c4 commit ef3d197

File tree

6 files changed

+284
-21
lines changed

6 files changed

+284
-21
lines changed

.github/workflows/docs.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ jobs:
2626
# If PR-based build. Upload artifact of the documentation for download and review
2727
- name: Upload artifact
2828
if: ${{ github.event_name == 'pull_request' }}
29-
uses: actions/upload-artifact@v3
29+
uses: actions/upload-artifact@v4
3030
with:
3131
name: docs_build
3232
path: ./public

cuqi/distribution/_distribution.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -406,3 +406,9 @@ def __repr__(self) -> str:
406406
return "CUQI {}. Conditioning variables {}.".format(self.__class__.__name__,self.get_conditioning_variables())
407407
else:
408408
return "CUQI {}.".format(self.__class__.__name__)
409+
410+
@property
411+
def rv(self):
412+
""" Return a random variable object representing the distribution. """
413+
from cuqi.experimental.algebra import RandomVariable
414+
return RandomVariable(self)
Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
# %%
2+
# This demo shows the use of RandomVariable (RV) as prior in posterior sampling.
3+
# Specifically, we test the use of RV as prior through
4+
# 1) BayesianProblem,
5+
# 2) JointDistribution with MALA,
6+
# 3) JointDistribution with Gibbs (NUTS + Conjugate) and
7+
# 4) JointDistribution with Regularized LinearRTO.
8+
9+
from cuqi.testproblem import Deconvolution1D
10+
from cuqi.distribution import Gaussian, Gamma, GMRF
11+
from cuqi.problem import BayesianProblem
12+
from cuqi.distribution import JointDistribution
13+
from cuqi.implicitprior import NonnegativeGMRF
14+
from cuqi.experimental.mcmc import HybridGibbs, Conjugate, MALA, NUTS, RegularizedLinearRTO
15+
import numpy as np
16+
import matplotlib.pyplot as plt
17+
18+
np.random.seed(1912)
19+
20+
# %% Forward model
21+
n = 128
22+
A, y_obs, info = Deconvolution1D(dim=n, phantom='square').get_components()
23+
24+
# %% 1) Bayesian problem
25+
x = GMRF(np.zeros(A.domain_dim), 100).rv
26+
y = Gaussian(A @ x, 1/10000).rv
27+
28+
# Combine into a Bayesian Problem and perform UQ
29+
BP = BayesianProblem(y, x)
30+
BP.set_data(y=y_obs)
31+
BP.UQ(exact=info.exactSolution)
32+
33+
# %% 2) Joint distribution with MALA
34+
target = JointDistribution(y, x)(y=y_obs)
35+
sampler = MALA(target, scale=0.0002, initial_point=np.zeros(n))
36+
sampler.sample(10000)
37+
samples = sampler.get_samples()
38+
plt.figure()
39+
samples.plot_ci(exact=info.exactSolution)
40+
41+
# %% 3) Hybrid Gibbs (NUTS + Conjugate)
42+
d = Gamma(1, 1e-4).rv
43+
x = GMRF(np.zeros(A.domain_dim), d).rv
44+
y = Gaussian(A @ x, 1/10000).rv
45+
46+
target = JointDistribution(y, x, d)(y=y_obs)
47+
48+
# Sampling strategy
49+
sampling_strategy = {
50+
"x" : NUTS(),
51+
"d" : Conjugate()
52+
}
53+
54+
# Gibbs sampler
55+
sampler = HybridGibbs(target, sampling_strategy)
56+
# Run sampler
57+
sampler.warmup(200)
58+
sampler.sample(1000)
59+
samples = sampler.get_samples()
60+
61+
# Plot
62+
plt.figure()
63+
samples["x"].plot_ci(exact=info.exactSolution)
64+
65+
# %% 4) Regularized GMRF
66+
x = NonnegativeGMRF(np.zeros(A.domain_dim), 50).rv
67+
y = Gaussian(A @ x, 1/10000).rv
68+
69+
target = JointDistribution(y, x)(y=y_obs)
70+
71+
# Regularized Linear RTO sampler
72+
sampler = RegularizedLinearRTO(target)
73+
# Run sampler
74+
sampler.sample(1000)
75+
samples = sampler.get_samples()
76+
77+
# Plot
78+
plt.figure()
79+
samples.plot_ci(exact=info.exactSolution)

demos/howtos/RandomVariablesAndAlgebra.py

Lines changed: 45 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -12,16 +12,16 @@
1212
#%%
1313
# Defining Random Variables
1414
# -------------------------
15-
# Random variables can be defined using the RandomVariable class. The RandomVariable
16-
# class requires a distribution object to be passed as an argument. The distribution
17-
# object can be any distribution object from the `cuqi.distribution` module.
15+
16+
# Random variables can be defined by either initialising the RandomVariable class
17+
# with a distribution object or by retrieving the `rv` attribute of a distribution.
18+
# The distribution object can be any distribution from the `cuqi.distribution` module.
1819

1920
from cuqi.distribution import Normal
2021
from cuqi.experimental.algebra import RandomVariable
2122

22-
2323
x = RandomVariable(Normal(0, 1))
24-
y = RandomVariable(Normal(0, 1))
24+
y = Normal(0, 1).rv
2525

2626
# %%
2727
# Recording Algebraic Operations
@@ -66,33 +66,59 @@
6666
from cuqi.distribution import Gaussian, Gamma, GMRF
6767
from cuqi.experimental.algebra import RandomVariable
6868
from cuqi.problem import BayesianProblem
69+
from cuqi.distribution import JointDistribution
70+
from cuqi.experimental.mcmc import HybridGibbs, LinearRTO, Conjugate, ConjugateApprox
6971
import numpy as np
72+
import matplotlib.pyplot as plt
7073

7174
# Forward model
7275
A, y_obs, info = Deconvolution1D().get_components()
7376

7477
# Bayesian Problem (defined using Random Variables)
75-
d = RandomVariable(Gamma(1, 1e-4))
76-
s = RandomVariable(Gamma(1, 1e-4))
77-
x = RandomVariable(GMRF(np.zeros(A.domain_dim), d))
78-
y = RandomVariable(Gaussian(A @ x, 1/s))
78+
d = Gamma(1, 1e-4).rv
79+
s = Gamma(1, 1e-4).rv
80+
x = GMRF(np.zeros(A.domain_dim), d).rv
81+
y = Gaussian(A @ x, 1/s).rv
7982

8083
# Combine into a Bayesian Problem and perform UQ
8184
BP = BayesianProblem(y, x, s, d)
8285
BP.set_data(y=y_obs)
8386
BP.UQ(exact={"x": info.exactSolution})
8487

88+
# Random variables can also be used to define JointDistribution. Here we solve the same
89+
# problem above by explicitly forming a target distribution and then drawing samples with
90+
# the HybridGibbs sampler.
91+
target = JointDistribution(y, x, s, d)(y=y_obs)
92+
93+
# Sampling strategy
94+
sampling_strategy = {
95+
"x" : LinearRTO(),
96+
"s" : Conjugate(),
97+
"d" : Conjugate()
98+
}
99+
100+
# Gibbs sampler
101+
sampler = HybridGibbs(target, sampling_strategy)
102+
103+
# Run sampler
104+
sampler.warmup(200)
105+
sampler.sample(1000)
106+
samples = sampler.get_samples()
107+
108+
# Plot
109+
plt.figure()
110+
samples["x"].plot_ci(exact=info.exactSolution)
111+
85112
# %%
86113
# Conditioning on random variables (example 1)
87-
from cuqi.distribution import Gaussian
88-
from cuqi.experimental.algebra import RandomVariable
89-
90-
x = RandomVariable(Gaussian(0, lambda s: s))
91-
y = RandomVariable(Gaussian(0, lambda d: d))
114+
s = Gaussian(0, 1).rv
115+
x = Gaussian(0, s).rv
116+
y = Gaussian(0, lambda d: d).rv
92117

93118
z = x+y
94119

95-
z.condition(x=1)
120+
z.condition(s=1)
121+
z.condition(d=2)
96122

97123
# %%
98124
# Or conditioning on the variables s, or d
@@ -110,10 +136,10 @@
110136
A, y_obs, info = Deconvolution1D(dim=4).get_components()
111137

112138
# Bayesian Problem (defined using Random Variables)
113-
d = RandomVariable(Gamma(1, 1e-4))
114-
s = RandomVariable(Gamma(1, 1e-4))
115-
x = RandomVariable(GMRF(np.zeros(A.domain_dim), d))
116-
y = RandomVariable(Gaussian(A @ x, 1/s))
139+
d = Gamma(1, 1e-4).rv
140+
s = Gamma(1, 1e-4).rv
141+
x = GMRF(np.zeros(A.domain_dim), d).rv
142+
y = Gaussian(A @ x, 1/s).rv
117143

118144

119145
z = x+y

tests/zexperimental/test_mcmc.py

Lines changed: 137 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1255,6 +1255,142 @@ def test_UGLA_with_AffineModel_is_equivalent_to_LinearModel_and_shifted_data():
12551255
# Check that the samples are the same
12561256
assert np.allclose(samples_linear.samples, samples_affine.samples)
12571257

1258+
# ============ Test for sampling with RandomVariable prior against Distribution prior ============
1259+
samplers_for_rv_against_dist = [cuqi.experimental.mcmc.MALA,
1260+
cuqi.experimental.mcmc.ULA,
1261+
cuqi.experimental.mcmc.MH,
1262+
cuqi.experimental.mcmc.PCN,
1263+
cuqi.experimental.mcmc.CWMH,
1264+
cuqi.experimental.mcmc.NUTS,
1265+
cuqi.experimental.mcmc.LinearRTO]
1266+
1267+
@pytest.mark.parametrize("sampler", samplers_for_rv_against_dist)
1268+
def test_RandomVariable_prior_against_Distribution_prior(sampler: cuqi.experimental.mcmc.Sampler):
1269+
""" Test RandomVariable prior is equivalent to Distribution prior for
1270+
MALA, ULA, MH, PCN, CWMH, NUTS and LinearRTO.
1271+
"""
1272+
1273+
# Set dim
1274+
dim = 32
1275+
1276+
# Extract model and data
1277+
A, y_data, info = cuqi.testproblem.Deconvolution1D(dim=32, phantom='square').get_components()
1278+
1279+
# Set up RandomVariable prior and do posterior sampling
1280+
np.random.seed(0)
1281+
x_rv = cuqi.distribution.Gaussian(0.5*np.ones(dim), 0.1).rv
1282+
y_rv = cuqi.distribution.Gaussian(A@x_rv, 0.001).rv
1283+
joint_rv = cuqi.distribution.JointDistribution(x_rv, y_rv)(y_rv=y_data)
1284+
sampler_rv = sampler(joint_rv)
1285+
sampler_rv.sample(10)
1286+
samples_rv = sampler_rv.get_samples()
1287+
1288+
# Set up Distribution prior and do posterior sampling
1289+
np.random.seed(0)
1290+
x_dist = cuqi.distribution.Gaussian(0.5*np.ones(dim), 0.1)
1291+
y_dist = cuqi.distribution.Gaussian(A@x_dist, 0.001)
1292+
joint_dist = cuqi.distribution.JointDistribution(x_dist, y_dist)(y_dist=y_data)
1293+
sampler_dist = sampler(joint_dist)
1294+
sampler_dist.sample(10)
1295+
samples_dist = sampler_dist.get_samples()
1296+
1297+
assert np.allclose(samples_rv.samples, samples_dist.samples)
1298+
1299+
def test_RandomVariable_prior_against_Distribution_prior_regularized_RTO():
1300+
""" Test RandomVariable prior is equivalent to Distribution prior for
1301+
RegularizedLinearRTO.
1302+
"""
1303+
1304+
# Set dim
1305+
dim = 32
1306+
1307+
# Extract model and data
1308+
A, y_data, info = cuqi.testproblem.Deconvolution1D(dim=32, phantom='square').get_components()
1309+
1310+
# Set up RandomVariable prior and do posterior sampling
1311+
np.random.seed(0)
1312+
x_rv = cuqi.implicitprior.RegularizedGaussian(0.5*np.ones(dim), 0.1, constraint="nonnegativity").rv
1313+
y_rv = cuqi.distribution.Gaussian(A@x_rv, 0.001).rv
1314+
joint_rv = cuqi.distribution.JointDistribution(x_rv, y_rv)(y_rv=y_data)
1315+
sampler_rv = cuqi.experimental.mcmc.RegularizedLinearRTO(joint_rv)
1316+
sampler_rv.sample(10)
1317+
samples_rv = sampler_rv.get_samples()
1318+
1319+
# Set up Distribution prior and do posterior sampling
1320+
np.random.seed(0)
1321+
x_dist = cuqi.implicitprior.RegularizedGaussian(0.5*np.ones(dim), 0.1, constraint="nonnegativity")
1322+
y_dist = cuqi.distribution.Gaussian(A@x_dist, 0.001)
1323+
joint_dist = cuqi.distribution.JointDistribution(x_dist, y_dist)(y_dist=y_data)
1324+
sampler_dist = cuqi.experimental.mcmc.RegularizedLinearRTO(joint_dist)
1325+
sampler_dist.sample(10)
1326+
samples_dist = sampler_dist.get_samples()
1327+
1328+
assert np.allclose(samples_rv.samples, samples_dist.samples)
1329+
1330+
def test_RandomVariable_prior_against_Distribution_prior_UGLA_Conjugate_ConjugateApprox_HybridGibbs():
1331+
""" Test RandomVariable prior is equivalent to Distribution prior for
1332+
UGLA, Conjugate, ConjugateApprox and HybridGibbs samplers.
1333+
"""
1334+
1335+
# Forward problem
1336+
A, y_data, info = cuqi.testproblem.Deconvolution1D(dim=28, phantom='square', noise_std=0.001).get_components()
1337+
1338+
# Random seed
1339+
np.random.seed(0)
1340+
1341+
# Bayesian Inverse Problem
1342+
d = cuqi.distribution.Gamma(1, 1e-4)
1343+
s = cuqi.distribution.Gamma(1, 1e-4)
1344+
x = cuqi.distribution.LMRF(0, lambda d: 1/d, geometry=A.domain_geometry)
1345+
y = cuqi.distribution.Gaussian(A@x, lambda s: 1/s)
1346+
1347+
# Posterior
1348+
target = cuqi.distribution.JointDistribution(y, x, s, d)(y=y_data)
1349+
1350+
# Sampling strategy
1351+
sampling_strategy = {
1352+
"x" : cuqi.experimental.mcmc.UGLA(),
1353+
"s" : cuqi.experimental.mcmc.Conjugate(),
1354+
"d" : cuqi.experimental.mcmc.ConjugateApprox()
1355+
}
1356+
1357+
# Gibbs sampler
1358+
sampler = cuqi.experimental.mcmc.HybridGibbs(target, sampling_strategy)
1359+
1360+
# Run sampler
1361+
sampler.warmup(50)
1362+
sampler.sample(200)
1363+
samples = sampler.get_samples()
1364+
1365+
# Random seed
1366+
np.random.seed(0)
1367+
1368+
# Bayesian Inverse Problem
1369+
d_rv = cuqi.distribution.Gamma(1, 1e-4).rv
1370+
s_rv = cuqi.distribution.Gamma(1, 1e-4).rv
1371+
x_rv = cuqi.distribution.LMRF(0, lambda d_rv: 1/d_rv, geometry=A.domain_geometry).rv
1372+
y_rv = cuqi.distribution.Gaussian(A@x_rv, lambda s_rv: 1/s_rv).rv
1373+
1374+
# Posterior
1375+
target_rv = cuqi.distribution.JointDistribution(y_rv, x_rv, s_rv, d_rv)(y_rv=y_data)
1376+
1377+
# Sampling strategy
1378+
sampling_strategy_rv = {
1379+
"x_rv" : cuqi.experimental.mcmc.UGLA(),
1380+
"s_rv" : cuqi.experimental.mcmc.Conjugate(),
1381+
"d_rv" : cuqi.experimental.mcmc.ConjugateApprox()
1382+
}
1383+
1384+
# Gibbs sampler
1385+
sampler_rv = cuqi.experimental.mcmc.HybridGibbs(target_rv, sampling_strategy_rv)
1386+
1387+
# Run sampler
1388+
sampler_rv.warmup(50)
1389+
sampler_rv.sample(200)
1390+
samples_rv = sampler_rv.get_samples()
1391+
1392+
assert np.allclose(samples['x'].samples, samples_rv['x_rv'].samples)
1393+
12581394
def Conjugate_GaussianGammaPair():
12591395
""" Unit test whether Conjugacy Pair (Gaussian, Gamma) constructs the right distribution """
12601396
x = cuqi.distribution.Gamma(1.0, 2.0)
@@ -1373,4 +1509,4 @@ def test_RegularizedLinearRTO_ScipyLinearLSQ_option_invalid():
13731509
posterior = joint(y=y_data)
13741510

13751511
with pytest.raises(ValueError, match="ScipyLinearLSQ"):
1376-
sampler = cuqi.experimental.mcmc.RegularizedLinearRTO(posterior, solver = "ScipyLinearLSQ")
1512+
sampler = cuqi.experimental.mcmc.RegularizedLinearRTO(posterior, solver = "ScipyLinearLSQ")

tests/zexperimental/test_randomvariable.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -328,6 +328,21 @@ def test_RV_condition_maintains_parameter_name_order():
328328
assert z.condition(d=1).parameter_names == ['x', 'y']
329329
assert z.condition(d=1, s=1).parameter_names == ['x', 'y']
330330

331+
def test_equivalent_ways_to_create_RV_from_distribution():
332+
x = RandomVariable(cuqi.distribution.Gaussian(0, 1))
333+
y = cuqi.distribution.Gaussian(0, 1).rv
334+
335+
assert x.dim == y.dim
336+
assert x.distribution.mean == y.distribution.mean
337+
assert x.distribution.cov == y.distribution.cov
338+
339+
x = RandomVariable(cuqi.distribution.Gaussian(0, lambda s: s))
340+
y = cuqi.distribution.Gaussian(0, lambda s: s).rv
341+
342+
assert x.dim == y.dim
343+
assert x.distribution.mean == y.distribution.mean
344+
assert x.condition(s=1).distribution.cov == y.condition(s=1).distribution.cov
345+
331346
def test_RV_sampling_unable_if_conditioning_variables_from_lambda():
332347
x = RandomVariable(cuqi.distribution.Gaussian(0, lambda s: s))
333348

@@ -354,3 +369,4 @@ def test_RV_sample_against_distribution_sample():
354369
dist_samples = x.distribution.sample(10)
355370

356371
assert np.allclose(rv_samples.samples, dist_samples.samples)
372+

0 commit comments

Comments
 (0)