Commit 5a9c8cd

Remove unused MvNormalLogp Op
1 parent eb7c3b6

3 files changed (+0 −130 lines)

pymc/distributions/dist_math.py

Lines changed: 0 additions & 68 deletions
@@ -28,21 +28,16 @@
 import scipy.linalg
 import scipy.stats

-from pytensor.compile.builders import OpFromGraph
 from pytensor.graph.basic import Apply, Variable
 from pytensor.graph.op import Op
 from pytensor.scalar import UnaryScalarOp, upgrade_to_float_no_complex
 from pytensor.tensor import gammaln
 from pytensor.tensor.elemwise import Elemwise
-from pytensor.tensor.slinalg import Cholesky, SolveTriangular

 from pymc.distributions.shape_utils import to_tuple
 from pymc.logprob.utils import CheckParameterValue
 from pymc.pytensorf import floatX

-solve_lower = SolveTriangular(lower=True)
-solve_upper = SolveTriangular(lower=False)
-
 f = floatX
 c = -0.5 * np.log(2.0 * np.pi)
 _beta_clip_values = {
@@ -242,69 +237,6 @@ def log_normal(x, mean, **kwargs):
     return f(c) - pt.log(pt.abs(std)) - (x - mean) ** 2 / (2.0 * std**2)


-def MvNormalLogp():
-    """Compute the log pdf of a multivariate normal distribution.
-
-    This should be used in MvNormal.logp once Theano#5908 is released.
-
-    Parameters
-    ----------
-    cov: pt.matrix
-        The covariance matrix.
-    delta: pt.matrix
-        Array of deviations from the mean.
-    """
-    cov = pt.matrix("cov")
-    cov.tag.test_value = floatX(np.eye(3))
-    delta = pt.matrix("delta")
-    delta.tag.test_value = floatX(np.zeros((2, 3)))
-
-    cholesky = Cholesky(lower=True, on_error="nan")
-
-    n, k = delta.shape
-    n, k = f(n), f(k)
-    chol_cov = cholesky(cov)
-    diag = pt.diag(chol_cov)
-    ok = pt.all(diag > 0)
-
-    chol_cov = pt.switch(ok, chol_cov, pt.fill(chol_cov, 1))
-    delta_trans = solve_lower(chol_cov, delta.T).T
-
-    result = n * k * pt.log(f(2) * np.pi)
-    result += f(2) * n * pt.sum(pt.log(diag))
-    result += (delta_trans ** f(2)).sum()
-    result = f(-0.5) * result
-    logp = pt.switch(ok, result, -np.inf)
-
-    def dlogp(inputs, gradients):
-        (g_logp,) = gradients
-        cov, delta = inputs
-
-        g_logp.tag.test_value = floatX(1.0)
-        n, k = delta.shape
-
-        chol_cov = cholesky(cov)
-        diag = pt.diag(chol_cov)
-        ok = pt.all(diag > 0)
-
-        chol_cov = pt.switch(ok, chol_cov, pt.fill(chol_cov, 1))
-        delta_trans = solve_lower(chol_cov, delta.T).T
-
-        inner = n * pt.eye(k) - pt.dot(delta_trans.T, delta_trans)
-        g_cov = solve_upper(chol_cov.T, inner)
-        g_cov = solve_upper(chol_cov.T, g_cov.T)
-
-        tau_delta = solve_upper(chol_cov.T, delta_trans.T)
-        g_delta = tau_delta.T
-
-        g_cov = pt.switch(ok, g_cov, -np.nan)
-        g_delta = pt.switch(ok, g_delta, -np.nan)
-
-        return [-0.5 * g_cov * g_logp, -g_delta * g_logp]
-
-    return OpFromGraph([cov, delta], [logp], grad_overrides=dlogp, inline=True)
-
-
 class SplineWrapper(Op):
     """
     Creates an PyTensor operation from scipy.interpolate.UnivariateSpline
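Note: the deleted graph above is a Cholesky-based evaluation of the multivariate normal log-density. As a point of reference only (not part of this commit), here is a minimal NumPy/SciPy sketch of the same computation; the function name mvnormal_logp and the test values are illustrative.

import numpy as np
from scipy import linalg, stats

def mvnormal_logp(cov, delta):
    # cov = L @ L.T with L lower triangular
    chol = linalg.cholesky(cov, lower=True)
    # Whiten the deviations: row i of delta_trans is L^{-1} @ delta[i]
    delta_trans = linalg.solve_triangular(chol, delta.T, lower=True).T
    n, k = delta.shape
    quad = (delta_trans**2).sum()                    # sum of Mahalanobis terms
    logdet = 2.0 * n * np.log(np.diag(chol)).sum()   # n * log|cov|
    return -0.5 * (n * k * np.log(2.0 * np.pi) + logdet + quad)

rng = np.random.default_rng(42)
cov = np.array([[2.0, 0.5], [0.5, 1.0]])
delta = rng.normal(size=(5, 2))
expected = stats.multivariate_normal(mean=np.zeros(2), cov=cov).logpdf(delta).sum()
assert np.isclose(mvnormal_logp(cov, delta), expected)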

pymc/distributions/multivariate.py

Lines changed: 0 additions & 2 deletions
@@ -155,8 +155,6 @@ def quaddist_parse(value, mu, cov, mat_type="cov"):
         onedim = False

     delta = value - mu
-    # Use this when Theano#5908 is released.
-    # return MvNormalLogp()(self.cov, delta)
     chol_cov = cholesky(cov)
     if mat_type != "tau":
         dist, logdet, ok = quaddist_chol(delta, chol_cov)
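With the commented-out MvNormalLogp path removed, quaddist_parse/quaddist_chol above stay the only code path for MvNormal's log-density. A hedged usage sketch of reaching it through the public API (the values below are illustrative, not part of this diff):

import numpy as np
import pymc as pm

cov = np.array([[2.0, 0.5], [0.5, 1.0]])
value = np.zeros(2)

# pm.logp builds the logp graph, which routes through quaddist_parse/quaddist_chol
logp = pm.logp(pm.MvNormal.dist(mu=np.zeros(2), cov=cov), value)
print(logp.eval())  # log N(value | 0, cov)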

tests/distributions/test_dist_math.py

Lines changed: 0 additions & 60 deletions
@@ -26,7 +26,6 @@

 from pymc.distributions import Discrete
 from pymc.distributions.dist_math import (
-    MvNormalLogp,
     SplineWrapper,
     check_parameters,
     clipped_beta_rvs,
@@ -123,65 +122,6 @@ def test_multinomial_check_parameters():
     )


-class TestMvNormalLogp:
-    def test_logp(self):
-        np.random.seed(42)
-
-        chol_val = floatX(np.array([[1, 0.9], [0, 2]]))
-        cov_val = floatX(np.dot(chol_val, chol_val.T))
-        cov = pt.matrix("cov")
-        cov.tag.test_value = cov_val
-        delta_val = floatX(np.random.randn(5, 2))
-        delta = pt.matrix("delta")
-        delta.tag.test_value = delta_val
-        expect = stats.multivariate_normal(mean=np.zeros(2), cov=cov_val)
-        expect = expect.logpdf(delta_val).sum()
-        logp = MvNormalLogp()(cov, delta)
-        logp_f = pytensor.function([cov, delta], logp)
-        logp = logp_f(cov_val, delta_val)
-        npt.assert_allclose(logp, expect)
-
-    @pytensor.config.change_flags(compute_test_value="ignore")
-    def test_grad(self):
-        np.random.seed(42)
-
-        def func(chol_vec, delta):
-            chol = pt.stack(
-                [
-                    pt.stack([pt.exp(0.1 * chol_vec[0]), 0]),
-                    pt.stack([chol_vec[1], 2 * pt.exp(chol_vec[2])]),
-                ]
-            )
-            cov = pt.dot(chol, chol.T)
-            return MvNormalLogp()(cov, delta)
-
-        chol_vec_val = floatX(np.array([0.5, 1.0, -0.1]))
-
-        delta_val = floatX(np.random.randn(1, 2))
-        verify_grad(func, [chol_vec_val, delta_val])
-
-        delta_val = floatX(np.random.randn(5, 2))
-        verify_grad(func, [chol_vec_val, delta_val])
-
-    @pytensor.config.change_flags(compute_test_value="ignore")
-    def test_hessian(self):
-        chol_vec = pt.vector("chol_vec")
-        chol_vec.tag.test_value = floatX(np.array([0.1, 2, 3]))
-        chol = pt.stack(
-            [
-                pt.stack([pt.exp(0.1 * chol_vec[0]), 0]),
-                pt.stack([chol_vec[1], 2 * pt.exp(chol_vec[2])]),
-            ]
-        )
-        cov = pt.dot(chol, chol.T)
-        delta = pt.matrix("delta")
-        delta.tag.test_value = floatX(np.ones((5, 2)))
-        logp = MvNormalLogp()(cov, delta)
-        g_cov, g_delta = pt.grad(logp, [cov, delta])
-        # TODO: What's the test? Something needs to be asserted.
-        pt.grad(g_delta.sum() + g_cov.sum(), [delta, cov])
-
-
 class TestSplineWrapper:
     @pytensor.config.change_flags(compute_test_value="ignore")
     def test_grad(self):
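The deleted class only exercised the bespoke Op; gradients of the Cholesky-based logp graph can still be taken directly with pt.grad, which is roughly what test_hessian checked. A sketch under the assumption that pm.logp and MvNormal are used in place of MvNormalLogp (the symbols and values are illustrative, not part of this commit):

import numpy as np
import pytensor
import pytensor.tensor as pt
import pymc as pm

cov = pt.matrix("cov")
value = pt.vector("value")

# Build the MvNormal logp graph and differentiate it w.r.t. both inputs
logp = pm.logp(pm.MvNormal.dist(mu=pt.zeros(2), cov=cov), value)
g_cov, g_value = pt.grad(logp, [cov, value])

f = pytensor.function([cov, value], [logp, g_cov, g_value])
floatX = pytensor.config.floatX
print(f(np.eye(2, dtype=floatX), np.ones(2, dtype=floatX)))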
