diff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py
index bc6a5da9094..aa5b2119cfa 100644
--- a/pymc/distributions/multivariate.py
+++ b/pymc/distributions/multivariate.py
@@ -238,7 +238,7 @@ class MvNormal(Continuous):
     rv_op = multivariate_normal
 
     @classmethod
-    def dist(cls, mu, cov=None, tau=None, chol=None, lower=True, **kwargs):
+    def dist(cls, mu=0, cov=None, *, tau=None, chol=None, lower=True, **kwargs):
         mu = pt.as_tensor_variable(mu)
         cov = quaddist_matrix(cov, chol, tau, lower)
         # PyTensor is stricter about the shape of mu, than PyMC used to be
@@ -358,7 +358,7 @@ class MvStudentT(Continuous):
     rv_op = mv_studentt
 
     @classmethod
-    def dist(cls, nu, *, Sigma=None, mu, scale=None, tau=None, chol=None, lower=True, **kwargs):
+    def dist(cls, nu, *, Sigma=None, mu=0, scale=None, tau=None, chol=None, lower=True, **kwargs):
         cov = kwargs.pop("cov", None)
         if cov is not None:
             warnings.warn(
diff --git a/tests/distributions/test_multivariate.py b/tests/distributions/test_multivariate.py
index 4fb69e92e34..3013cd15331 100644
--- a/tests/distributions/test_multivariate.py
+++ b/tests/distributions/test_multivariate.py
@@ -2300,7 +2300,11 @@ def test_mvnormal_no_cholesky_in_model_logp():
 
 
 def test_mvnormal_mu_convenience():
-    """Test that mu is broadcasted to the length of cov"""
+    """Test that mu is broadcasted to the length of cov and provided a default of zero"""
+    x = pm.MvNormal.dist(cov=np.eye(3))
+    mu = x.owner.inputs[3]
+    np.testing.assert_allclose(mu.eval(), np.zeros((3,)))
+
     x = pm.MvNormal.dist(mu=1, cov=np.eye(3))
     mu = x.owner.inputs[3]
     np.testing.assert_allclose(mu.eval(), np.ones((3,)))
@@ -2325,7 +2329,11 @@ def test_mvnormal_mu_convenience():
 
 
 def test_mvstudentt_mu_convenience():
-    """Test that mu is broadcasted to the length of scale"""
+    """Test that mu is broadcasted to the length of scale and provided a default of zero"""
+    x = pm.MvStudentT.dist(nu=4, scale=np.eye(3))
+    mu = x.owner.inputs[4]
+    np.testing.assert_allclose(mu.eval(), np.zeros((3,)))
+
     x = pm.MvStudentT.dist(nu=4, mu=1, scale=np.eye(3))
     mu = x.owner.inputs[4]
     np.testing.assert_allclose(mu.eval(), np.ones((3,)))