Closed
Description
Description of your problem
Please provide a minimal, self-contained, and reproducible example.
with pymc3.Model() as pmodel1:
grw1 = pymc3.GaussianRandomWalk('grw1', mu=numpy.arange(4), shape=(3,4,))
grw1.random(size = None).shape
When a shape
of the form (3,4,) is passed,
I get the error message
ValueError: Input dimension mis-match. (input[0].shape[1] = 4, input[1].shape[1] = 3)
for the line
grw1 = pymc3.GaussianRandomWalk('grw1', mu=numpy.arange(4), shape=(3,4,))
Please provide the full traceback.
ValueError Traceback (most recent call last)
<ipython-input-24-177b01fab8f8> in <module>
1 with pymc3.Model() as pmodel1:
----> 2 grw1 = pymc3.GaussianRandomWalk('grw1', mu=pymc3.Normal('mu',numpy.arange(4),1e-4,shape = 4), shape=(3,4,))
3
4 grw1.random(size = None).shape
/mnt/c/Users/RISHAV/pymc3/pymc3/distributions/distribution.py in __new__(cls, name, *args, **kwargs)
81 else:
82 dist = cls.dist(*args, **kwargs)
---> 83 return model.Var(name, dist, data, total_size, dims=dims)
84
85 def __getnewargs__(self):
/mnt/c/Users/RISHAV/pymc3/pymc3/model.py in Var(self, name, dist, data, total_size, dims)
1070 with self:
1071 var = FreeRV(
-> 1072 name=name, distribution=dist, total_size=total_size, model=self
1073 )
1074 self.free_RVs.append(var)
/mnt/c/Users/RISHAV/pymc3/pymc3/model.py in __init__(self, type, owner, index, name, distribution, total_size, model)
1591 np.ones(distribution.shape, distribution.dtype) * distribution.default()
1592 )
-> 1593 self.logp_elemwiset = distribution.logp(self)
1594 # The logp might need scaling in minibatches.
1595 # This is done in `Factor`.
/mnt/c/Users/RISHAV/pymc3/pymc3/distributions/timeseries.py in logp(self, x)
272 x_i = x[1:]
273 mu, sigma = self._mu_and_sigma(self.mu, self.sigma)
--> 274 innov_like = Normal.dist(mu=x_im1 + mu, sigma=sigma).logp(x_i)
275 return self.init.logp(x[0]) + tt.sum(innov_like)
276 return self.init.logp(x)
/mnt/c/Users/RISHAV/pymc3/myvenv/lib/python3.6/site-packages/theano/tensor/var.py in __add__(self, other)
126 def __add__(self, other):
127 try:
--> 128 return theano.tensor.basic.add(self, other)
129 # We should catch the minimum number of exception here.
130 # Otherwise this will convert error when Theano flags
/mnt/c/Users/RISHAV/pymc3/myvenv/lib/python3.6/site-packages/theano/gof/op.py in __call__(self, *inputs, **kwargs)
672 thunk.outputs = [storage_map[v] for v in node.outputs]
673
--> 674 required = thunk()
675 assert not required # We provided all inputs
676
/mnt/c/Users/RISHAV/pymc3/myvenv/lib/python3.6/site-packages/theano/gof/op.py in rval()
860
861 def rval():
--> 862 thunk()
863 for o in node.outputs:
864 compute_map[o][0] = True
/mnt/c/Users/RISHAV/pymc3/myvenv/lib/python3.6/site-packages/theano/gof/cc.py in __call__(self)
1737 print(self.error_storage, file=sys.stderr)
1738 raise
-> 1739 reraise(exc_type, exc_value, exc_trace)
1740
1741
/mnt/c/Users/RISHAV/pymc3/myvenv/lib/python3.6/site-packages/six.py in reraise(tp, value, tb)
701 if value.__traceback__ is not tb:
702 raise value.with_traceback(tb)
--> 703 raise value
704 finally:
705 value = None
ValueError: Input dimension mis-match. (input[0].shape[1] = 4, input[1].shape[1] = 3)
Please provide any additional information below.
As discussed in #3985, it looks like logp
makes some incorrect assumptions about the shapes of mu
and sigma