diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 493673d3387..95d0b8fe488 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -9,6 +9,10 @@
 ### New Features
 - The `CAR` distribution has been added to allow for use of conditional autoregressions which often are used in spatial and network models.
+- The dimensionality of model variables can now be parametrized through either of `shape`, `dims` or `size` (see [#4696](https://github.com/pymc-devs/pymc3/pull/4696)):
+  - With `shape` the length of dimensions must be given numerically or as scalar Aesara `Variables`. Using `shape` restricts the model variable to the exact length and re-sizing is no longer possible.
+  - `dims` keeps model variables re-sizeable (for example through `pm.Data`) and leads to well-defined coordinates in `InferenceData` objects. An `Ellipsis` (`...`) in the last position of `dims` can be used as short-hand notation for implied dimensions.
+  - The `size` kwarg behaves like it does in Aesara/NumPy. For univariate RVs it is the same as `shape`, but for multivariate RVs it depends on how the RV implements broadcasting to dimensionality greater than `RVOp.ndim_supp`.
 - ...

 ### Maintenance
diff --git a/pymc3/distributions/distribution.py b/pymc3/distributions/distribution.py
index 892599e325d..c9a8777c992 100644
--- a/pymc3/distributions/distribution.py
+++ b/pymc3/distributions/distribution.py
@@ -20,21 +20,19 @@
 from abc import ABCMeta
 from copy import copy
-from typing import TYPE_CHECKING
+from typing import Any, Optional, Sequence, Tuple, Union

+import aesara
+import aesara.tensor as at
 import dill

+from aesara.graph.basic import Variable
 from aesara.tensor.random.op import RandomVariable
+from aesara.tensor.shape import SpecifyShape, specify_shape

+from pymc3.aesaraf import change_rv_size, pandas_to_array
 from pymc3.distributions import _logcdf, _logp
-
-if TYPE_CHECKING:
-    from typing import Optional, Callable
-
-import aesara
-import aesara.graph.basic
-import aesara.tensor as at
-
+from pymc3.exceptions import ShapeError, ShapeWarning
 from pymc3.util import UNSET, get_repr_for_variable
 from pymc3.vartypes import string_types
@@ -52,6 +50,10 @@

 PLATFORM = sys.platform

+Shape = Union[int, Sequence[Union[str, type(Ellipsis)]], Variable]
+Dims = Union[str, Sequence[Union[str, None, type(Ellipsis)]]]
+Size = Union[int, Tuple[int, ...]]
+

 class _Unpickling:
     pass
@@ -115,13 +117,111 @@ def logcdf(op, var, rvs_to_values, *dist_params, **kwargs):

     return new_cls


+def _valid_ellipsis_position(items: Union[None, Shape, Dims, Size]) -> bool:
+    if items is not None and not isinstance(items, Variable) and Ellipsis in items:
+        if any(i == Ellipsis for i in items[:-1]):
+            return False
+    return True
+
+
+def _validate_shape_dims_size(
+    shape: Any = None, dims: Any = None, size: Any = None
+) -> Tuple[Optional[Shape], Optional[Dims], Optional[Size]]:
+    # Raise on unsupported parametrization
+    if shape is not None and dims is not None:
+        raise ValueError(f"Passing both `shape` ({shape}) and `dims` ({dims}) is not supported!")
+    if dims is not None and size is not None:
+        raise ValueError(f"Passing both `dims` ({dims}) and `size` ({size}) is not supported!")
+    if shape is not None and size is not None:
+        raise ValueError(f"Passing both `shape` ({shape}) and `size` ({size}) is not supported!")
+
+    # Raise on invalid types
+    if not isinstance(shape, (type(None), int, list, tuple, Variable)):
+        raise ValueError("The `shape` parameter must be an int, list, tuple or Variable.")
+    if not isinstance(dims, (type(None), str, list, tuple)):
raise ValueError("The `dims` parameter must be a str, list or tuple.") + if not isinstance(size, (type(None), int, list, tuple)): + raise ValueError("The `size` parameter must be an int, list or tuple.") + + # Auto-convert non-tupled parameters + if isinstance(shape, int): + shape = (shape,) + if isinstance(dims, str): + dims = (dims,) + if isinstance(size, int): + size = (size,) + + # Convert to actual tuples + if not isinstance(shape, (type(None), tuple, Variable)): + shape = tuple(shape) + if not isinstance(dims, (type(None), tuple)): + dims = tuple(dims) + if not isinstance(size, (type(None), tuple)): + size = tuple(size) + + if not _valid_ellipsis_position(shape): + raise ValueError( + f"Ellipsis in `shape` may only appear in the last position. Actual: {shape}" + ) + if not _valid_ellipsis_position(dims): + raise ValueError(f"Ellipsis in `dims` may only appear in the last position. Actual: {dims}") + if size is not None and Ellipsis in size: + raise ValueError(f"The `size` parameter cannot contain an Ellipsis. Actual: {size}") + return shape, dims, size + + class Distribution(metaclass=DistributionMeta): """Statistical distribution""" rv_class = None rv_op = None - def __new__(cls, name, *args, **kwargs): + def __new__( + cls, + name: str, + *args, + rng=None, + dims: Optional[Dims] = None, + testval=None, + observed=None, + total_size=None, + transform=UNSET, + **kwargs, + ) -> RandomVariable: + """Adds a RandomVariable corresponding to a PyMC3 distribution to the current model. + + Note that all remaining kwargs must be compatible with ``.dist()`` + + Parameters + ---------- + cls : type + A PyMC3 distribution. + name : str + Name for the new model variable. + rng : optional + Random number generator to use with the RandomVariable. + dims : tuple, optional + A tuple of dimension names known to the model. + testval : optional + Test value to be attached to the output RV. + Must match its shape exactly. + observed : optional + Observed data to be passed when registering the random variable in the model. + See ``Model.register_rv``. + total_size : float, optional + See ``Model.register_rv``. + transform : optional + See ``Model.register_rv``. + **kwargs + Keyword arguments that will be forwarded to ``.dist()``. + Most prominently: ``shape`` and ``size`` + + Returns + ------- + rv : RandomVariable + The created RV, registered in the Model. + """ + try: from pymc3.model import Model @@ -134,40 +234,165 @@ def __new__(cls, name, *args, **kwargs): "for a standalone distribution." ) - rng = kwargs.pop("rng", None) - - if rng is None: - rng = model.default_rng - if not isinstance(name, string_types): raise TypeError(f"Name needs to be a string but got: {name}") - data = kwargs.pop("observed", None) + if rng is None: + rng = model.default_rng - total_size = kwargs.pop("total_size", None) + _, dims, _ = _validate_shape_dims_size(dims=dims) + resize = None + + # Create the RV without specifying testval, because the testval may have a shape + # that only matches after replicating with a size implied by dims (see below). + rv_out = cls.dist(*args, rng=rng, testval=None, **kwargs) + n_implied = rv_out.ndim + + # The `.dist()` can wrap automatically with a SpecifyShape Op which brings informative + # error messages earlier in model construction. + # Here, however, the underyling RV must be used - a new SpecifyShape Op can be added at the end. 
+        assert_shape = None
+        if isinstance(rv_out.owner.op, SpecifyShape):
+            rv_out, assert_shape = rv_out.owner.inputs
+
+        # `dims` are only available with this API, because `.dist()` can be used
+        # without a modelcontext and dims are not tracked at the Aesara level.
+        if dims is not None:
+            if Ellipsis in dims:
+                # Auto-complete the dims tuple to the full length
+                dims = (*dims[:-1], *[None] * rv_out.ndim)
+
+            n_resize = len(dims) - n_implied
+
+            # All resize dims must be known already (numerically or symbolically).
+            unknown_resize_dims = set(dims[:n_resize]) - set(model.dim_lengths)
+            if unknown_resize_dims:
+                raise KeyError(
+                    f"Dimensions {unknown_resize_dims} are unknown to the model and cannot be used to specify a `size`."
+                )

-        dims = kwargs.pop("dims", None)
+            # The numeric/symbolic resize tuple can be created from model.dim_lengths
+            resize = tuple(model.dim_lengths[dname] for dname in dims[:n_resize])
+        elif observed is not None:
+            if not hasattr(observed, "shape"):
+                observed = pandas_to_array(observed)
+            n_resize = observed.ndim - n_implied
+            resize = tuple(observed.shape[d] for d in range(n_resize))
+
+        if resize:
+            # A batch size was specified through `dims`, or implied by `observed`.
+            rv_out = change_rv_size(rv_var=rv_out, new_size=resize, expand=True)
+
+        if dims is not None:
+            # Now that we have a handle on the output RV, we can register named implied dimensions that
+            # were not yet known to the model, such that they can be used for size further downstream.
+            for di, dname in enumerate(dims[n_resize:]):
+                if dname not in model.dim_lengths:
+                    model.add_coord(dname, values=None, length=rv_out.shape[n_resize + di])

-        if "shape" in kwargs:
-            raise DeprecationWarning("The `shape` keyword is deprecated; use `size`.")
+        if testval is not None:
+            # Assigning the testval earlier causes trouble, because the RV may not yet have its final shape.
+            rv_out.tag.test_value = testval

-        transform = kwargs.pop("transform", UNSET)
+        rv_registered = model.register_rv(
+            rv_out, name, observed, total_size, dims=dims, transform=transform
+        )

-        rv_out = cls.dist(*args, rng=rng, **kwargs)
+        # Wrapping in specify_shape now does not break transforms:
+        if assert_shape is not None:
+            rv_registered = specify_shape(rv_registered, assert_shape)

-        return model.register_rv(rv_out, name, data, total_size, dims=dims, transform=transform)
+        return rv_registered

     @classmethod
-    def dist(cls, dist_params, **kwargs):
+    def dist(
+        cls,
+        dist_params,
+        *,
+        shape: Optional[Shape] = None,
+        size: Optional[Size] = None,
+        testval=None,
+        **kwargs,
+    ) -> RandomVariable:
+        """Creates a RandomVariable corresponding to the `cls` distribution.

-        testval = kwargs.pop("testval", None)
+        Parameters
+        ----------
+        dist_params
+        shape : tuple, optional
+            A tuple of sizes for each dimension of the new RV.
+        size : int, tuple, Variable, optional
+            A scalar or tuple for replicating the RV in addition
+            to its implied shape/dimensionality.
+        testval : optional
+            Test value to be attached to the output RV.
+            Must match its shape exactly.

-        rv_var = cls.rv_op(*dist_params, **kwargs)
+        Returns
+        -------
+        rv : RandomVariable
+            The created RV.
+ """ + if "dims" in kwargs: + raise NotImplementedError("The use of a `.dist(dims=...)` API is not supported.") + + shape, _, size = _validate_shape_dims_size(shape=shape, size=size) + ndim_supp = cls.rv_op.ndim_supp + + if shape is not None: + ndim_expected = len(tuple(shape)) + batch_shape = tuple(shape)[: ndim_expected - ndim_supp] + elif size is not None: + ndim_expected = ndim_supp + len(tuple(size)) + batch_shape = size + else: + ndim_expected = None + batch_shape = None + + # Create the RV with a `size` right away. + # This is not necessarily the final result. + rv_intermediate = cls.rv_op(*dist_params, size=batch_shape, **kwargs) + ndim_actual = rv_intermediate.ndim + + if shape is not None: + ndim_batch = len(tuple(shape)) - ndim_supp + elif size is not None: + ndim_batch = len(tuple(size)) + else: + ndim_batch = ndim_actual - ndim_supp + needs_resize = shape is not None and size is None and ndim_actual != ndim_expected + + # This is rare, but happens, for example, with MvNormal(np.ones((2, 3)), np.eye(3), shape=(2, 3)). + if needs_resize: + # There are shape dimensions that go beyond what's implied by the RV parameters. + # Recreate the RV without passing `size`, this time creating batch dimensions with `change_rv_size`. + rv_out = change_rv_size( + rv_var=cls.rv_op(*dist_params, size=None, **kwargs), + new_size=shape[:-ndim_batch] if ndim_batch > 0 else (), + expand=True, + ) + if not rv_out.ndim == ndim_expected: + raise ShapeError( + f"Resized RV does not have the expected dimensionality. ", + f"This indicates a severe problem. Please open an issue.", + actual=rv_out.ndim, + expected=ndim_batch + ndim_supp, + ) + else: + rv_out = rv_intermediate + + # Warn about the edge cases where the RV Op creates more dimensions than + # it should based on `size` and `RVOp.ndim_supp`. + if size is not None and ndim_actual != ndim_expected: + warnings.warn( + f"You may have expected a ({len(tuple(size))}+{ndim_supp})-dimensional RV, but the resulting RV will be {ndim_actual}-dimensional.", + ShapeWarning, + ) if testval is not None: - rv_var.tag.test_value = testval + rv_out.tag.test_value = testval - return rv_var + return rv_out def _distr_parameters_for_repr(self): """Return the names of the parameters for this distribution (e.g. "mu" diff --git a/pymc3/model.py b/pymc3/model.py index 9b741b1dd81..77c3e188999 100644 --- a/pymc3/model.py +++ b/pymc3/model.py @@ -47,7 +47,6 @@ from pandas import Series from pymc3.aesaraf import ( - change_rv_size, gradient, hessian, inputvars, @@ -1020,14 +1019,18 @@ def set_data( New values for the shared variable. coords : optional, dict New coordinate values for dimensions of the shared variable. - Must be provided for all named dimensions that change in length. + Must be provided for all named dimensions that change in length + and already have coordinate values. """ shared_object = self[name] if not isinstance(shared_object, SharedVariable): raise TypeError( - f"The variable `{name}` must be defined as `pymc3.Data` inside the model to allow updating. " + f"The variable `{name}` must be a `SharedVariable` (e.g. `pymc3.Data`) allow updating. " f"The current type is: {type(shared_object)}" ) + + if isinstance(values, list): + values = np.array(values) values = pandas_to_array(values) dims = self.RV_dims.get(name, None) or () coords = coords or {} @@ -1049,10 +1052,11 @@ def set_data( # Reject resizing if we already know that it would create shape problems. 
             # NOTE: If there are multiple pm.Data containers sharing this dim, but the user only
             #       changes the values for one of them, they will run into shape problems nonetheless.
-            if not isinstance(length_tensor, ScalarSharedVariable) and length_changed:
+            length_belongs_to = length_tensor.owner.inputs[0].owner.inputs[0]
+            if not isinstance(length_belongs_to, SharedVariable) and length_changed:
                 raise ShapeError(
-                    f"Resizing dimension {dname} with values of length {new_length} would lead to incompatibilities, "
-                    f"because the dimension was not initialized from a shared variable. "
+                    f"Resizing dimension '{dname}' with values of length {new_length} would lead to incompatibilities, "
+                    f"because the dimension was initialized from '{length_belongs_to}' which is not a shared variable. "
                     f"Check if the dimension was defined implicitly before the shared variable '{name}' was created, "
                     f"for example by a model variable.",
                     actual=new_length,
@@ -1115,7 +1119,10 @@ def register_rv(self, rv_var, name, data=None, total_size=None, dims=None, trans
             and not isinstance(data, (GenTensorVariable, Minibatch))
             and data.owner is not None
         ):
-            raise TypeError("Observed data cannot consist of symbolic variables.")
+            raise TypeError(
+                "Variables that depend on other nodes cannot be used for observed data. "
+                f"The data variable was: {data}"
+            )

         data = pandas_to_array(data)

@@ -1135,6 +1142,7 @@ def make_obs_var(
     ==========
     rv_var
         The random variable that is observed.
+        Its dimensionality must already be compatible with the data.
     data
         The observed data.
     dims: tuple
@@ -1146,21 +1154,10 @@ def make_obs_var(
     name = rv_var.name
     data = pandas_to_array(data).astype(rv_var.dtype)

-    # The shapes of the observed random variable and its data might not
-    # match. We need need to update the observed random variable's `size`
-    # (i.e. number of samples) so that it matches the data.
-
-    # Setting `size` produces a random variable with shape `size +
-    # support_shape`, where `len(support_shape) == op.ndim_supp`, we need
-    # to disregard the last `op.ndim_supp`-many dimensions when we
-    # determine the appropriate `size` value from `data.shape`.
-    ndim_supp = rv_var.owner.op.ndim_supp
-    if ndim_supp > 0:
-        new_size = data.shape[:-ndim_supp]
-    else:
-        new_size = data.shape
-
-    rv_var = change_rv_size(rv_var, new_size)
+    if data.ndim != rv_var.ndim:
+        raise ShapeError(
+            "Dimensionality of data and RV don't match.", actual=data.ndim, expected=rv_var.ndim
+        )

     if aesara.config.compute_test_value != "off":
         test_value = getattr(rv_var.tag, "test_value", None)
diff --git a/pymc3/tests/test_data_container.py b/pymc3/tests/test_data_container.py
index 4aa20eedb8b..93acae3057c 100644
--- a/pymc3/tests/test_data_container.py
+++ b/pymc3/tests/test_data_container.py
@@ -18,10 +18,13 @@
 from aesara import shared
 from aesara.tensor.sharedvar import ScalarSharedVariable
+from aesara.tensor.var import TensorVariable

 import pymc3 as pm

+from pymc3.aesaraf import floatX
 from pymc3.distributions import logpt
+from pymc3.exceptions import ShapeError
 from pymc3.tests.helpers import SeededTest

@@ -235,7 +238,7 @@ def test_set_data_to_non_data_container_variables(self):
         )
         with pytest.raises(TypeError) as error:
             pm.set_data({"beta": [1.1, 2.2, 3.3]}, model=model)
-        error.match("defined as `pymc3.Data` inside the model")
+        error.match("The variable `beta` must be a `SharedVariable`")

     @pytest.mark.xfail(reason="Depends on ModelGraph")
     def test_model_to_graphviz_for_model_with_data_container(self):
@@ -301,6 +304,38 @@ def test_explicit_coords(self):
         assert isinstance(pmodel.dim_lengths["columns"], ScalarSharedVariable)
         assert pmodel.dim_lengths["columns"].eval() == 7

+    def test_symbolic_coords(self):
+        """
+        In v4 dimensions can be created without passing coordinate values.
+        Their lengths are then automatically linked to the corresponding Tensor dimension.
+        """
+        with pm.Model() as pmodel:
+            intensity = pm.Data("intensity", np.ones((2, 3)), dims=("row", "column"))
+            assert "row" in pmodel.dim_lengths
+            assert "column" in pmodel.dim_lengths
+            assert isinstance(pmodel.dim_lengths["row"], TensorVariable)
+            assert isinstance(pmodel.dim_lengths["column"], TensorVariable)
+            assert pmodel.dim_lengths["row"].eval() == 2
+            assert pmodel.dim_lengths["column"].eval() == 3
+
+            intensity.set_value(floatX(np.ones((4, 5))))
+            assert pmodel.dim_lengths["row"].eval() == 4
+            assert pmodel.dim_lengths["column"].eval() == 5
+
+    def test_no_resize_of_implied_dimensions(self):
+        with pm.Model() as pmodel:
+            # Imply a dimension through RV params
+            pm.Normal("n", mu=[1, 2, 3], dims="city")
+            # _Use_ the dimension for a data variable
+            inhabitants = pm.Data("inhabitants", [100, 200, 300], dims="city")
+
+            # Attempting to re-size the dimension through the data variable would
+            # cause shape problems in InferenceData conversion, because the RV remains (3,).
+            with pytest.raises(
+                ShapeError, match="was initialized from 'n' which is not a shared variable"
+            ):
+                pmodel.set_data("inhabitants", [1, 2, 3, 4])
+
     def test_implicit_coords_series(self):
         ser_sales = pd.Series(
             data=np.random.randint(low=0, high=30, size=22),
diff --git a/pymc3/tests/test_shape_handling.py b/pymc3/tests/test_shape_handling.py
index c7524a1fae7..53cdc3fea39 100644
--- a/pymc3/tests/test_shape_handling.py
+++ b/pymc3/tests/test_shape_handling.py
@@ -13,6 +13,7 @@
 #   limitations under the License.
 import sys

+import aesara
 import numpy as np
 import pytest

@@ -20,6 +21,7 @@

 import pymc3 as pm

+from pymc3.distributions.distribution import _validate_shape_dims_size
 from pymc3.distributions.shape_utils import (
     broadcast_dist_samples_shape,
     broadcast_dist_samples_to,
@@ -28,6 +30,7 @@
     shapes_broadcasting,
     to_tuple,
 )
+from pymc3.exceptions import ShapeWarning

 test_shapes = [
     (tuple(), (1,), (4,), (5, 4)),
@@ -221,6 +224,119 @@ def test_sample_generate_values(fixture_model, fixture_sizes):
         for rv in RVs:
             assert prior[rv.name].shape == size + tuple(rv.distribution.shape)

+
+class TestShapeDimsSize:
+    @pytest.mark.parametrize("param_shape", [(), (3,)])
+    @pytest.mark.parametrize("batch_shape", [(), (3,)])
+    @pytest.mark.parametrize(
+        "parametrization",
+        [
+            "implicit",
+            "shape",
+            # "shape...",
+            "dims",
+            "dims...",
+            "size",
+        ],
+    )
+    def test_param_and_batch_shape_combos(
+        self, param_shape: tuple, batch_shape: tuple, parametrization: str
+    ):
+        coords = {}
+        param_dims = []
+        batch_dims = []
+
+        # Create coordinates corresponding to the parameter shape
+        for d in param_shape:
+            dname = f"param_dim_{d}"
+            coords[dname] = [f"c_{i}" for i in range(d)]
+            param_dims.append(dname)
+        assert len(param_dims) == len(param_shape)
+        # Create coordinates corresponding to the batch shape
+        for d in batch_shape:
+            dname = f"batch_dim_{d}"
+            coords[dname] = [f"c_{i}" for i in range(d)]
+            batch_dims.append(dname)
+        assert len(batch_dims) == len(batch_shape)
+
+        with pm.Model(coords=coords) as pmodel:
+            mu = aesara.shared(np.random.normal(size=param_shape))
+
+            with pytest.warns(None):
+                if parametrization == "implicit":
+                    rv = pm.Normal("rv", mu=mu)
+                    assert rv.eval().shape == param_shape
+                else:
+                    expected_shape = batch_shape + param_shape
+                    if parametrization == "shape":
+                        rv = pm.Normal("rv", mu=mu, shape=batch_shape + param_shape)
+                        assert rv.eval().shape == expected_shape
+                    # elif parametrization == "shape...":
+                    #     rv = pm.Normal("rv", mu=mu, shape=(*batch_shape, ...))
+                    #     assert rv.eval().shape == batch_shape + param_shape
+                    elif parametrization == "dims":
+                        rv = pm.Normal("rv", mu=mu, dims=batch_dims + param_dims)
+                        assert rv.eval().shape == expected_shape
+                    elif parametrization == "dims...":
+                        rv = pm.Normal("rv", mu=mu, dims=(*batch_dims, ...))
+                        n_size = len(batch_shape)
+                        n_implied = len(param_shape)
+                        ndim = n_size + n_implied
+                        assert len(pmodel.RV_dims["rv"]) == ndim, pmodel.RV_dims
+                        assert len(pmodel.RV_dims["rv"][:n_size]) == len(batch_dims)
+                        assert len(pmodel.RV_dims["rv"][n_size:]) == len(param_dims)
+                        if n_implied > 0:
+                            assert pmodel.RV_dims["rv"][-1] is None
+                    elif parametrization == "size":
+                        rv = pm.Normal("rv", mu=mu, size=batch_shape + param_shape)
+                        assert rv.eval().shape == expected_shape
+                    else:
+                        raise NotImplementedError("Invalid test case parametrization.")
+
+    def test_define_dims_on_the_fly(self):
+        with pm.Model() as pmodel:
+            agedata = aesara.shared(np.array([10, 20, 30]))
+
+            # Associate the "patient" dim with an implied dimension
+            age = pm.Normal("age", agedata, dims=("patient",))
+            assert "patient" in pmodel.dim_lengths
+            assert pmodel.dim_lengths["patient"].eval() == 3
+
+            # Use the dim to replicate a new RV
+            effect = pm.Normal("effect", 0, dims=("patient",))
+            assert effect.ndim == 1
+            assert effect.eval().shape == (3,)
+
+            # Now change the length of the implied dimension
+            agedata.set_value([1, 2, 3, 4])
+            # The change should propagate all the way through
+            assert effect.eval().shape == (4,)
+
+    @pytest.mark.xfail(reason="Simultaneous use of size and dims is not implemented")
+    def test_data_defined_size_dimension_can_register_dimname(self):
+        with pm.Model() as pmodel:
+            x = pm.Data("x", [[1, 2, 3, 4]], dims=("first", "second"))
+            assert "first" in pmodel.dim_lengths
+            assert "second" in pmodel.dim_lengths
+            # two dimensions are implied; a "third" dimension is created
+            y = pm.Normal("y", mu=x, size=2, dims=("third", "first", "second"))
+            assert "third" in pmodel.dim_lengths
+            assert y.eval().shape == (2, 1, 4)
+
+    def test_can_resize_data_defined_size(self):
+        with pm.Model() as pmodel:
+            x = pm.Data("x", [[1, 2, 3, 4]], dims=("first", "second"))
+            y = pm.Normal("y", mu=0, dims=("first", "second"))
+            z = pm.Normal("z", mu=y, observed=np.ones((1, 4)))
+            assert x.eval().shape == (1, 4)
+            assert y.eval().shape == (1, 4)
+            assert z.eval().shape == (1, 4)
+            assert "first" in pmodel.dim_lengths
+            assert "second" in pmodel.dim_lengths
+            pmodel.set_data("x", [[1, 2], [3, 4], [5, 6]])
+            assert x.eval().shape == (3, 2)
+            assert y.eval().shape == (3, 2)
+            assert z.eval().shape == (3, 2)
+
     @pytest.mark.xfail(
         condition=sys.platform == "win32",
         reason="See https://github.com/pymc-devs/pymc3/issues/4652.",
     )
@@ -231,3 +347,64 @@ def test_observed_with_column_vector(self):
         model.logp()
         pm.Normal("x2", mu=0, sd=1, observed=np.random.normal(size=(3, 1)))
         model.logp()
+
+    def test_dist_api_works(self):
+        mu = aesara.shared(np.array([1, 2, 3]))
+        with pytest.raises(NotImplementedError, match="API is not supported"):
+            pm.Normal.dist(mu=mu, dims=("town",))
+        assert pm.Normal.dist(mu=mu, shape=(3,)).eval().shape == (3,)
+        assert pm.Normal.dist(mu=mu, shape=(5, 3)).eval().shape == (5, 3)
+        # assert pm.Normal.dist(mu=mu, shape=(7, ...)).eval().shape == (7, 3)
+        assert pm.Normal.dist(mu=mu, size=(3,)).eval().shape == (3,)
+        assert pm.Normal.dist(mu=mu, size=(4, 3)).eval().shape == (4, 3)
+
+    def test_mvnormal_ndim_warning(self):
+        with pytest.warns(None):
+            # parameters add 1 batch dimension (4), shape adds another (5)
+            rv = pm.MvNormal.dist(mu=np.ones((4, 3)), cov=np.eye(3), shape=(5, 4, 3))
+            assert rv.ndim == 3
+            assert tuple(rv.shape.eval()) == (5, 4, 3)
+
+        # with pytest.warns(None):
+        #     rv = pm.MvNormal.dist(mu=np.ones((4, 3, 2)), cov=np.eye(2), shape=(6, 5, ...))
+        #     assert rv.ndim == 5
+        #     assert tuple(rv.shape.eval()) == (6, 5, 4, 3, 2)
+
+        # When using `size` the API behaves like Aesara/NumPy
+        with pytest.warns(
+            ShapeWarning,
+            match=r"You may have expected a \(2\+1\)-dimensional RV, but the resulting RV will be 5-dimensional",
+        ):
+            pm.MvNormal.dist(mu=np.ones((5, 4, 3)), cov=np.eye(3), size=(5, 4))
+
+    def test_lazy_flavors(self):
+
+        _validate_shape_dims_size(shape=5)
+        _validate_shape_dims_size(dims="town")
+        _validate_shape_dims_size(size=7)
+
+        assert pm.Uniform.dist(2, [4, 5], size=[3, 2]).eval().shape == (3, 2)
+        assert pm.Uniform.dist(2, [4, 5], shape=[3, 2]).eval().shape == (3, 2)
+        with pm.Model(coords=dict(town=["Greifswald", "Madrid"])):
+            assert pm.Normal("n2", mu=[1, 2], dims=("town",)).eval().shape == (2,)
+
+    def test_invalid_flavors(self):
+        # redundant parametrizations
+        with pytest.raises(ValueError, match="Passing both"):
+            _validate_shape_dims_size(shape=(2,), dims=("town",))
+        with pytest.raises(ValueError, match="Passing both"):
+            _validate_shape_dims_size(dims=("town",), size=(2,))
+        with pytest.raises(ValueError, match="Passing both"):
+            _validate_shape_dims_size(shape=(3,), size=(3,))
+
+        # invalid, but not necessarily rare
+        with pytest.raises(ValueError, match="must be an int, list or tuple"):
+            _validate_shape_dims_size(size="notasize")
+
+        # invalid ellipsis positions
+        with pytest.raises(ValueError, match="may only appear in the last position"):
+            _validate_shape_dims_size(shape=(3, ..., 2))
+        # with pytest.raises(ValueError, match="may only appear in the last position"):
+        #     _validate_shape_dims_size(dims=(..., "town"))
+        with pytest.raises(ValueError, match="cannot contain"):
+            _validate_shape_dims_size(size=(3, ...))
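
For reviewers, here is a usage sketch of the three parametrization flavors this PR introduces, pieced together from the new tests above (`test_lazy_flavors`, `test_dist_api_works`, `test_mvnormal_ndim_warning`); the variable names are illustrative only:

```python
import numpy as np
import pymc3 as pm

with pm.Model(coords={"town": ["Greifswald", "Madrid"]}) as model:
    # `shape`: lengths are fixed (numeric or scalar Aesara Variables); no resizing later.
    x = pm.Normal("x", mu=0, shape=(3,))

    # `dims`: lengths are taken from the model's named dimensions and stay resizable;
    # an Ellipsis in the last position is shorthand for the implied dimensions.
    n = pm.Normal("n", mu=[1, 2], dims=("town",))

    # `size`: Aesara/NumPy semantics - for univariate RVs this is the same as `shape`.
    s = pm.Normal("s", mu=0, size=(3,))

# For a multivariate RV, `shape` includes the support dimension (length 3 here),
# while `size` only specifies the batch dimensions in front of it.
mv = pm.MvNormal.dist(mu=np.ones((4, 3)), cov=np.eye(3), shape=(5, 4, 3))
assert tuple(mv.shape.eval()) == (5, 4, 3)
```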
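Similarly, a sketch of the resizing behavior that `dims` enables together with `pm.Data` and `Model.set_data`, mirroring `test_can_resize_data_defined_size`:

```python
import numpy as np
import pymc3 as pm

with pm.Model() as model:
    # `pm.Data` registers the "first" and "second" dims with symbolic lengths...
    x = pm.Data("x", [[1, 2, 3, 4]], dims=("first", "second"))
    # ...so an RV defined over the same dims follows the data container's shape.
    y = pm.Normal("y", mu=0, dims=("first", "second"))
    assert y.eval().shape == (1, 4)

    # Resizing the shared data propagates through the dim lengths to the RV.
    model.set_data("x", [[1, 2], [3, 4], [5, 6]])
    assert y.eval().shape == (3, 2)
```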