Allow parametrization through either shape, dims or size
michaelosthege committed Apr 18, 2021
1 parent fd3f730 commit 4b0a76d
Showing 10 changed files with 476 additions and 103 deletions.
5 changes: 5 additions & 0 deletions RELEASE-NOTES.md
@@ -8,6 +8,11 @@

### New Features
- The `CAR` distribution has been added to allow for use of conditional autoregressions which often are used in spatial and network models.
- The dimensionality of model variables can now be parametrized through either of `shape`, `dims` or `size` (see [#4625](https://github.com/pymc-devs/pymc3/pull/4625) and the sketch after this list):
  - With `shape` the lengths of dimensions must be given numerically or as scalar Aesara `Variables`.
  - `dims` is arguably the most elegant parametrization, because it allows you to resize `pm.Data` variables and leads to well-defined coordinates in `InferenceData` objects.
  - The `size` kwarg creates new dimensions in addition to what is implied by RV parameters.
  - An `Ellipsis` (`...`) in the last position of `shape` or `dims` can be used as short-hand notation for implied dimensions.
- ...
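As a minimal sketch of the new parametrizations (model and coordinate names are invented; the resulting shapes assume scalar or length-3 parameters):

```python
import numpy as np
import pymc3 as pm

with pm.Model(coords={"town": ["Halle", "Jena", "Gera"]}) as model:
    # shape: dimension lengths are given numerically
    x = pm.Normal("x", mu=0, sigma=1, shape=(3,))
    # dims: tied to model coordinates and carried into InferenceData
    y = pm.Normal("y", mu=0, sigma=1, dims="town")
    # size: adds new dimensions on top of what the parameters imply
    z = pm.Normal("z", mu=np.zeros(3), sigma=1, size=(2,))  # -> (2, 3)
    # Ellipsis: short-hand for the parameter-implied dimensions
    w = pm.Normal("w", mu=np.zeros(3), sigma=1, shape=(2, ...))  # -> (2, 3)
```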

### Maintenance
232 changes: 207 additions & 25 deletions pymc3/distributions/distribution.py
@@ -13,28 +13,25 @@
# limitations under the License.
import contextvars
import inspect
import logging
import multiprocessing
import sys
import types
import warnings

from abc import ABCMeta
from copy import copy
from typing import Any, Optional, Sequence, Tuple, Union

import aesara
import aesara.tensor as at
import dill

from aesara.graph.basic import Variable
from aesara.tensor.random.op import RandomVariable

from pymc3.aesaraf import change_rv_size
from pymc3.distributions import _logcdf, _logp

from pymc3.util import UNSET, get_repr_for_variable
from pymc3.vartypes import string_types

@@ -46,12 +43,18 @@
"NoDistribution",
]

_log = logging.getLogger(__file__)

vectorized_ppc = contextvars.ContextVar(
"vectorized_ppc", default=None
) # type: contextvars.ContextVar[Optional[Callable]]

PLATFORM = sys.platform

Shape = Union[int, Sequence[Union[int, Variable, type(Ellipsis)]], Variable]
Dims = Union[str, Sequence[Union[str, None, type(Ellipsis)]]]
Size = Union[int, Tuple[int, ...]]


class _Unpickling:
pass
@@ -122,13 +125,111 @@ def logcdf(op, var, rvs_to_values, *dist_params, **kwargs):
return new_cls


def _valid_ellipsis_position(items: Union[None, Shape, Dims, Size]) -> bool:
    """Check that an Ellipsis, if present, appears only in the last position."""
    if items is not None and not isinstance(items, Variable) and Ellipsis in items:
        if any(i == Ellipsis for i in items[:-1]):
            return False
    return True
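For illustration, a few hedged checks of the helper's behavior (assuming it is importable from this module):

```python
from pymc3.distributions.distribution import _valid_ellipsis_position

assert _valid_ellipsis_position((3, ...)) is True          # Ellipsis in last position
assert _valid_ellipsis_position((..., 3)) is False         # anywhere else is rejected
assert _valid_ellipsis_position(("town", "year")) is True  # no Ellipsis at all
assert _valid_ellipsis_position(None) is True              # nothing to check
```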


def _validate_shape_dims_size(
shape: Any = None, dims: Any = None, size: Any = None
) -> Tuple[Optional[Shape], Optional[Dims], Optional[Size]]:
# Raise on unsupported parametrization
    # Raise on unsupported parametrization
    if shape is not None and dims is not None:
        raise ValueError(f"Passing both `shape` ({shape}) and `dims` ({dims}) is not supported!")
    if dims is not None and size is not None:
        raise ValueError(f"Passing both `dims` ({dims}) and `size` ({size}) is not supported!")
    if shape is not None and size is not None:
        raise ValueError(f"Passing both `shape` ({shape}) and `size` ({size}) is not supported!")

    # Raise on invalid types
    if not isinstance(shape, (type(None), int, list, tuple, Variable)):
        raise ValueError("The `shape` parameter must be an int, list, tuple or Variable.")
    if not isinstance(dims, (type(None), str, list, tuple)):
        raise ValueError("The `dims` parameter must be a str, list or tuple.")
    if not isinstance(size, (type(None), int, list, tuple)):
        raise ValueError("The `size` parameter must be an int, list or tuple.")

# Auto-convert non-tupled parameters
if isinstance(shape, int):
shape = (shape,)
if isinstance(dims, str):
dims = (dims,)
if isinstance(size, int):
size = (size,)

# Convert to actual tuples
if not isinstance(shape, (type(None), tuple, Variable)):
shape = tuple(shape)
if not isinstance(dims, (type(None), tuple)):
dims = tuple(dims)
if not isinstance(size, (type(None), tuple)):
size = tuple(size)

if not _valid_ellipsis_position(shape):
raise ValueError(
f"Ellipsis in `shape` may only appear in the last position. Actual: {shape}"
)
if not _valid_ellipsis_position(dims):
raise ValueError(f"Ellipsis in `dims` may only appear in the last position. Actual: {dims}")
if size is not None and Ellipsis in size:
raise ValueError("The `size` parameter cannot contain an Ellipsis. Actual: {size}")
return shape, dims, size
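A sketch of how the validator normalizes scalar and list inputs to tuples, per the code above:

```python
from pymc3.distributions.distribution import _validate_shape_dims_size

assert _validate_shape_dims_size(shape=5) == ((5,), None, None)
assert _validate_shape_dims_size(dims="town") == (None, ("town",), None)
assert _validate_shape_dims_size(size=[2, 3]) == (None, None, (2, 3))

# Combining any two of the three parametrizations raises a ValueError.
```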


class Distribution(metaclass=DistributionMeta):
"""Statistical distribution"""

rv_class = None
rv_op = None

    def __new__(
cls,
name: str,
*args,
rng=None,
dims: Optional[Dims] = None,
testval=None,
observed=None,
total_size=None,
transform=UNSET,
**kwargs,
) -> RandomVariable:
"""Adds a RandomVariable corresponding to a PyMC3 distribution to the current model.
Note that all remaining kwargs must be compatible with .dist()
Parameters
----------
cls : type
A PyMC3 distribution.
name : str
Name for the new model variable.
rng : optional
Random number generator to use with the RandomVariable.
dims : tuple, optional
A tuple of dimension names known to the model.
testval : optional
Test value to be attached to the output RV.
Must match its shape exactly.
observed : optional
Observed data to be passed when registering the random variable in the model.
See `Model.register_rv`.
total_size : float, optional
See `Model.register_rv`.
transform : optional
See `Model.register_rv`.
**kwargs
Keyword arguments that will be forwarded to .dist().
            Most prominently: `shape` and `size`

        Returns
-------
rv : RandomVariable
The created RV, registered in the Model.
"""

try:
from pymc3.model import Model

@@ -141,40 +242,121 @@ def __new__(cls, name, *args, **kwargs):
"for a standalone distribution."
)

rng = kwargs.pop("rng", None)
if not isinstance(name, string_types):
raise TypeError(f"Name needs to be a string but got: {name}")

if rng is None:
rng = model.default_rng

if not isinstance(name, string_types):
raise TypeError(f"Name needs to be a string but got: {name}")
_, dims, _ = _validate_shape_dims_size(dims=dims)

data = kwargs.pop("observed", None)
# Create the RV without specifying testval, because the testval may
# have a shape that only matches after replicating with a size implied
# by dims (see below).
rv_out = cls.dist(*args, rng=rng, testval=None, **kwargs)

total_size = kwargs.pop("total_size", None)

dims = kwargs.pop("dims", None)
# `dims` are only available with this API, because `.dist()` can be used
# without a modelcontext and dims are not yet tracked at the Aesara level.
if dims is not None:
if Ellipsis in dims:
# Auto-complete the dims tuple to the full length
dims = (*dims[:-1], *[None] * rv_out.ndim)

n_implied = rv_out.ndim
n_size = len(dims) - n_implied

# All size dims must be known already (numerically or symbolically).
unknown_size_dims = set(dims[:n_size]) - set(model.dim_lengths)
if unknown_size_dims:
raise KeyError(
f"Dimensions {unknown_size_dims} are unknown to the model and cannot be used to specify a `size`."
)

if "shape" in kwargs:
raise DeprecationWarning("The `shape` keyword is deprecated; use `size`.")
# The numeric/symbolic size tuple can be created using model.RV_dim_lengths
size = tuple(model.dim_lengths[dname] for dname in dims[:n_size])

transform = kwargs.pop("transform", UNSET)
if size:
# A batch size was specified through dims!
rv_out = change_rv_size(rv_var=rv_out, new_size=size, expand=True)

rv_out = cls.dist(*args, rng=rng, **kwargs)
# Now that we have a handle on the output RV, we can register named implied dimensions that
# were not yet known to the model, such that they can be used for size further downstream.
for di, dname in enumerate(dims[n_size:]):
            if dname not in model.dim_lengths:
model.add_coord(dname, values=None, length=rv_out.shape[n_size + di])

        if testval is not None:
            # Assigning the testval earlier causes trouble, because the RV
            # may not yet have its final shape at that point.
            rv_out.tag.test_value = testval

        return model.register_rv(rv_out, name, observed, total_size, dims=dims, transform=transform)
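A hedged example of the dims resolution above: a dimension name that is already known to the model contributes a batch size, while a trailing Ellipsis stands for the parameter-implied dimensions.

```python
import numpy as np
import pymc3 as pm

with pm.Model(coords={"year": [2020, 2021]}) as m:
    # "year" has length 2, so it becomes a batch size;
    # the Ellipsis covers the dimension implied by the length-3 `mu`.
    x = pm.Normal("x", mu=np.zeros(3), sigma=1, dims=("year", ...))
    assert x.eval().shape == (2, 3)
```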

@classmethod
    def dist(
cls,
dist_params,
*,
shape: Optional[Shape] = None,
size: Optional[Size] = None,
testval=None,
**kwargs,
) -> RandomVariable:
"""Creates a RandomVariable corresponding to the `cls` distribution.
Parameters
----------
dist_params
shape : tuple, optional
A tuple of sizes for each dimension of the new RV.
Ellipsis (...) may be used in the last position of the tuple,
and automatically expand to the shape implied by RV inputs.
testval = kwargs.pop("testval", None)
size : int, tuple, Variable, optional
A scalar or tuple for replicating the RV in addition
to its implied shape/dimensionality.
testval : optional
Test value to be attached to the output RV.
Must match its shape exactly.
rv_var = cls.rv_op(*dist_params, **kwargs)
Returns
-------
rv : RandomVariable
The created RV.
"""
if "dims" in kwargs:
raise NotImplementedError("The use of a `.dist(dims=...)` API is not yet supported.")

shape, _, size = _validate_shape_dims_size(shape=shape, size=size)
        # Create the RV without specifying size or testval.
        # The size will be expanded later (if necessary), and only then will the testval fit.
rv_native = cls.rv_op(*dist_params, size=None, **kwargs)

if shape is None and size is None:
size = ()
elif shape is not None:
if isinstance(shape, Variable):
size = ()
else:
if Ellipsis in shape:
size = tuple(shape[:-1])
else:
size = tuple(shape[: len(shape) - rv_native.ndim])
# no-op conditions:
# `elif size is not None` (User already specified how to expand the RV)
# `else` (Unreachable)

if size:
rv_out = change_rv_size(rv_var=rv_native, new_size=size, expand=True)
else:
rv_out = rv_native

        if testval is not None:
            rv_out.tag.test_value = testval

        return rv_out
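A sketch contrasting `shape` and `size` under the logic above (`shape` describes all dimensions, `size` only the additional ones):

```python
import pymc3 as pm

rv = pm.Normal.dist(mu=[1.0, 2.0, 3.0])                  # implied shape (3,)
rv = pm.Normal.dist(mu=[1.0, 2.0, 3.0], shape=(4, 3))    # -> (4, 3)
rv = pm.Normal.dist(mu=[1.0, 2.0, 3.0], shape=(4, ...))  # same, via Ellipsis
rv = pm.Normal.dist(mu=[1.0, 2.0, 3.0], size=(4,))       # -> (4, 3)
```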

def _distr_parameters_for_repr(self):
"""Return the names of the parameters for this distribution (e.g. "mu"
12 changes: 7 additions & 5 deletions pymc3/model.py
@@ -959,17 +959,18 @@ def add_coord(
----------
name : str
Name of the dimension.
            Forbidden: {"chain", "draw", "__sample__"}
values : optional, array-like
Coordinate values or ``None`` (for auto-numbering).
If ``None`` is passed, a ``length`` must be specified.
length : optional, scalar
            A symbolic scalar of the dimension's length.
Defaults to ``aesara.shared(len(values))``.
"""
if name in {"draw", "chain"}:
if name in {"draw", "chain", "__sample__"}:
raise ValueError(
"Dimensions can not be named `draw` or `chain`, as they are reserved for the sampler's outputs."
"Dimensions can not be named `draw`, `chain` or `__sample__`, "
"as those are reserved for use in `InferenceData`."
)
if values is None and length is None:
raise ValueError(
@@ -981,7 +982,7 @@
)
if name in self.coords:
if not values.equals(self.coords[name]):
raise ValueError("Duplicate and incompatiple coordinate: %s." % name)
raise ValueError(f"Duplicate and incompatiple coordinate: {name}.")
else:
self._coords[name] = values
self._dim_lengths[name] = length or aesara.shared(len(values))
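A short usage sketch for `add_coord` (coordinate values invented):

```python
import aesara
import pymc3 as pm

with pm.Model() as model:
    # With explicit values, the length defaults to aesara.shared(len(values)).
    model.add_coord("town", values=["Halle", "Jena", "Gera"])
    # Without values, a (symbolic) length must be given explicitly.
    model.add_coord("year", values=None, length=aesara.shared(2))
```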
@@ -1019,7 +1020,8 @@ def set_data(
New values for the shared variable.
coords : optional, dict
New coordinate values for dimensions of the shared variable.
            Must be provided for all named dimensions that change in length
            and already have coordinate values.
"""
shared_object = self[name]
if not isinstance(shared_object, SharedVariable):
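And a hedged sketch of resizing a `pm.Data` variable along a named dimension (assuming the `set_data(name, values, coords)` signature shown above):

```python
import pymc3 as pm

with pm.Model() as model:
    data = pm.Data("data", [1.0, 2.0, 3.0], dims="obs_id")

# "obs_id" changes in length, so new coordinates must be passed along
# if coordinate values were registered for it.
model.set_data("data", [1.0, 2.0], coords={"obs_id": [0, 1]})
```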
2 changes: 1 addition & 1 deletion pymc3/tests/sampler_fixtures.py
@@ -92,7 +92,7 @@ class BetaBinomialFixture(KnownCDF):
@classmethod
def make_model(cls):
with pm.Model() as model:
p = pm.Beta("p", [0.5, 0.5, 1.0], [0.5, 0.5, 1.0], size=3)
p = pm.Beta("p", [0.5, 0.5, 1.0], [0.5, 0.5, 1.0])
pm.Binomial("y", p=p, n=[4, 12, 9], observed=[1, 2, 9])
return model
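The `size=3` became redundant here because the batch shape is now implied by the length-3 parameters; a quick check of that assumption:

```python
import pymc3 as pm

p = pm.Beta.dist([0.5, 0.5, 1.0], [0.5, 0.5, 1.0])
assert p.eval().shape == (3,)
```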
