diff --git a/pymc/distributions/continuous.py b/pymc/distributions/continuous.py
index 0e172cc0b3..fc287045b0 100644
--- a/pymc/distributions/continuous.py
+++ b/pymc/distributions/continuous.py
@@ -35,21 +35,22 @@
 from aesara.tensor.math import tanh
 from aesara.tensor.random.basic import (
     BetaRV,
-    cauchy,
+    CauchyRV,
+    HalfCauchyRV,
+    HalfNormalRV,
+    LogNormalRV,
+    NormalRV,
+    UniformRV,
     chisquare,
     exponential,
     gamma,
     gumbel,
-    halfcauchy,
-    halfnormal,
     invgamma,
     laplace,
     logistic,
-    lognormal,
     normal,
     pareto,
     triangular,
-    uniform,
     vonmises,
 )
 from aesara.tensor.random.op import RandomVariable
@@ -252,6 +253,13 @@ def get_tau_sigma(tau=None, sigma=None):
     return floatX(tau), floatX(sigma)
 
 
+class PyMCUniformRV(UniformRV):
+    _print_name = ("Uniform", "\\operatorname{Uniform}")
+
+
+pymc_uniform = PyMCUniformRV()
+
+
 class Uniform(BoundedContinuous):
     r"""
     Continuous uniform log-likelihood.
@@ -295,7 +303,8 @@ class Uniform(BoundedContinuous):
     upper : tensor_like of float, default 1
         Upper limit.
     """
-    rv_op = uniform
+    rv_op = pymc_uniform
+    rv_type = UniformRV
     bound_args_indices = (3, 4)  # Lower, Upper
 
     @classmethod
@@ -479,6 +488,13 @@ def logcdf(value):
         return at.switch(at.lt(value, np.inf), -np.inf, at.switch(at.eq(value, np.inf), 0, -np.inf))
 
 
+class PyMCNormalRV(NormalRV):
+    _print_name = ("Normal", "\\operatorname{Normal}")
+
+
+pymc_normal = PyMCNormalRV()
+
+
 class Normal(Continuous):
     r"""
     Univariate normal log-likelihood.
@@ -544,7 +560,8 @@ class Normal(Continuous):
         with pm.Model():
             x = pm.Normal('x', mu=0, tau=1/23)
     """
-    rv_op = normal
+    rv_op = pymc_normal
+    rv_type = NormalRV
 
     @classmethod
     def dist(cls, mu=0, sigma=None, tau=None, **kwargs):
@@ -801,6 +818,13 @@ def truncated_normal_default_transform(op, rv):
     return bounded_cont_transform(op, rv, TruncatedNormal.bound_args_indices)
 
 
+class PyMCHalfNormalRV(HalfNormalRV):
+    _print_name = ("HalfNormal", "\\operatorname{HalfNormal}")
+
+
+pymc_halfnormal = PyMCHalfNormalRV()
+
+
 class HalfNormal(PositiveContinuous):
     r"""
     Half-normal log-likelihood.
@@ -867,7 +891,8 @@ class HalfNormal(PositiveContinuous):
         with pm.Model():
             x = pm.HalfNormal('x', tau=1/15)
     """
-    rv_op = halfnormal
+    rv_op = pymc_halfnormal
+    rv_type = HalfNormalRV
 
     @classmethod
     def dist(cls, sigma=None, tau=None, *args, **kwargs):
@@ -1690,6 +1715,13 @@ def logp(value, b, kappa, mu):
     return check_parameters(res, 0 < b, 0 < kappa, msg="b > 0, kappa > 0")
 
 
+class PyMCLogNormalRV(LogNormalRV):
+    _print_name = ("LogNormal", "\\operatorname{LogNormal}")
+
+
+pymc_lognormal = PyMCLogNormalRV()
+
+
 class LogNormal(PositiveContinuous):
     r"""
     Log-normal log-likelihood.
@@ -1758,7 +1790,8 @@ class LogNormal(PositiveContinuous):
         x = pm.LogNormal('x', mu=2, tau=1/100)
     """
 
-    rv_op = lognormal
+    rv_op = pymc_lognormal
+    rv_type = LogNormalRV
 
     @classmethod
     def dist(cls, mu=0, sigma=None, tau=None, *args, **kwargs):
@@ -2049,6 +2082,13 @@ def pareto_default_transform(op, rv):
     return bounded_cont_transform(op, rv, Pareto.bound_args_indices)
 
 
+class PyMCCauchyRV(CauchyRV):
+    _print_name = ("Cauchy", "\\operatorname{Cauchy}")
+
+
+pymc_cauchy = PyMCCauchyRV()
+
+
 class Cauchy(Continuous):
     r"""
     Cauchy log-likelihood.
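Every distribution touched in continuous.py follows the same three-step pattern: subclass the Aesara `RandomVariable` solely to override `_print_name`, instantiate that subclass once at module level, and point `rv_op` at the instance while declaring `rv_type` as the Aesara base class (presumably so machinery keyed on `rv_type` still recognizes graphs built with the plain Aesara ops). A minimal sanity check of the override, a sketch runnable once this branch is installed; `pymc_normal` and `PyMCNormalRV` are the module-level names added above:

    # The PyMC op is still a NormalRV; only its print name differs.
    from aesara.tensor.random.basic import NormalRV

    from pymc.distributions.continuous import PyMCNormalRV, pymc_normal

    assert isinstance(pymc_normal, NormalRV)
    assert type(pymc_normal) is PyMCNormalRV
    assert pymc_normal._print_name == ("Normal", "\\operatorname{Normal}")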
""" - rv_op = cauchy + rv_op = pymc_cauchy + rv_type = CauchyRV @classmethod def dist(cls, alpha, beta, *args, **kwargs): @@ -2133,6 +2174,13 @@ def logcdf(value, alpha, beta): ) +class PyMCHalfCauchyRV(HalfCauchyRV): + _print_name = ("HalfCauchy", "\\operatorname{HalfCauchy}") + + +pymc_halfcauchy = PyMCHalfCauchyRV() + + class HalfCauchy(PositiveContinuous): r""" Half-Cauchy log-likelihood. @@ -2172,7 +2220,8 @@ class HalfCauchy(PositiveContinuous): beta : tensor_like of float Scale parameter (beta > 0). """ - rv_op = halfcauchy + rv_op = pymc_halfcauchy + rv_type = HalfCauchyRV @classmethod def dist(cls, beta, *args, **kwargs): @@ -3942,7 +3991,7 @@ class PolyaGammaRV(RandomVariable): ndim_supp = 0 ndims_params = [0, 0] dtype = "floatX" - _print_name = ("PG", "\\operatorname{PG}") + _print_name = ("PolyaGamma", "\\operatorname{PolyaGamma}") def __call__(self, h=1.0, z=0.0, size=None, **kwargs): return super().__call__(h, z, size=size, **kwargs) diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py index b21e0fe7ce..28bc69ce6d 100644 --- a/pymc/distributions/discrete.py +++ b/pymc/distributions/discrete.py @@ -17,16 +17,16 @@ import numpy as np from aesara.tensor.random.basic import ( + GeometricRV, + HyperGeometricRV, + NegBinomialRV, + PoissonRV, RandomVariable, ScipyRandomVariable, bernoulli, betabinom, binomial, categorical, - geometric, - hypergeometric, - nbinom, - poisson, ) from scipy import stats @@ -560,6 +560,13 @@ def logcdf(value, q, beta): return check_parameters(res, 0 < q, q < 1, 0 < beta, msg="0 < q < 1, beta > 0") +class PyMCPoissonRV(PoissonRV): + _print_name = ("Poisson", "\\operatorname{Poisson}") + + +pymc_poisson = PyMCPoissonRV() + + class Poisson(Discrete): R""" Poisson log-likelihood. @@ -605,7 +612,8 @@ class Poisson(Discrete): The Poisson distribution can be derived as a limiting case of the binomial distribution. """ - rv_op = poisson + rv_op = pymc_poisson + rv_type = PoissonRV @classmethod def dist(cls, mu, *args, **kwargs): @@ -674,6 +682,13 @@ def logcdf(value, mu): return check_parameters(res, 0 <= mu, msg="mu >= 0") +class PyMCNegativeBinomialRV(NegBinomialRV): + _print_name = ("NegBinom", "\\operatorname{NegBinom}") + + +pymc_nbinom = PyMCNegativeBinomialRV() + + class NegativeBinomial(Discrete): R""" Negative binomial log-likelihood. @@ -746,7 +761,8 @@ def NegBinom(a, m, x): n : tensor_like of float Alternative number of target success trials (n > 0) """ - rv_op = nbinom + rv_op = pymc_nbinom + rv_type = NegBinomialRV @classmethod def dist(cls, mu=None, alpha=None, p=None, n=None, *args, **kwargs): @@ -847,6 +863,13 @@ def logcdf(value, n, p): ) +class PyMCGeometricRV(GeometricRV): + _print_name = ("Geometric", "\\operatorname{Geometric}") + + +pymc_geometric = PyMCGeometricRV() + + class Geometric(Discrete): R""" Geometric log-likelihood. @@ -886,7 +909,8 @@ class Geometric(Discrete): Probability of success on an individual trial (0 < p <= 1). """ - rv_op = geometric + rv_op = pymc_geometric + rv_type = GeometricRV @classmethod def dist(cls, p, *args, **kwargs): @@ -956,6 +980,13 @@ def logcdf(value, p): ) +class PyMCHyperGeometricRV(HyperGeometricRV): + _print_name = ("HyperGeometric", "\\operatorname{HyperGeometric}") + + +pymc_hypergeometric = PyMCHyperGeometricRV() + + class HyperGeometric(Discrete): R""" Discrete hypergeometric distribution. 
@@ -1004,7 +1035,8 @@ class HyperGeometric(Discrete):
         Number of samples drawn from the population (0 <= n <= N)
     """
 
-    rv_op = hypergeometric
+    rv_op = pymc_hypergeometric
+    rv_type = HyperGeometricRV
 
     @classmethod
     def dist(cls, N, k, n, *args, **kwargs):
diff --git a/pymc/distributions/distribution.py b/pymc/distributions/distribution.py
index b75bcaaa74..4ea762dd12 100644
--- a/pymc/distributions/distribution.py
+++ b/pymc/distributions/distribution.py
@@ -102,9 +102,9 @@ def _random(*args, **kwargs):
             clsdict["random"] = _random
 
         rv_op = clsdict.setdefault("rv_op", None)
-        rv_type = None
+        rv_type = clsdict.setdefault("rv_type", None)
 
-        if isinstance(rv_op, RandomVariable):
+        if rv_type is None and isinstance(rv_op, RandomVariable):
             rv_type = type(rv_op)
 
         clsdict["rv_type"] = rv_type
diff --git a/pymc/distributions/multivariate.py b/pymc/distributions/multivariate.py
index 2f21d9e949..0632cba370 100644
--- a/pymc/distributions/multivariate.py
+++ b/pymc/distributions/multivariate.py
@@ -32,7 +32,12 @@
 from aesara.sparse.basic import sp_sum
 from aesara.tensor import gammaln, sigmoid
 from aesara.tensor.nlinalg import det, eigh, matrix_inverse, trace
-from aesara.tensor.random.basic import dirichlet, multinomial, multivariate_normal
+from aesara.tensor.random.basic import (
+    DirichletRV,
+    MvNormalRV,
+    multinomial,
+    multivariate_normal,
+)
 from aesara.tensor.random.op import RandomVariable, default_supp_shape_from_params
 from aesara.tensor.random.utils import broadcast_params
 from aesara.tensor.slinalg import Cholesky, SolveTriangular
@@ -190,6 +195,13 @@ def quaddist_tau(delta, chol_mat):
     return quaddist, logdet, ok
 
 
+class PyMCMvNormalRV(MvNormalRV):
+    _print_name = ("MvNormal", "\\operatorname{MvNormal}")
+
+
+pymc_multivariate_normal = PyMCMvNormalRV()
+
+
 class MvNormal(Continuous):
     r"""
     Multivariate normal log-likelihood.
@@ -254,7 +266,8 @@ class MvNormal(Continuous):
         vals_raw = pm.Normal('vals_raw', mu=0, sigma=1, shape=(5, 3))
         vals = pm.Deterministic('vals', at.dot(chol, vals_raw.T).T)
     """
-    rv_op = multivariate_normal
+    rv_op = pymc_multivariate_normal
+    rv_type = MvNormalRV
 
     @classmethod
     def dist(cls, mu, cov=None, tau=None, chol=None, lower=True, **kwargs):
@@ -436,6 +449,13 @@ def logp(value, nu, mu, scale):
     )
 
 
+class PyMCDirichletRV(DirichletRV):
+    _print_name = ("Dirichlet", "\\operatorname{Dirichlet}")
+
+
+pymc_dirichlet = PyMCDirichletRV()
+
+
 class Dirichlet(SimplexContinuous):
     r"""
     Dirichlet log-likelihood.
@@ -460,7 +480,8 @@ class Dirichlet(SimplexContinuous):
         Concentration parameters (a > 0). The number of categories is given by the
         length of the last axis.
     """
-    rv_op = dirichlet
+    rv_op = pymc_dirichlet
+    rv_type = DirichletRV
 
     @classmethod
     def dist(cls, a, **kwargs):
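The distribution.py change is what makes the subclassing scheme safe: `rv_type` used to be derived unconditionally from `type(rv_op)`, which after this diff would be the PyMC subclass (e.g. `PyMCNormalRV`) rather than the Aesara base class. Now an `rv_type` declared explicitly on the class wins, and `type(rv_op)` is only a fallback. The resolution order, restated as a standalone function for clarity (`resolve_rv_type` is a hypothetical helper for illustration, not PyMC API):

    from aesara.tensor.random.op import RandomVariable

    def resolve_rv_type(clsdict):
        rv_op = clsdict.setdefault("rv_op", None)
        rv_type = clsdict.setdefault("rv_type", None)
        # An explicit rv_type declared on the Distribution subclass wins;
        # the concrete type of rv_op is used only as a fallback.
        if rv_type is None and isinstance(rv_op, RandomVariable):
            rv_type = type(rv_op)
        clsdict["rv_type"] = rv_type
        return rv_type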
""" - rv_op = dirichlet + rv_op = pymc_dirichlet + rv_type = DirichletRV @classmethod def dist(cls, a, **kwargs): diff --git a/pymc/tests/distributions/test_logprob.py b/pymc/tests/distributions/test_logprob.py index 4212b4baa7..881bf27358 100644 --- a/pymc/tests/distributions/test_logprob.py +++ b/pymc/tests/distributions/test_logprob.py @@ -320,7 +320,7 @@ def test_ignore_logprob_basic(): new_x = ignore_logprob(x) assert new_x is not x assert isinstance(new_x.owner.op, Normal) - assert type(new_x.owner.op).__name__ == "UnmeasurableNormalRV" + assert type(new_x.owner.op).__name__ == "UnmeasurablePyMCNormalRV" # Confirm that it does not have measurable output assert get_measurable_outputs(new_x.owner.op, new_x.owner) is None diff --git a/pymc/tests/test_aesaraf.py b/pymc/tests/test_aesaraf.py index f579df7c69..418b802150 100644 --- a/pymc/tests/test_aesaraf.py +++ b/pymc/tests/test_aesaraf.py @@ -25,7 +25,7 @@ from aeppl.logprob import ParameterValueError from aesara.compile.builders import OpFromGraph from aesara.graph.basic import Variable, equal_computations -from aesara.tensor.random.basic import normal, uniform +from aesara.tensor.random.basic import NormalRV, normal, uniform from aesara.tensor.random.op import RandomVariable from aesara.tensor.random.var import RandomStateSharedVariable from aesara.tensor.subtensor import AdvancedIncSubtensor, AdvancedIncSubtensor1 @@ -405,7 +405,7 @@ def test_rvs_to_value_vars_unvalued_rv(): res_y = res.owner.inputs[1] # Graph should have be cloned, and therefore y and res_y should have different ids assert res_y is not y - assert res_y.owner.op == at.random.normal + assert isinstance(res_y.owner.op, NormalRV) assert res_y.owner.inputs[3] is x_value diff --git a/pymc/tests/test_printing.py b/pymc/tests/test_printing.py index 8ebaba564a..d1c9caedeb 100644 --- a/pymc/tests/test_printing.py +++ b/pymc/tests/test_printing.py @@ -92,59 +92,59 @@ def setup_class(self): self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)] self.expected = { ("plain", True): [ - r"alpha ~ N(0, 10)", - r"sigma ~ N**+(0, 1)", + r"alpha ~ Normal(0, 10)", + r"sigma ~ HalfNormal(0, 1)", r"mu ~ Deterministic(f(beta, alpha))", - r"beta ~ N(0, 10)", - r"Z ~ N(f(), f())", - r"nb_with_p_n ~ NB(10, nbp)", - r"zip ~ MarginalMixture(f(), DiracDelta(0), Pois(5))", + r"beta ~ Normal(0, 10)", + r"Z ~ MvNormal(f(), f())", + r"nb_with_p_n ~ NegBinom(10, nbp)", + r"zip ~ MarginalMixture(f(), DiracDelta(0), Poisson(5))", ( r"nested_mix ~ MarginalMixture(, " - r"MarginalMixture(f(), DiracDelta(0), Pois(5)), " + r"MarginalMixture(f(), DiracDelta(0), Poisson(5)), " r"Censored(Bern(0.5), -1, 1))" ), - r"Y_obs ~ N(mu, sigma)", + r"Y_obs ~ Normal(mu, sigma)", r"pot ~ Potential(f(beta, alpha))", ], ("plain", False): [ - r"alpha ~ N", - r"sigma ~ N**+", + r"alpha ~ Normal", + r"sigma ~ HalfNormal", r"mu ~ Deterministic", - r"beta ~ N", - r"Z ~ N", - r"nb_with_p_n ~ NB", + r"beta ~ Normal", + r"Z ~ MvNormal", + r"nb_with_p_n ~ NegBinom", r"zip ~ MarginalMixture", r"nested_mix ~ MarginalMixture", - r"Y_obs ~ N", + r"Y_obs ~ Normal", r"pot ~ Potential", ], ("latex", True): [ - r"$\text{alpha} \sim \operatorname{N}(0,~10)$", - r"$\text{sigma} \sim \operatorname{N^{+}}(0,~1)$", + r"$\text{alpha} \sim \operatorname{Normal}(0,~10)$", + r"$\text{sigma} \sim \operatorname{HalfNormal}(0,~1)$", r"$\text{mu} \sim \operatorname{Deterministic}(f(\text{beta},~\text{alpha}))$", - r"$\text{beta} \sim \operatorname{N}(0,~10)$", - r"$\text{Z} \sim \operatorname{N}(f(),~f())$", - 
r"$\text{nb_with_p_n} \sim \operatorname{NB}(10,~\text{nbp})$", - r"$\text{zip} \sim \operatorname{MarginalMixture}(f(),~\text{\$\operatorname{DiracDelta}(0)\$},~\text{\$\operatorname{Pois}(5)\$})$", + r"$\text{beta} \sim \operatorname{Normal}(0,~10)$", + r"$\text{Z} \sim \operatorname{MvNormal}(f(),~f())$", + r"$\text{nb_with_p_n} \sim \operatorname{NegBinom}(10,~\text{nbp})$", + r"$\text{zip} \sim \operatorname{MarginalMixture}(f(),~\text{\$\operatorname{DiracDelta}(0)\$},~\text{\$\operatorname{Poisson}(5)\$})$", ( r"$\text{nested_mix} \sim \operatorname{MarginalMixture}(\text{}," - r"~\text{\$\operatorname{MarginalMixture}(f(),~\text{\\$\operatorname{DiracDelta}(0)\\$},~\text{\\$\operatorname{Pois}(5)\\$})\$}," + r"~\text{\$\operatorname{MarginalMixture}(f(),~\text{\\$\operatorname{DiracDelta}(0)\\$},~\text{\\$\operatorname{Poisson}(5)\\$})\$}," r"~\text{\$\operatorname{Censored}(\text{\\$\operatorname{Bern}(0.5)\\$},~-1,~1)\$})$" ), - r"$\text{Y_obs} \sim \operatorname{N}(\text{mu},~\text{sigma})$", + r"$\text{Y_obs} \sim \operatorname{Normal}(\text{mu},~\text{sigma})$", r"$\text{pot} \sim \operatorname{Potential}(f(\text{beta},~\text{alpha}))$", ], ("latex", False): [ - r"$\text{alpha} \sim \operatorname{N}$", - r"$\text{sigma} \sim \operatorname{N^{+}}$", + r"$\text{alpha} \sim \operatorname{Normal}$", + r"$\text{sigma} \sim \operatorname{HalfNormal}$", r"$\text{mu} \sim \operatorname{Deterministic}$", - r"$\text{beta} \sim \operatorname{N}$", - r"$\text{Z} \sim \operatorname{N}$", - r"$\text{nb_with_p_n} \sim \operatorname{NB}$", + r"$\text{beta} \sim \operatorname{Normal}$", + r"$\text{Z} \sim \operatorname{MvNormal}$", + r"$\text{nb_with_p_n} \sim \operatorname{NegBinom}$", r"$\text{zip} \sim \operatorname{MarginalMixture}$", r"$\text{nested_mix} \sim \operatorname{MarginalMixture}$", - r"$\text{Y_obs} \sim \operatorname{N}$", + r"$\text{Y_obs} \sim \operatorname{Normal}$", r"$\text{pot} \sim \operatorname{Potential}$", ], }