Skip to content

Bounded now works with array bounds #1604

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into the base branch from the contributor's branch
Dec 19, 2016
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
93 changes: 48 additions & 45 deletions pymc3/distributions/continuous.py
Original file line number Diff line number Diff line change
Expand Up @@ -1114,31 +1114,55 @@ class Bounded(Continuous):

def __init__(self, distribution, lower, upper, transform='infer', *args, **kwargs):
    """Wrap *distribution* so its support is restricted to (lower, upper).

    Parameters
    ----------
    distribution : pymc3 distribution class
        The distribution to bound.
    lower, upper : float, ndarray, or theano TensorVariable
        Bounds; either may be +/-inf for a one-sided bound.
    transform : 'infer' (default)
        When 'infer', the matching interval/lowerbound/upperbound transform
        and a valid test value are derived from the bounds.
    """
    self.dist = distribution.dist(*args, **kwargs)

    # Adopt the wrapped distribution's attributes, then layer this
    # constructor's locals (lower, upper, transform, ...) on top.
    self.__dict__.update(self.dist.__dict__)
    self.__dict__.update(locals())

    if hasattr(self.dist, 'mode'):
        self.mode = self.dist.mode

    if transform == 'infer':
        self.transform, self.testval = self._infer(lower, upper)
def _infer(self, lower, upper):
    """Infer proper transforms for the variable, and adjust the test value.

    Deals with the case where lower or upper may be +/-inf, an ndarray,
    or a ``theano.tensor.TensorVariable`` (whose numeric value is read
    from ``tag.test_value``).

    Returns
    -------
    (transform, testval)
        The interval/lowerbound/upperbound transform (``None`` when both
        bounds are infinite) and a test value strictly inside the bounds.
    """
    # Numeric views of the bounds: symbolic bounds expose their concrete
    # value via tag.test_value.
    if isinstance(upper, tt.TensorVariable):
        _upper = upper.tag.test_value
    else:
        _upper = upper
    if isinstance(lower, tt.TensorVariable):
        _lower = lower.tag.test_value
    else:
        _lower = lower

    # NOTE(review): the .any() calls assume self.dist.default() (or the
    # comparison result) is array-like — confirm for scalar defaults.
    testval = self.dist.default()
    if not self._isinf(_lower) and not self._isinf(_upper):
        transform = transforms.interval(lower, upper)
        if (testval <= _lower).any() or (testval >= _upper).any():
            testval = 0.5 * (_upper + _lower)
    elif not self._isinf(_lower) and self._isinf(_upper):
        transform = transforms.lowerbound(lower)
        if (testval <= _lower).any():
            # Use the numeric _lower so testval stays concrete even when
            # `lower` is symbolic (mirrors the _upper - 1 branch below).
            testval = _lower + 1
    elif self._isinf(_lower) and not self._isinf(_upper):
        transform = transforms.upperbound(upper)
        if (testval >= _upper).any():
            testval = _upper - 1
    else:
        transform = None
    return transform, testval

if np.isinf(lower) and not np.isinf(upper):
self.transform = transforms.upperbound(upper)
if default >= upper:
self.testval = upper - 1
def _isinf(self, value):
"""Checks whether the value is +/-inf, or else returns 0 (even if an ndarray with
entries that are +/-inf)
"""
try:
return bool(np.isinf(value))
except ValueError:
return False

def _random(self, lower, upper, point=None, size=None):
samples = np.zeros(size).flatten()
Expand All @@ -1165,38 +1189,17 @@ def logp(self, value):
value >= self.lower, value <= self.upper)


class Bound(object):
R"""
Creates a new upper, lower or upper+lower bounded distribution

Parameters
----------
distribution : pymc3 distribution
Distribution to be transformed into a bounded distribution
lower : float (optional)
Lower bound of the distribution
upper : float (optional)

Example
-------
boundedNormal = pymc3.Bound(pymc3.Normal, lower=0.0)
par = boundedNormal(mu=0.0, sd=1.0, testval=1.0)
"""

def __init__(self, distribution, lower=-np.inf, upper=np.inf):
self.distribution = distribution
self.lower = lower
self.upper = upper

def __call__(self, *args, **kwargs):
first, args = args[0], args[1:]
def Bound(distribution, lower=-np.inf, upper=np.inf):
    """Return a new distribution class bounded to (lower, upper).

    Example
    -------
    BoundedNormal = Bound(pymc3.Normal, lower=0.0)
    par = BoundedNormal('par', mu=0.0, sd=1.0, testval=1.0)
    """
    class _BoundedDist(Bounded):
        def __init__(self, *args, **kwargs):
            first, args = args[0], args[1:]
            # super() takes (type, instance) in that order — the original
            # had them reversed, which raises TypeError at call time.
            super(_BoundedDist, self).__init__(first, distribution, lower, upper, *args, **kwargs)

        @classmethod
        def dist(cls, *args, **kwargs):
            return Bounded.dist(distribution, lower, upper, *args, **kwargs)

    return _BoundedDist


def StudentTpos(*args, **kwargs):
Expand Down
38 changes: 22 additions & 16 deletions pymc3/examples/garch_example.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from pymc3 import Normal, sample, Model, Bound
from pymc3 import Normal, sample, Model, Bound, summary
import theano.tensor as tt
import numpy as np

Expand Down Expand Up @@ -29,29 +29,35 @@
r ~ normal(mu,sigma);
}
"""
J = 8
r = np.array([28, 8, -3, 7, -1, 1, 18, 12])
sigma1 = np.array([15, 10, 16, 11, 9, 11, 10, 18])
alpha0 = np.array([10, 10, 16, 8, 9, 11, 12, 18])

with Model() as garch:
alpha1 = Normal('alpha1', 0, 1, shape=J)
BoundedNormal = Bound(Normal, upper=(1 - alpha1))
beta1 = BoundedNormal('beta1', 0, sd=1e6)
mu = Normal('mu', 0, sd=1e6)

theta = tt.sqrt(alpha0 + alpha1 * tt.pow(r - mu, 2) +
beta1 * tt.pow(sigma1, 2))
def get_garch_model():
    """Build and return the GARCH(1,1) pymc3 Model over the 8 data points.

    Mirrors the Stan model in the module docstring: alpha1, beta1
    (bounded so alpha1 + beta1 < 1) and mu are latent; obs ~ N(mu, theta).
    """
    r = np.array([28, 8, -3, 7, -1, 1, 18, 12])
    sigma1 = np.array([15, 10, 16, 11, 9, 11, 10, 18])
    alpha0 = np.array([10, 10, 16, 8, 9, 11, 12, 18])
    shape = r.shape

    with Model() as garch:
        alpha1 = Normal('alpha1', mu=np.zeros(shape=shape), sd=np.ones(shape=shape), shape=shape)
        # beta1 is bounded above by 1 - alpha1 (stationarity constraint).
        BoundedNormal = Bound(Normal, upper=(1 - alpha1))
        beta1 = BoundedNormal('beta1',
                              mu=np.zeros(shape=shape),
                              sd=1e6 * np.ones(shape=shape),
                              shape=shape)
        mu = Normal('mu', mu=np.zeros(shape=shape), sd=1e6 * np.ones(shape=shape), shape=shape)
        theta = tt.sqrt(alpha0 + alpha1 * tt.pow(r - mu, 2) +
                        beta1 * tt.pow(sigma1, 2))
        Normal('obs', mu, sd=theta, observed=r)
    return garch


def run(n=1000):
    """Sample the GARCH model and return the trace.

    Parameters
    ----------
    n : int or "short"
        Number of samples; the string "short" is a smoke-test alias for 50.
    """
    if n == "short":
        n = 50
    with get_garch_model():
        tr = sample(n, n_init=10000)
    return tr


if __name__ == '__main__':
    # Sample with defaults and print the posterior summary table.
    print(summary(run()))