Skip to content

Commit c362db5

Browse files
authored
Histogram docs (#1914)
* add docs * delete redundant code * add usage example * remove unused import
1 parent aa2da14 commit c362db5

File tree

1 file changed

+29
-33
lines changed

1 file changed

+29
-33
lines changed

pymc3/variational/approximations.py

+29-33
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from pymc3 import ArrayOrdering, DictToArrayBijection
66
from pymc3.distributions.dist_math import rho2sd, log_normal, log_normal_mv
77
from pymc3.variational.opvi import Approximation
8-
from pymc3.theanof import tt_rng, memoize, change_flags
8+
from pymc3.theanof import tt_rng
99

1010

1111
__all__ = [
@@ -221,6 +221,28 @@ def from_mean_field(cls, mean_field, gpu_compat=False):
221221

222222

223223
class Histogram(Approximation):
224+
"""
225+
Builds Approximation instance from a given trace,
226+
it has the same interface as variational approximation
227+
228+
Parameters
229+
----------
230+
trace : MultiTrace
231+
local_rv : dict
232+
Experimental for Histogram
233+
mapping {model_variable -> local_variable}
234+
Local Vars are used for Autoencoding Variational Bayes
235+
See (AEVB; Kingma and Welling, 2014) for details
236+
237+
model : PyMC3 model
238+
239+
Usage
240+
-----
241+
>>> with model:
242+
... step = NUTS()
243+
... trace = sample(1000, step=step)
244+
... histogram = Histogram(trace[100:])
245+
"""
224246
def __init__(self, trace, local_rv=None, model=None):
225247
self.trace = trace
226248
self._histogram_logp = None
@@ -275,10 +297,16 @@ def random_global(self, size=None, no_rand=False):
275297

276298
@property
277299
def histogram(self):
300+
"""
301+
Shortcut to flattened Trace
302+
"""
278303
return self.shared_params
279304

280305
@property
281306
def histogram_logp(self):
307+
"""
308+
Symbolic logp for every point in trace
309+
"""
282310
if self._histogram_logp is None:
283311
node = self.to_flat_input(self.model.logpt)
284312

@@ -297,35 +325,3 @@ def mean(self):
297325
@property
298326
def params(self):
299327
return []
300-
301-
@property
302-
@memoize
303-
@change_flags(compute_test_value='off')
304-
def random_fn(self):
305-
"""
306-
Implements posterior distribution from initial latent space
307-
308-
Parameters
309-
----------
310-
size : number of samples from distribution
311-
no_rand : whether use deterministic distribution
312-
313-
Returns
314-
-------
315-
posterior space (numpy)
316-
"""
317-
In = theano.In
318-
size = tt.iscalar('size')
319-
no_rand = tt.bscalar('no_rand')
320-
posterior = self.random(size, no_rand=no_rand)
321-
fn = theano.function([In(size, 'size', 1, allow_downcast=True),
322-
In(no_rand, 'no_rand', 0, allow_downcast=True)],
323-
posterior)
324-
325-
def inner(size=None, no_rand=False):
326-
if size is None:
327-
return fn(1, int(no_rand))[0]
328-
else:
329-
return fn(size, int(no_rand))
330-
331-
return inner

0 commit comments

Comments
 (0)