5
5
from pymc3 import ArrayOrdering , DictToArrayBijection
6
6
from pymc3 .distributions .dist_math import rho2sd , log_normal , log_normal_mv
7
7
from pymc3 .variational .opvi import Approximation
8
- from pymc3 .theanof import tt_rng , memoize , change_flags
8
+ from pymc3 .theanof import tt_rng
9
9
10
10
11
11
__all__ = [
@@ -221,6 +221,28 @@ def from_mean_field(cls, mean_field, gpu_compat=False):
221
221
222
222
223
223
class Histogram (Approximation ):
224
+ """
225
+ Builds Approximation instance from a given trace,
226
+ it has the same interface as variational approximation
227
+
228
+ Parameters
229
+ ----------
230
+ trace : MultiTrace
231
+ local_rv : dict
232
+ Experimental for Histogram
233
+ mapping {model_variable -> local_variable}
234
+ Local Vars are used for Autoencoding Variational Bayes
235
+ See (AEVB; Kingma and Welling, 2014) for details
236
+
237
+ model : PyMC3 model
238
+
239
+ Usage
240
+ -----
241
+ >>> with model:
242
+ ... step = NUTS()
243
+ ... trace = sample(1000, step=step)
244
+ ... histogram = Histogram(trace[100:])
245
+ """
224
246
def __init__ (self , trace , local_rv = None , model = None ):
225
247
self .trace = trace
226
248
self ._histogram_logp = None
@@ -275,10 +297,16 @@ def random_global(self, size=None, no_rand=False):
275
297
276
298
@property
def histogram(self):
    """Shortcut to the flattened trace (the shared-variable matrix backing this approximation)."""
    return self.shared_params
279
304
280
305
@property
281
306
def histogram_logp (self ):
307
+ """
308
+ Symbolic logp for every point in trace
309
+ """
282
310
if self ._histogram_logp is None :
283
311
node = self .to_flat_input (self .model .logpt )
284
312
@@ -297,35 +325,3 @@ def mean(self):
297
325
@property
def params(self):
    """Return an empty list — a Histogram approximation exposes no free parameters to optimize."""
    return []
300
-
301
- @property
302
- @memoize
303
- @change_flags (compute_test_value = 'off' )
304
- def random_fn (self ):
305
- """
306
- Implements posterior distribution from initial latent space
307
-
308
- Parameters
309
- ----------
310
- size : number of samples from distribution
311
- no_rand : whether use deterministic distribution
312
-
313
- Returns
314
- -------
315
- posterior space (numpy)
316
- """
317
- In = theano .In
318
- size = tt .iscalar ('size' )
319
- no_rand = tt .bscalar ('no_rand' )
320
- posterior = self .random (size , no_rand = no_rand )
321
- fn = theano .function ([In (size , 'size' , 1 , allow_downcast = True ),
322
- In (no_rand , 'no_rand' , 0 , allow_downcast = True )],
323
- posterior )
324
-
325
- def inner (size = None , no_rand = False ):
326
- if size is None :
327
- return fn (1 , int (no_rand ))[0 ]
328
- else :
329
- return fn (size , int (no_rand ))
330
-
331
- return inner
0 commit comments