Commit 946fa0c

Merge pull request #2831 from pymc-devs/sample_arg_rename

Replace njobs with cores argument

2 parents: d47e133 + 8268157

32 files changed: +131 -110 lines

RELEASE-NOTES.md

Lines changed: 1 addition & 0 deletions
@@ -16,6 +16,7 @@
 ### Deprecations
 
 - DIC and BPIC calculations have been removed
+- `njobs` and `nchains` deprecated in favor of `cores` and `chains` for `sample`
 
 ## PyMC 3.3 (January 9, 2018)
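
The rename is a drop-in change at each call site: `njobs` becomes `cores` and `nchains` becomes `chains`. A minimal migration sketch (the toy model mirrors the api_quickstart cell updated below; it is illustrative, not part of this diff):

import numpy as np
import pymc3 as pm

with pm.Model():
    mu = pm.Normal('mu', mu=0, sd=1)
    obs = pm.Normal('obs', mu=mu, sd=1, observed=np.random.randn(100))
    # before this commit: trace = pm.sample(1000, njobs=4, nchains=4)
    trace = pm.sample(1000, cores=4, chains=4)  # new argument names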

benchmarks/benchmarks/benchmarks.py

Lines changed: 5 additions & 5 deletions
@@ -115,11 +115,11 @@ def time_drug_evaluation(self):
             pm.Deterministic('difference of stds', group1_std - group2_std)
             pm.Deterministic(
                 'effect size', diff_of_means / np.sqrt((group1_std**2 + group2_std**2) / 2))
-            pm.sample(20000, njobs=4, chains=4)
+            pm.sample(20000, cores=4, chains=4)
 
     def time_glm_hierarchical(self):
         with glm_hierarchical_model():
-            pm.sample(draws=20000, njobs=4, chains=4)
+            pm.sample(draws=20000, cores=4, chains=4)
 
 
 class NUTSInitSuite(object):
@@ -141,7 +141,7 @@ def track_glm_hierarchical_ess(self, init):
         with glm_hierarchical_model():
             start, step = pm.init_nuts(init=init, chains=self.chains, progressbar=False, random_seed=123)
             t0 = time.time()
-            trace = pm.sample(draws=self.draws, step=step, njobs=4, chains=self.chains,
+            trace = pm.sample(draws=self.draws, step=step, cores=4, chains=self.chains,
                               start=start, random_seed=100)
             tot = time.time() - t0
         ess = pm.effective_n(trace, ('mu_a',))['mu_a']
@@ -154,7 +154,7 @@ def track_marginal_mixture_model_ess(self, init):
                                    progressbar=False, random_seed=123)
         start = [{k: v for k, v in start.items()} for _ in range(self.chains)]
         t0 = time.time()
-        trace = pm.sample(draws=self.draws, step=step, njobs=4, chains=self.chains,
+        trace = pm.sample(draws=self.draws, step=step, cores=4, chains=self.chains,
                           start=start, random_seed=100)
         tot = time.time() - t0
         ess = pm.effective_n(trace, ('mu',))['mu'].min()  # worst case
@@ -178,7 +178,7 @@ def track_glm_hierarchical_ess(self, step):
         if step is not None:
             step = step()
         t0 = time.time()
-        trace = pm.sample(draws=self.draws, step=step, njobs=4, chains=4,
+        trace = pm.sample(draws=self.draws, step=step, cores=4, chains=4,
                           random_seed=100)
         tot = time.time() - t0
         ess = pm.effective_n(trace, ('mu_a',))['mu_a']

docs/source/notebooks/AR.ipynb

Lines changed: 3 additions & 3 deletions
@@ -152,7 +152,7 @@
 "with pm.Model() as ar1:\n",
 "    beta = pm.Normal('beta', mu=0, sd=tau)\n",
 "    data = pm.AR('y', beta, sd=1.0, observed=y)\n",
-"    trace = pm.sample(1000, njobs=4)\n",
+"    trace = pm.sample(1000, cores=4)\n",
 "    \n",
 "pm.traceplot(trace);"
 ]
@@ -278,7 +278,7 @@
 "with pm.Model() as ar2:\n",
 "    beta = pm.Normal('beta', mu=0, sd=tau, shape=2)\n",
 "    data = pm.AR('y', beta, sd=1.0, observed=y)\n",
-"    trace = pm.sample(1000, njobs=4)\n",
+"    trace = pm.sample(1000, cores=4)\n",
 "    \n",
 "pm.traceplot(trace);"
 ]
@@ -340,7 +340,7 @@
 "    beta = pm.Normal('beta', mu=0, sd=tau)\n",
 "    beta2 = pm.Uniform('beta2')\n",
 "    data = pm.AR('y', [beta, beta2], sd=1.0, observed=y)\n",
-"    trace = pm.sample(1000, njobs=4)\n",
+"    trace = pm.sample(1000, cores=4)\n",
 "    \n",
 "pm.traceplot(trace);"
 ]

docs/source/notebooks/BEST.ipynb

Lines changed: 1 addition & 1 deletion
@@ -260,7 +260,7 @@
 ],
 "source": [
 "with model:\n",
-"    trace = pm.sample(2000, njobs=2)"
+"    trace = pm.sample(2000, cores=2)"
 ]
 },
 {

docs/source/notebooks/GLM-linear.ipynb

Lines changed: 2 additions & 2 deletions
@@ -198,7 +198,7 @@
 "                   sd=sigma, observed=y)\n",
 "    \n",
 "    # Inference!\n",
-"    trace = sample(3000, njobs=2) # draw 3000 posterior samples using NUTS sampling"
+"    trace = sample(3000, cores=2) # draw 3000 posterior samples using NUTS sampling"
 ]
 },
 {
@@ -234,7 +234,7 @@
 "    # specify glm and pass in data. The resulting linear model, its likelihood and\n",
 "    # all its parameters are automatically added to our model.\n",
 "    glm.GLM.from_formula('y ~ x', data)\n",
-"    trace = sample(3000, njobs=2) # draw 3000 posterior samples using NUTS sampling"
+"    trace = sample(3000, cores=2) # draw 3000 posterior samples using NUTS sampling"
 ]
 },
 {

docs/source/notebooks/GLM-negative-binomial-regression.ipynb

Lines changed: 1 addition & 1 deletion
@@ -452,7 +452,7 @@
 "    # C = pm.approx_hessian(start)\n",
 "    # trace = pm.sample(4000, step=pm.NUTS(scaling=C))\n",
 "    \n",
-"    trace = pm.sample(2000, njobs=2)"
+"    trace = pm.sample(2000, cores=2)"
 ]
 },
 {

docs/source/notebooks/GLM-poisson-regression.ipynb

Lines changed: 1 addition & 1 deletion
@@ -640,7 +640,7 @@
 ],
 "source": [
 "with mdl_fish:\n",
-"    trc_fish = pm.sample(2000, tune=1000, njobs=4)[1000:]"
+"    trc_fish = pm.sample(2000, tune=1000, cores=4)[1000:]"
 ]
 },
 {

docs/source/notebooks/GLM-robust.ipynb

Lines changed: 2 additions & 2 deletions
@@ -142,7 +142,7 @@
 "source": [
 "with pm.Model() as model:\n",
 "    pm.glm.GLM.from_formula('y ~ x', data)\n",
-"    trace = pm.sample(2000, njobs=2)"
+"    trace = pm.sample(2000, cores=2)"
 ]
 },
 {
@@ -262,7 +262,7 @@
 "with pm.Model() as model_robust:\n",
 "    family = pm.glm.families.StudentT()\n",
 "    pm.glm.GLM.from_formula('y ~ x', data, family=family)\n",
-"    trace_robust = pm.sample(2000, njobs=2)\n",
+"    trace_robust = pm.sample(2000, cores=2)\n",
 "\n",
 "plt.figure(figsize=(7, 5))\n",
 "plt.plot(x_out, y_out, 'x')\n",

docs/source/notebooks/GLM-rolling-regression.ipynb

Lines changed: 1 addition & 1 deletion
@@ -327,7 +327,7 @@
 ],
 "source": [
 "with model_randomwalk:\n",
-"    trace_rw = pm.sample(tune=2000, njobs=4, samples=200,\n",
+"    trace_rw = pm.sample(tune=2000, cores=4, samples=200,\n",
 "                         nuts_kwargs=dict(target_accept=.9))"
 ]
 },

docs/source/notebooks/GLM.ipynb

Lines changed: 9 additions & 9 deletions
@@ -88,7 +88,7 @@
 "    lm = glm.LinearComponent.from_formula('y ~ x', data)\n",
 "    sigma = Uniform('sigma', 0, 20)\n",
 "    y_obs = Normal('y_obs', mu=lm.y_est, sd=sigma, observed=y)\n",
-"    trace = sample(2000, njobs=2)\n",
+"    trace = sample(2000, cores=2)\n",
 "\n",
 "plt.figure(figsize=(5, 5))\n",
 "plt.plot(x, y, 'x')\n",
@@ -135,7 +135,7 @@
 "source": [
 "with Model() as model:\n",
 "    GLM.from_formula('y ~ x', data)\n",
-"    trace = sample(2000, njobs=2)\n",
+"    trace = sample(2000, cores=2)\n",
 "\n",
 "plt.figure(figsize=(5, 5))\n",
 "plt.plot(x, y, 'x')\n",
@@ -194,7 +194,7 @@
 "source": [
 "with Model() as model:\n",
 "    GLM.from_formula('y ~ x', data_outlier)\n",
-"    trace = sample(2000, njobs=2)\n",
+"    trace = sample(2000, cores=2)\n",
 "\n",
 "plt.figure(figsize=(5, 5))\n",
 "plt.plot(x_out, y_out, 'x')\n",
@@ -244,7 +244,7 @@
 "    priors={'nu': 1.5,\n",
 "            'lam': Uniform.dist(0, 20)})\n",
 "    GLM.from_formula('y ~ x', data_outlier, family=family)\n",
-"    trace = sample(2000, njobs=2)\n",
+"    trace = sample(2000, cores=2)\n",
 "\n",
 "plt.figure(figsize=(5, 5))\n",
 "plt.plot(x_out, y_out, 'x')\n",
@@ -301,7 +301,7 @@
 "        'prcnt_take': Normal.dist(mu=grp_mean, sd=grp_sd)\n",
 "    }\n",
 "    GLM.from_formula('sat_t ~ spend + stu_tea_rat + salary + prcnt_take', sat_data, priors=priors)\n",
-"    trace_sat = sample(2000, njobs=2)"
+"    trace_sat = sample(2000, cores=2)"
 ]
 },
 {
@@ -358,7 +358,7 @@
 "    intercept = Normal.dist(mu=sat_data.sat_t.mean(), sd=sat_data.sat_t.std())\n",
 "    GLM.from_formula('sat_t ~ spend + stu_tea_rat + salary + prcnt_take', sat_data,\n",
 "                     priors={'Intercept': intercept, 'Regressor': slope})\n",
-"    trace_sat = sample(2000, njobs=2)"
+"    trace_sat = sample(2000, cores=2)"
 ]
 },
 {
@@ -417,7 +417,7 @@
 "    GLM.from_formula('sat_t ~ spend + stu_tea_rat + salary + prcnt_take', sat_data,\n",
 "                     priors={'Intercept': intercept, 'Regressor': slope})\n",
 "\n",
-"    trace_sat = sample(2000, njobs=2)"
+"    trace_sat = sample(2000, cores=2)"
 ]
 },
 {
@@ -599,7 +599,7 @@
 "source": [
 "with Model() as model_htwt:\n",
 "    GLM.from_formula('male ~ height + weight', htwt_data, family=glm.families.Binomial())\n",
-"    trace_htwt = sample(2000, njobs=2)"
+"    trace_htwt = sample(2000, cores=2)"
 ]
 },
 {
@@ -747,7 +747,7 @@
 "    GLM.from_formula('male ~ height + weight', htwt_data, family=glm.families.Binomial(),\n",
 "                     priors=priors)\n",
 "    \n",
-"    trace_lasso = sample(500, njobs=2)\n",
+"    trace_lasso = sample(500, cores=2)\n",
 "    \n",
 "trace_df = trace_to_dataframe(trace_lasso)\n",
 "scatter_matrix(trace_df, figsize=(8, 8));\n",

docs/source/notebooks/LKJ.ipynb

Lines changed: 1 addition & 1 deletion
@@ -288,7 +288,7 @@
 ],
 "source": [
 "with model:\n",
-"    trace = pm.sample(random_seed=SEED, njobs=4)"
+"    trace = pm.sample(random_seed=SEED, cores=4)"
 ]
 },
 {

docs/source/notebooks/MvGaussianRandomWalk_demo.ipynb

Lines changed: 1 addition & 1 deletion
@@ -139,7 +139,7 @@
 "\n",
 "        sd = pm.Uniform('sd', 0, 1)\n",
 "        likelihood = pm.Normal('y', mu=regression, sd=sd, observed=y_t)\n",
-"        trace = pm.sample(n_samples, njobs=4)\n",
+"        trace = pm.sample(n_samples, cores=4)\n",
 "\n",
 "    return trace, y_scaler, t_scaler, t_section"
 ]

docs/source/notebooks/PyMC3_tips_and_heuristic.ipynb

Lines changed: 4 additions & 4 deletions
@@ -484,7 +484,7 @@
 "    # Proportion spatial variance\n",
 "    alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))\n",
 "\n",
-"    trace1 = pm.sample(3e3, njobs=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
+"    trace1 = pm.sample(3e3, cores=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
 ]
 },
 {
@@ -702,7 +702,7 @@
 "    # Proportion spatial variance\n",
 "    alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))\n",
 "\n",
-"    trace2 = pm.sample(3e3, njobs=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
+"    trace2 = pm.sample(3e3, cores=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
 ]
 },
 {
@@ -856,7 +856,7 @@
 "    # Likelihood\n",
 "    Yi = pm.Poisson('Yi', mu=mu.ravel(), observed=O)\n",
 "\n",
-"    trace3 = pm.sample(3e3, njobs=2, tune=1000)"
+"    trace3 = pm.sample(3e3, cores=2, tune=1000)"
 ]
 },
 {
@@ -1104,7 +1104,7 @@
 "    # Likelihood\n",
 "    Yi = pm.Poisson('Yi', mu=mu.ravel(), observed=O)\n",
 "\n",
-"    trace4 = pm.sample(3e3, njobs=2, tune=1000)"
+"    trace4 = pm.sample(3e3, cores=2, tune=1000)"
 ]
 },
 {

docs/source/notebooks/api_quickstart.ipynb

Lines changed: 4 additions & 4 deletions
@@ -814,7 +814,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"You can also run multiple chains in parallel using the `njobs` kwarg:"
+"You can also run multiple chains in parallel using the `cores` kwarg:"
 ]
 },
 {
@@ -837,7 +837,7 @@
 "    mu = pm.Normal('mu', mu=0, sd=1)\n",
 "    obs = pm.Normal('obs', mu=mu, sd=1, observed=np.random.randn(100))\n",
 "    \n",
-"    trace = pm.sample(njobs=4)"
+"    trace = pm.sample(cores=4)"
 ]
 },
 {
@@ -1009,7 +1009,7 @@
 "    \n",
 "    step1 = pm.Metropolis(vars=[mu])\n",
 "    step2 = pm.Slice(vars=[sd])\n",
-"    trace = pm.sample(10000, step=[step1, step2], njobs=4)"
+"    trace = pm.sample(10000, step=[step1, step2], cores=4)"
 ]
 },
 {
@@ -1159,7 +1159,7 @@
 "source": [
 "with pm.Model() as model:\n",
 "    x = pm.Normal('x', mu=0, sd=1, shape=100)\n",
-"    trace = pm.sample(njobs=4)\n",
+"    trace = pm.sample(cores=4)\n",
 "    \n",
 "pm.energyplot(trace);"
 ]

docs/source/notebooks/hierarchical_partial_pooling.ipynb

Lines changed: 1 addition & 1 deletion
@@ -170,7 +170,7 @@
 ],
 "source": [
 "with baseball_model:\n",
-"    trace = pm.sample(2000, tune=1000, nchains=2,\n",
+"    trace = pm.sample(2000, tune=1000, chains=2,\n",
 "                      nuts_kwargs={'target_accept': 0.95})"
 ]
 },

docs/source/notebooks/normalizing_flows_overview.ipynb

Lines changed: 2 additions & 2 deletions
@@ -438,7 +438,7 @@
 "pm.set_tt_rng(42)\n",
 "np.random.seed(42)\n",
 "with pot1m:\n",
-"    trace = pm.sample(1000, init='auto', njobs=2, start=[dict(pot1=np.array([-2, 0])),\n",
+"    trace = pm.sample(1000, init='auto', cores=2, start=[dict(pot1=np.array([-2, 0])),\n",
 "                                                         dict(pot1=np.array([2, 0]))])"
 ]
 },
@@ -975,7 +975,7 @@
 ],
 "source": [
 "with pot_m:\n",
-"    traceNUTS = pm.sample(3000, tune=1000, target_accept=0.9, njobs=2)"
+"    traceNUTS = pm.sample(3000, tune=1000, target_accept=0.9, cores=2)"
 ]
 },
 {

docs/source/notebooks/probabilistic_matrix_factorization.ipynb

Lines changed: 4 additions & 4 deletions
@@ -810,17 +810,17 @@
 "    basename = 'pmf-mcmc-d%d' % self.dim\n",
 "    return os.path.join(DATA_DIR, basename)\n",
 "\n",
-"def _draw_samples(self, nsamples=1000, njobs=2):\n",
+"def _draw_samples(self, nsamples=1000, cores=2):\n",
 "    # First make sure the trace_dir does not already exist.\n",
 "    if os.path.isdir(self.trace_dir):\n",
 "        shutil.rmtree(self.trace_dir)\n",
 "\n",
 "    with self.model:\n",
-"        logging.info('drawing %d samples using %d jobs' % (nsamples, njobs))\n",
+"        logging.info('drawing %d samples using %d jobs' % (nsamples, cores))\n",
 "        backend = pm.backends.Text(self.trace_dir)\n",
 "        logging.info('backing up trace to directory: %s' % self.trace_dir)\n",
 "        self.trace = pm.sample(draws=nsamples, init='advi',\n",
-"                               n_init=150000, njobs=njobs, trace=backend)\n",
+"                               n_init=150000, cores=cores, trace=backend)\n",
 "    \n",
 "def _load_trace(self):\n",
 "    with self.model:\n",
@@ -837,7 +837,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"We could define some kind of default trace property like we did for the MAP, but that would mean using possibly nonsensical values for `nsamples` and `njobs`. Better to leave it as a non-optional call to `draw_samples`. Finally, we'll need a function to make predictions using our inferred values for $U$ and $V$. For user $i$ and joke $j$, a prediction is generated by drawing from $\\mathcal{N}(U_i V_j^T, \\alpha)$. To generate predictions from the sampler, we generate an $R$ matrix for each $U$ and $V$ sampled, then we combine these by averaging over the $K$ samples.\n",
+"We could define some kind of default trace property like we did for the MAP, but that would mean using possibly nonsensical values for `nsamples` and `cores`. Better to leave it as a non-optional call to `draw_samples`. Finally, we'll need a function to make predictions using our inferred values for $U$ and $V$. For user $i$ and joke $j$, a prediction is generated by drawing from $\\mathcal{N}(U_i V_j^T, \\alpha)$. To generate predictions from the sampler, we generate an $R$ matrix for each $U$ and $V$ sampled, then we combine these by averaging over the $K$ samples.\n",
 "\n",
 "\\begin{equation}\n",
 "P(R_{ij}^* \\given R, \\alpha, \\alpha_U, \\alpha_V) \\approx\n",

docs/source/notebooks/rugby_analytics.ipynb

Lines changed: 1 addition & 1 deletion
@@ -815,7 +815,7 @@
 ],
 "source": [
 "with model:\n",
-"    trace = pm.sample(1000, tune=1000, njobs=3)\n",
+"    trace = pm.sample(1000, tune=1000, cores=3)\n",
 "    pm.traceplot(trace)"
 ]
 },
