Skip to content

Replace njobs with cores argument #2831

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Feb 4, 2018
1 change: 1 addition & 0 deletions RELEASE-NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
### Deprecations

- DIC and BPIC calculations have been removed
- `njobs` and `nchains` deprecated in favor of `cores` and `chains` for `sample`

## PyMC 3.3 (January 9, 2018)

Expand Down
10 changes: 5 additions & 5 deletions benchmarks/benchmarks/benchmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,11 +115,11 @@ def time_drug_evaluation(self):
pm.Deterministic('difference of stds', group1_std - group2_std)
pm.Deterministic(
'effect size', diff_of_means / np.sqrt((group1_std**2 + group2_std**2) / 2))
pm.sample(20000, njobs=4, chains=4)
pm.sample(20000, cores=4, chains=4)

def time_glm_hierarchical(self):
with glm_hierarchical_model():
pm.sample(draws=20000, njobs=4, chains=4)
pm.sample(draws=20000, cores=4, chains=4)


class NUTSInitSuite(object):
Expand All @@ -141,7 +141,7 @@ def track_glm_hierarchical_ess(self, init):
with glm_hierarchical_model():
start, step = pm.init_nuts(init=init, chains=self.chains, progressbar=False, random_seed=123)
t0 = time.time()
trace = pm.sample(draws=self.draws, step=step, njobs=4, chains=self.chains,
trace = pm.sample(draws=self.draws, step=step, cores=4, chains=self.chains,
start=start, random_seed=100)
tot = time.time() - t0
ess = pm.effective_n(trace, ('mu_a',))['mu_a']
Expand All @@ -154,7 +154,7 @@ def track_marginal_mixture_model_ess(self, init):
progressbar=False, random_seed=123)
start = [{k: v for k, v in start.items()} for _ in range(self.chains)]
t0 = time.time()
trace = pm.sample(draws=self.draws, step=step, njobs=4, chains=self.chains,
trace = pm.sample(draws=self.draws, step=step, cores=4, chains=self.chains,
start=start, random_seed=100)
tot = time.time() - t0
ess = pm.effective_n(trace, ('mu',))['mu'].min() # worst case
Expand All @@ -178,7 +178,7 @@ def track_glm_hierarchical_ess(self, step):
if step is not None:
step = step()
t0 = time.time()
trace = pm.sample(draws=self.draws, step=step, njobs=4, chains=4,
trace = pm.sample(draws=self.draws, step=step, cores=4, chains=4,
random_seed=100)
tot = time.time() - t0
ess = pm.effective_n(trace, ('mu_a',))['mu_a']
Expand Down
6 changes: 3 additions & 3 deletions docs/source/notebooks/AR.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@
"with pm.Model() as ar1:\n",
" beta = pm.Normal('beta', mu=0, sd=tau)\n",
" data = pm.AR('y', beta, sd=1.0, observed=y)\n",
" trace = pm.sample(1000, njobs=4)\n",
" trace = pm.sample(1000, cores=4)\n",
" \n",
"pm.traceplot(trace);"
]
Expand Down Expand Up @@ -278,7 +278,7 @@
"with pm.Model() as ar2:\n",
" beta = pm.Normal('beta', mu=0, sd=tau, shape=2)\n",
" data = pm.AR('y', beta, sd=1.0, observed=y)\n",
" trace = pm.sample(1000, njobs=4)\n",
" trace = pm.sample(1000, cores=4)\n",
" \n",
"pm.traceplot(trace);"
]
Expand Down Expand Up @@ -340,7 +340,7 @@
" beta = pm.Normal('beta', mu=0, sd=tau)\n",
" beta2 = pm.Uniform('beta2')\n",
" data = pm.AR('y', [beta, beta2], sd=1.0, observed=y)\n",
" trace = pm.sample(1000, njobs=4)\n",
" trace = pm.sample(1000, cores=4)\n",
" \n",
"pm.traceplot(trace);"
]
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/BEST.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@
],
"source": [
"with model:\n",
" trace = pm.sample(2000, njobs=2)"
" trace = pm.sample(2000, cores=2)"
]
},
{
Expand Down
4 changes: 2 additions & 2 deletions docs/source/notebooks/GLM-linear.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@
" sd=sigma, observed=y)\n",
" \n",
" # Inference!\n",
" trace = sample(3000, njobs=2) # draw 3000 posterior samples using NUTS sampling"
" trace = sample(3000, cores=2) # draw 3000 posterior samples using NUTS sampling"
]
},
{
Expand Down Expand Up @@ -234,7 +234,7 @@
" # specify glm and pass in data. The resulting linear model, its likelihood and \n",
" # and all its parameters are automatically added to our model.\n",
" glm.GLM.from_formula('y ~ x', data)\n",
" trace = sample(3000, njobs=2) # draw 3000 posterior samples using NUTS sampling"
" trace = sample(3000, cores=2) # draw 3000 posterior samples using NUTS sampling"
]
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -452,7 +452,7 @@
" # C = pm.approx_hessian(start)\n",
" # trace = pm.sample(4000, step=pm.NUTS(scaling=C))\n",
" \n",
" trace = pm.sample(2000, njobs=2)"
" trace = pm.sample(2000, cores=2)"
]
},
{
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/GLM-poisson-regression.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -640,7 +640,7 @@
],
"source": [
"with mdl_fish:\n",
" trc_fish = pm.sample(2000, tune=1000, njobs=4)[1000:]"
" trc_fish = pm.sample(2000, tune=1000, cores=4)[1000:]"
]
},
{
Expand Down
4 changes: 2 additions & 2 deletions docs/source/notebooks/GLM-robust.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@
"source": [
"with pm.Model() as model:\n",
" pm.glm.GLM.from_formula('y ~ x', data)\n",
" trace = pm.sample(2000, njobs=2)"
" trace = pm.sample(2000, cores=2)"
]
},
{
Expand Down Expand Up @@ -262,7 +262,7 @@
"with pm.Model() as model_robust:\n",
" family = pm.glm.families.StudentT()\n",
" pm.glm.GLM.from_formula('y ~ x', data, family=family)\n",
" trace_robust = pm.sample(2000, njobs=2)\n",
" trace_robust = pm.sample(2000, cores=2)\n",
"\n",
"plt.figure(figsize=(7, 5))\n",
"plt.plot(x_out, y_out, 'x')\n",
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/GLM-rolling-regression.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@
],
"source": [
"with model_randomwalk:\n",
" trace_rw = pm.sample(tune=2000, njobs=4, samples=200, \n",
" trace_rw = pm.sample(tune=2000, cores=4, samples=200, \n",
" nuts_kwargs=dict(target_accept=.9))"
]
},
Expand Down
18 changes: 9 additions & 9 deletions docs/source/notebooks/GLM.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@
" lm = glm.LinearComponent.from_formula('y ~ x', data)\n",
" sigma = Uniform('sigma', 0, 20)\n",
" y_obs = Normal('y_obs', mu=lm.y_est, sd=sigma, observed=y)\n",
" trace = sample(2000, njobs=2)\n",
" trace = sample(2000, cores=2)\n",
"\n",
"plt.figure(figsize=(5, 5))\n",
"plt.plot(x, y, 'x')\n",
Expand Down Expand Up @@ -135,7 +135,7 @@
"source": [
"with Model() as model:\n",
" GLM.from_formula('y ~ x', data)\n",
" trace = sample(2000, njobs=2)\n",
" trace = sample(2000, cores=2)\n",
"\n",
"plt.figure(figsize=(5, 5))\n",
"plt.plot(x, y, 'x')\n",
Expand Down Expand Up @@ -194,7 +194,7 @@
"source": [
"with Model() as model:\n",
" GLM.from_formula('y ~ x', data_outlier)\n",
" trace = sample(2000, njobs=2)\n",
" trace = sample(2000, cores=2)\n",
"\n",
"plt.figure(figsize=(5, 5))\n",
"plt.plot(x_out, y_out, 'x')\n",
Expand Down Expand Up @@ -244,7 +244,7 @@
" priors={'nu': 1.5,\n",
" 'lam': Uniform.dist(0, 20)})\n",
" GLM.from_formula('y ~ x', data_outlier, family=family) \n",
" trace = sample(2000, njobs=2)\n",
" trace = sample(2000, cores=2)\n",
"\n",
"plt.figure(figsize=(5, 5))\n",
"plt.plot(x_out, y_out, 'x')\n",
Expand Down Expand Up @@ -301,7 +301,7 @@
" 'prcnt_take': Normal.dist(mu=grp_mean, sd=grp_sd)\n",
" }\n",
" GLM.from_formula('sat_t ~ spend + stu_tea_rat + salary + prcnt_take', sat_data, priors=priors)\n",
" trace_sat = sample(2000, njobs=2)"
" trace_sat = sample(2000, cores=2)"
]
},
{
Expand Down Expand Up @@ -358,7 +358,7 @@
" intercept = Normal.dist(mu=sat_data.sat_t.mean(), sd=sat_data.sat_t.std())\n",
" GLM.from_formula('sat_t ~ spend + stu_tea_rat + salary + prcnt_take', sat_data,\n",
" priors={'Intercept': intercept, 'Regressor': slope})\n",
" trace_sat = sample(2000, njobs=2)"
" trace_sat = sample(2000, cores=2)"
]
},
{
Expand Down Expand Up @@ -417,7 +417,7 @@
" GLM.from_formula('sat_t ~ spend + stu_tea_rat + salary + prcnt_take', sat_data,\n",
" priors={'Intercept': intercept, 'Regressor': slope})\n",
"\n",
" trace_sat = sample(2000, njobs=2)"
" trace_sat = sample(2000, cores=2)"
]
},
{
Expand Down Expand Up @@ -599,7 +599,7 @@
"source": [
"with Model() as model_htwt:\n",
" GLM.from_formula('male ~ height + weight', htwt_data, family=glm.families.Binomial())\n",
" trace_htwt = sample(2000, njobs=2)"
" trace_htwt = sample(2000, cores=2)"
]
},
{
Expand Down Expand Up @@ -747,7 +747,7 @@
" GLM.from_formula('male ~ height + weight', htwt_data, family=glm.families.Binomial(),\n",
" priors=priors)\n",
" \n",
" trace_lasso = sample(500, njobs=2)\n",
" trace_lasso = sample(500, cores=2)\n",
" \n",
"trace_df = trace_to_dataframe(trace_lasso)\n",
"scatter_matrix(trace_df, figsize=(8, 8));\n",
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/LKJ.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@
],
"source": [
"with model:\n",
" trace = pm.sample(random_seed=SEED, njobs=4)"
" trace = pm.sample(random_seed=SEED, cores=4)"
]
},
{
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/MvGaussianRandomWalk_demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@
"\n",
" sd = pm.Uniform('sd', 0, 1)\n",
" likelihood = pm.Normal('y', mu=regression, sd=sd, observed=y_t)\n",
" trace = pm.sample(n_samples, njobs=4)\n",
" trace = pm.sample(n_samples, cores=4)\n",
"\n",
" return trace, y_scaler, t_scaler, t_section"
]
Expand Down
8 changes: 4 additions & 4 deletions docs/source/notebooks/PyMC3_tips_and_heuristic.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -484,7 +484,7 @@
"    # Proportion spatial variance\n",
" alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))\n",
"\n",
" trace1 = pm.sample(3e3, njobs=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
" trace1 = pm.sample(3e3, cores=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
]
},
{
Expand Down Expand Up @@ -702,7 +702,7 @@
"    # Proportion spatial variance\n",
" alpha = pm.Deterministic('alpha', sd_c/(sd_h+sd_c))\n",
"\n",
" trace2 = pm.sample(3e3, njobs=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
" trace2 = pm.sample(3e3, cores=2, tune=1000, nuts_kwargs={'max_treedepth': 15})"
]
},
{
Expand Down Expand Up @@ -856,7 +856,7 @@
" # Likelihood\n",
" Yi = pm.Poisson('Yi', mu=mu.ravel(), observed=O)\n",
"\n",
" trace3 = pm.sample(3e3, njobs=2, tune=1000)"
" trace3 = pm.sample(3e3, cores=2, tune=1000)"
]
},
{
Expand Down Expand Up @@ -1104,7 +1104,7 @@
" # Likelihood\n",
" Yi = pm.Poisson('Yi', mu=mu.ravel(), observed=O)\n",
"\n",
" trace4 = pm.sample(3e3, njobs=2, tune=1000)"
" trace4 = pm.sample(3e3, cores=2, tune=1000)"
]
},
{
Expand Down
8 changes: 4 additions & 4 deletions docs/source/notebooks/api_quickstart.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -814,7 +814,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"You can also run multiple chains in parallel using the `njobs` kwarg:"
"You can also run multiple chains in parallel using the `cores` kwarg:"
]
},
{
Expand All @@ -837,7 +837,7 @@
" mu = pm.Normal('mu', mu=0, sd=1)\n",
" obs = pm.Normal('obs', mu=mu, sd=1, observed=np.random.randn(100))\n",
" \n",
" trace = pm.sample(njobs=4)"
" trace = pm.sample(cores=4)"
]
},
{
Expand Down Expand Up @@ -1009,7 +1009,7 @@
" \n",
" step1 = pm.Metropolis(vars=[mu])\n",
" step2 = pm.Slice(vars=[sd])\n",
" trace = pm.sample(10000, step=[step1, step2], njobs=4)"
" trace = pm.sample(10000, step=[step1, step2], cores=4)"
]
},
{
Expand Down Expand Up @@ -1159,7 +1159,7 @@
"source": [
"with pm.Model() as model:\n",
" x = pm.Normal('x', mu=0, sd=1, shape=100) \n",
" trace = pm.sample(njobs=4)\n",
" trace = pm.sample(cores=4)\n",
" \n",
"pm.energyplot(trace);"
]
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/hierarchical_partial_pooling.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@
],
"source": [
"with baseball_model:\n",
" trace = pm.sample(2000, tune=1000, nchains=2,\n",
" trace = pm.sample(2000, tune=1000, chains=2,\n",
" nuts_kwargs={'target_accept': 0.95})"
]
},
Expand Down
4 changes: 2 additions & 2 deletions docs/source/notebooks/normalizing_flows_overview.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -438,7 +438,7 @@
"pm.set_tt_rng(42)\n",
"np.random.seed(42)\n",
"with pot1m:\n",
" trace = pm.sample(1000, init='auto', njobs=2, start=[dict(pot1=np.array([-2, 0])),\n",
" trace = pm.sample(1000, init='auto', cores=2, start=[dict(pot1=np.array([-2, 0])),\n",
" dict(pot1=np.array([2, 0]))])"
]
},
Expand Down Expand Up @@ -975,7 +975,7 @@
],
"source": [
"with pot_m:\n",
" traceNUTS = pm.sample(3000, tune=1000, target_accept=0.9, njobs=2)"
" traceNUTS = pm.sample(3000, tune=1000, target_accept=0.9, cores=2)"
]
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -810,17 +810,17 @@
" basename = 'pmf-mcmc-d%d' % self.dim\n",
" return os.path.join(DATA_DIR, basename)\n",
"\n",
"def _draw_samples(self, nsamples=1000, njobs=2):\n",
"def _draw_samples(self, nsamples=1000, cores=2):\n",
" # First make sure the trace_dir does not already exist.\n",
" if os.path.isdir(self.trace_dir):\n",
" shutil.rmtree(self.trace_dir)\n",
"\n",
" with self.model:\n",
" logging.info('drawing %d samples using %d jobs' % (nsamples, njobs))\n",
" logging.info('drawing %d samples using %d jobs' % (nsamples, cores))\n",
" backend = pm.backends.Text(self.trace_dir)\n",
" logging.info('backing up trace to directory: %s' % self.trace_dir)\n",
" self.trace = pm.sample(draws=nsamples, init='advi',\n",
" n_init=150000, njobs=njobs, trace=backend)\n",
" n_init=150000, cores=cores, trace=backend)\n",
" \n",
"def _load_trace(self):\n",
" with self.model:\n",
Expand All @@ -837,7 +837,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"We could define some kind of default trace property like we did for the MAP, but that would mean using possibly nonsensical values for `nsamples` and `njobs`. Better to leave it as a non-optional call to `draw_samples`. Finally, we'll need a function to make predictions using our inferred values for $U$ and $V$. For user $i$ and joke $j$, a prediction is generated by drawing from $\\mathcal{N}(U_i V_j^T, \\alpha)$. To generate predictions from the sampler, we generate an $R$ matrix for each $U$ and $V$ sampled, then we combine these by averaging over the $K$ samples.\n",
"We could define some kind of default trace property like we did for the MAP, but that would mean using possibly nonsensical values for `nsamples` and `cores`. Better to leave it as a non-optional call to `draw_samples`. Finally, we'll need a function to make predictions using our inferred values for $U$ and $V$. For user $i$ and joke $j$, a prediction is generated by drawing from $\\mathcal{N}(U_i V_j^T, \\alpha)$. To generate predictions from the sampler, we generate an $R$ matrix for each $U$ and $V$ sampled, then we combine these by averaging over the $K$ samples.\n",
"\n",
"\\begin{equation}\n",
"P(R_{ij}^* \\given R, \\alpha, \\alpha_U, \\alpha_V) \\approx\n",
Expand Down
2 changes: 1 addition & 1 deletion docs/source/notebooks/rugby_analytics.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -815,7 +815,7 @@
],
"source": [
"with model:\n",
" trace = pm.sample(1000, tune=1000, njobs=3)\n",
" trace = pm.sample(1000, tune=1000, cores=3)\n",
" pm.traceplot(trace)"
]
},
Expand Down
Loading