Commit d333163

SebastianAment authored and facebook-github-bot committed
compute_smoothed_constraint_indicator -> compute_smoothed_feasibility_indicator (#1935)
Summary: Pull Request resolved: #1935

D47365085 introduced the aptly named `compute_feasibility_indicator`. This commit brings the smoothed counterpart in alignment with the naming convention.

Reviewed By: Balandat

Differential Revision: D47436246

fbshipit-source-id: 00be7f9fad99f1ce7e317e7b385629010e066d09
1 parent ef5a939 · commit d333163
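For readers unfamiliar with the two functions named in the summary: `compute_feasibility_indicator` returns a hard Boolean feasibility check, while the renamed `compute_smoothed_feasibility_indicator` replaces the step function with a sigmoid whose sharpness is set by the temperature `eta`. The sketch below is illustrative only, under the "negative means feasible" convention quoted in the docstrings touched by this commit; `smoothed_feasibility_sketch` is a made-up name, not the library implementation.

import torch

def smoothed_feasibility_sketch(constraints, samples, eta=1e-3):
    # Illustrative only. Each callable in `constraints` maps `samples` to
    # values that are negative when the constraint is satisfied, matching the
    # "less than zero" convention in the docstrings below. The smoothed
    # indicator is the product of sigmoid(-c(samples) / eta) over constraints.
    # Assumes at least one constraint is provided.
    ind = torch.ones_like(constraints[0](samples))
    for c in constraints:
        ind = ind * torch.sigmoid(-c(samples) / eta)
    return ind

# As eta -> 0 the sigmoid approaches a step function and the smoothed
# indicator recovers the hard Boolean `compute_feasibility_indicator`.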

File tree

5 files changed (+19 -19 lines changed):
  botorch/acquisition/input_constructors.py
  botorch/acquisition/monte_carlo.py
  botorch/acquisition/multi_objective/monte_carlo.py
  botorch/utils/objective.py
  test/utils/test_objective.py

botorch/acquisition/input_constructors.py

Lines changed: 3 additions & 3 deletions

@@ -482,7 +482,7 @@ def construct_inputs_qEI(
             are considered satisfied if the output is less than zero.
         eta: Temperature parameter(s) governing the smoothness of the sigmoid
             approximation to the constraint indicators. For more details, on this
-            parameter, see the docs of `compute_smoothed_constraint_indicator`.
+            parameter, see the docs of `compute_smoothed_feasibility_indicator`.
         ignored: Not used.
 
     Returns:
@@ -548,7 +548,7 @@ def construct_inputs_qNEI(
             are considered satisfied if the output is less than zero.
         eta: Temperature parameter(s) governing the smoothness of the sigmoid
             approximation to the constraint indicators. For more details, on this
-            parameter, see the docs of `compute_smoothed_constraint_indicator`.
+            parameter, see the docs of `compute_smoothed_feasibility_indicator`.
         ignored: Not used.
 
     Returns:
@@ -620,7 +620,7 @@ def construct_inputs_qPI(
             are considered satisfied if the output is less than zero.
         eta: Temperature parameter(s) governing the smoothness of the sigmoid
             approximation to the constraint indicators. For more details, on this
-            parameter, see the docs of `compute_smoothed_constraint_indicator`.
+            parameter, see the docs of `compute_smoothed_feasibility_indicator`.
         ignored: Not used.
 
     Returns:
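The three docstring updates above all point at `compute_smoothed_feasibility_indicator` for the meaning of `eta`. As a quick numerical illustration of `eta` acting as a temperature (plain sigmoid arithmetic, not a BoTorch call), consider a single constraint value of -0.05, i.e. just inside the feasible region:

import torch

c = torch.tensor(-0.05)  # constraint value: negative, hence feasible
for eta in (1e-3, 1e-2, 1e-1):
    # single-constraint smoothed indicator: sigmoid(-c / eta)
    print(f"eta={eta:g} -> {torch.sigmoid(-c / eta).item():.3f}")
# eta=0.001 -> 1.000  (essentially the hard indicator)
# eta=0.01  -> 0.993
# eta=0.1   -> 0.622  (heavily smoothed)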

botorch/acquisition/monte_carlo.py

Lines changed: 6 additions & 6 deletions

@@ -42,7 +42,7 @@
 from botorch.exceptions.errors import UnsupportedError
 from botorch.models.model import Model
 from botorch.sampling.base import MCSampler
-from botorch.utils.objective import compute_smoothed_constraint_indicator
+from botorch.utils.objective import compute_smoothed_feasibility_indicator
 from botorch.utils.transforms import (
     concatenate_pending_points,
     match_batch_shape,
@@ -215,7 +215,7 @@ def __init__(
                 acquistion utilities, e.g. all improvement-based acquisition functions.
             eta: Temperature parameter(s) governing the smoothness of the sigmoid
                 approximation to the constraint indicators. For more details, on this
-                parameter, see the docs of `compute_smoothed_constraint_indicator`.
+                parameter, see the docs of `compute_smoothed_feasibility_indicator`.
         """
         if constraints is not None and isinstance(objective, ConstrainedMCObjective):
             raise ValueError(
@@ -305,7 +305,7 @@ def _apply_constraints(self, acqval: Tensor, samples: Tensor) -> Tensor:
                     "Constraint-weighting requires unconstrained "
                     "acquisition values to be non-negative."
                 )
-            acqval = acqval * compute_smoothed_constraint_indicator(
+            acqval = acqval * compute_smoothed_feasibility_indicator(
                 constraints=self._constraints, samples=samples, eta=self._eta
             )
         return acqval
@@ -366,7 +366,7 @@ def __init__(
                 are considered satisfied if the output is less than zero.
             eta: Temperature parameter(s) governing the smoothness of the sigmoid
                 approximation to the constraint indicators. For more details, on this
-                parameter, see the docs of `compute_smoothed_constraint_indicator`.
+                parameter, see the docs of `compute_smoothed_feasibility_indicator`.
         """
         super().__init__(
             model=model,
@@ -457,7 +457,7 @@ def __init__(
                 are considered satisfied if the output is less than zero.
            eta: Temperature parameter(s) governing the smoothness of the sigmoid
                 approximation to the constraint indicators. For more details, on this
-                parameter, see the docs of `compute_smoothed_constraint_indicator`.
+                parameter, see the docs of `compute_smoothed_feasibility_indicator`.
 
         TODO: similar to qNEHVI, when we are using sequential greedy candidate
         selection, we could incorporate pending points X_baseline and compute
@@ -671,7 +671,7 @@ def __init__(
                 scalar is less than zero.
             eta: Temperature parameter(s) governing the smoothness of the sigmoid
                 approximation to the constraint indicators. For more details, on this
-                parameter, see the docs of `compute_smoothed_constraint_indicator`.
+                parameter, see the docs of `compute_smoothed_feasibility_indicator`.
         """
         super().__init__(
             model=model,
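The `_apply_constraints` hunk above is where the renamed helper is actually used: non-negative per-sample acquisition values are multiplied by the smoothed feasibility weights. Below is a hedged usage sketch of that pattern from the caller's side; it assumes a fitted multi-output model named `model` (not part of this diff) and the `constraints`/`eta` constructor arguments documented in the hunks above.

from botorch.acquisition.monte_carlo import qExpectedImprovement

# Constraint callables map posterior samples (`... x q x m`) to values that
# are negative when feasible, per the "less than zero" convention above.
constraint = lambda samples: samples[..., 1]  # second outcome must be <= 0

acqf = qExpectedImprovement(
    model=model,          # assumed: a fitted multi-output GP, not defined here
    best_f=0.0,
    constraints=[constraint],
    eta=1e-3,             # temperature of the sigmoid approximation
)
# Internally, the improvement samples (non-negative by construction) are
# scaled by compute_smoothed_feasibility_indicator(...) before reduction.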

botorch/acquisition/multi_objective/monte_carlo.py

Lines changed: 3 additions & 3 deletions

@@ -57,7 +57,7 @@
 from botorch.utils.multi_objective.box_decompositions.utils import (
     _pad_batch_pareto_frontier,
 )
-from botorch.utils.objective import compute_smoothed_constraint_indicator
+from botorch.utils.objective import compute_smoothed_feasibility_indicator
 from botorch.utils.torch import BufferDict
 from botorch.utils.transforms import (
     concatenate_pending_points,
@@ -279,7 +279,7 @@ def _compute_qehvi(self, samples: Tensor, X: Optional[Tensor] = None) -> Tensor:
         obj = self.objective(samples, X=X)
         q = obj.shape[-2]
         if self.constraints is not None:
-            feas_weights = compute_smoothed_constraint_indicator(
+            feas_weights = compute_smoothed_feasibility_indicator(
                 constraints=self.constraints, samples=samples, eta=self.eta
             )  # `sample_shape x batch-shape x q`
         self._cache_q_subset_indices(q_out=q)
@@ -414,7 +414,7 @@ def __init__(
                 tensor the length of the tensor must match the number of provided
                 constraints. The i-th constraint is then estimated with the i-th
                 eta value. For more details, on this parameter, see the docs of
-                `compute_smoothed_constraint_indicator`.
+                `compute_smoothed_feasibility_indicator`.
             prune_baseline: If True, remove points in `X_baseline` that are
                 highly unlikely to be the pareto optimal and better than the
                 reference point. This can significantly improve computation time and
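The qNEHVI docstring above notes that `eta` may also be a tensor with one temperature per constraint, the i-th entry smoothing the i-th constraint. A minimal standalone sketch of that convention using the renamed helper directly; the constraint lambdas here are made up for illustration.

import torch
from botorch.utils.objective import compute_smoothed_feasibility_indicator

samples = torch.randn(8, 4, 2)        # sample_shape x q x m
c1 = lambda Y: Y[..., 0]              # feasible where the first outcome <= 0
c2 = lambda Y: Y[..., 1] - 0.5        # feasible where the second outcome <= 0.5

weights = compute_smoothed_feasibility_indicator(
    constraints=[c1, c2],
    samples=samples,
    eta=torch.tensor([1e-3, 1e-1]),   # i-th eta smooths the i-th constraint
)
# `weights` has shape `sample_shape x q` and lies in (0, 1); a mismatched
# eta length raises a ValueError, as exercised in the test diff below.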

botorch/utils/objective.py

Lines changed: 2 additions & 2 deletions

@@ -87,7 +87,7 @@ def apply_constraints_nonnegative_soft(
     Returns:
         A `n_samples x b x q (x m')`-dim tensor of feasibility-weighted objectives.
     """
-    w = compute_smoothed_constraint_indicator(
+    w = compute_smoothed_feasibility_indicator(
         constraints=constraints, samples=samples, eta=eta
     )
     if obj.dim() == samples.dim():
@@ -116,7 +116,7 @@ def compute_feasibility_indicator(
     return ind
 
 
-def compute_smoothed_constraint_indicator(
+def compute_smoothed_feasibility_indicator(
     constraints: List[Callable[[Tensor], Tensor]],
     samples: Tensor,
     eta: Union[Tensor, float],
test/utils/test_objective.py

Lines changed: 5 additions & 5 deletions

@@ -9,7 +9,7 @@
 from botorch.utils import apply_constraints, get_objective_weights_transform
 from botorch.utils.objective import (
     compute_feasibility_indicator,
-    compute_smoothed_constraint_indicator,
+    compute_smoothed_feasibility_indicator,
 )
 from botorch.utils.testing import BotorchTestCase
 from torch import Tensor
@@ -196,14 +196,14 @@ def test_constraint_indicators(self):
         self.assertAllClose(ind, torch.zeros_like(ind))
         self.assertEqual(ind.dtype, torch.bool)
 
-        smoothed_ind = compute_smoothed_constraint_indicator(
+        smoothed_ind = compute_smoothed_feasibility_indicator(
             constraints=[zeros_f], samples=samples, eta=1e-3
         )
         self.assertAllClose(smoothed_ind, ones_f(samples) / 2)
 
         # two constraints
         samples = torch.randn(1)
-        smoothed_ind = compute_smoothed_constraint_indicator(
+        smoothed_ind = compute_smoothed_feasibility_indicator(
             constraints=[zeros_f, zeros_f],
             samples=samples,
             eta=1e-3,
@@ -218,13 +218,13 @@ def test_constraint_indicators(self):
         )
         self.assertAllClose(ind, torch.ones_like(ind))
 
-        smoothed_ind = compute_smoothed_constraint_indicator(
+        smoothed_ind = compute_smoothed_feasibility_indicator(
             constraints=[minus_one_f], samples=samples, eta=1e-3
        )
         self.assertTrue((smoothed_ind > 3 / 4).all())
 
         with self.assertRaisesRegex(ValueError, "Number of provided constraints"):
-            compute_smoothed_constraint_indicator(
+            compute_smoothed_feasibility_indicator(
                 constraints=[zeros_f, zeros_f],
                 samples=samples,
                 eta=torch.tensor([0.1], device=self.device),