From 18c80ff31d3f31e753276067915f48caa372c7d9 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Wed, 12 Feb 2025 09:41:21 +0100
Subject: [PATCH 1/3] Add test verifying output dimension in degenerate case

---
 test/optim/test_optimize.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/test/optim/test_optimize.py b/test/optim/test_optimize.py
index 8d8be47ea0..e0bff2ce62 100644
--- a/test/optim/test_optimize.py
+++ b/test/optim/test_optimize.py
@@ -14,10 +14,12 @@
 
 import numpy as np
 import torch
+
 from botorch.acquisition.acquisition import (
     AcquisitionFunction,
     OneShotAcquisitionFunction,
 )
+from botorch.acquisition.analytic import LogExpectedImprovement
 from botorch.acquisition.knowledge_gradient import qKnowledgeGradient
 from botorch.acquisition.monte_carlo import qExpectedImprovement
 from botorch.acquisition.multi_objective.hypervolume_knowledge_gradient import (
@@ -1147,6 +1149,23 @@ def nlc(x):
             ),
         )
 
+    def test_optimize_acqf_all_fixed_features(self):
+        train_X = torch.rand(3, 2)
+        train_Y = torch.rand(3, 1)
+        gp = SingleTaskGP(train_X=train_X, train_Y=train_Y)
+        gp.eval()
+        logEI = LogExpectedImprovement(model=gp, best_f=train_Y.max())
+        bounds = torch.stack([torch.zeros(2), torch.ones(2)])
+        _, acqf_value = optimize_acqf(
+            logEI,
+            bounds,
+            q=1,
+            num_restarts=1,
+            raw_samples=1,
+            fixed_features={0: 0, 1: 0},
+        )
+        self.assertEqual(acqf_value.ndim, 0)
+
     def test_constraint_caching(self):
         def nlc(x):
             return 4 - x.sum(dim=-1)

From 3d47b628a6acd94bfc55163c9df852e228e9652b Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Wed, 12 Feb 2025 08:59:02 +0100
Subject: [PATCH 2/3] Drop output dimension of acquisition function values

---
 botorch/optim/optimize.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/botorch/optim/optimize.py b/botorch/optim/optimize.py
index 6f3a5876a9..c814321c0d 100644
--- a/botorch/optim/optimize.py
+++ b/botorch/optim/optimize.py
@@ -196,7 +196,7 @@ def _optimize_acqf_all_features_fixed(
     X = X.expand(q, *X.shape)
     with torch.no_grad():
         acq_value = acq_function(X)
-    return X, acq_value
+    return X, acq_value[0]
 
 
 def _validate_sequential_inputs(opt_inputs: OptimizeAcqfInputs) -> None:

From 3ced166eb66db032d982e95497642b8c5a56a101 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Wed, 19 Feb 2025 09:36:04 +0100
Subject: [PATCH 3/3] Extract scalar only when necessary

---
 botorch/optim/optimize.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/botorch/optim/optimize.py b/botorch/optim/optimize.py
index c814321c0d..ef37c38d87 100644
--- a/botorch/optim/optimize.py
+++ b/botorch/optim/optimize.py
@@ -196,7 +196,9 @@ def _optimize_acqf_all_features_fixed(
     X = X.expand(q, *X.shape)
     with torch.no_grad():
         acq_value = acq_function(X)
-    return X, acq_value[0]
+    if acq_value.ndim == 1:
+        acq_value = acq_value[0]
+    return X, acq_value
 
 
 def _validate_sequential_inputs(opt_inputs: OptimizeAcqfInputs) -> None:
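
Note (not part of the patches above): a minimal usage sketch, assuming the series is applied, mirroring the new regression test. With q=1 and every input dimension pinned via fixed_features, optimize_acqf takes the all-features-fixed shortcut and now returns a 0-dim acquisition value instead of one with a leading batch dimension. The model, data, and variable names are illustrative.

import torch
from botorch.acquisition.analytic import LogExpectedImprovement
from botorch.models import SingleTaskGP
from botorch.optim import optimize_acqf

# Toy model and acquisition function, as in the test added by PATCH 1/3.
train_X = torch.rand(3, 2)
train_Y = torch.rand(3, 1)
gp = SingleTaskGP(train_X=train_X, train_Y=train_Y)
gp.eval()
logEI = LogExpectedImprovement(model=gp, best_f=train_Y.max())
bounds = torch.stack([torch.zeros(2), torch.ones(2)])

candidate, acqf_value = optimize_acqf(
    logEI,
    bounds,
    q=1,
    num_restarts=1,
    raw_samples=1,
    fixed_features={0: 0, 1: 0},  # every input dimension is fixed
)
assert acqf_value.ndim == 0  # scalar acquisition value after this series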