Deprecate optimization.backend
yngve-sk committed Jan 29, 2025
1 parent 7deaa30 commit 3b6f51e
Showing 4 changed files with 23 additions and 14 deletions.
26 changes: 18 additions & 8 deletions src/everest/config/optimization_config.py
@@ -1,14 +1,16 @@
+import logging
 from typing import Any
 
 from pydantic import BaseModel, Field, model_validator
 
 from everest.config.cvar_config import CVaRConfig
 from everest.optimizer.utils import get_ropt_plugin_manager
+from everest.strings import EVEREST
 
 
 class OptimizationConfig(BaseModel, extra="forbid"): # type: ignore
     algorithm: str | None = Field(
-        default="default",
+        default="optpp_q_newton",
         description="""Algorithm used by Everest. Defaults to
             optpp_q_newton, a quasi-Newton algorithm in Dakota's OPT PP library.
             """,
@@ -33,8 +35,8 @@ class OptimizationConfig(BaseModel, extra="forbid"): # type: ignore
             (From the Dakota Manual.)""",
     )
     backend: str | None = Field(
-        default="dakota",
-        description="""The optimization backend used. Defaults to "dakota".
+        default=None,
+        description="""(deprecated) The optimization backend used.
             Currently, backends are included to use Dakota or SciPy ("dakota" and "scipy").
             The Dakota backend is the default, and can be assumed to be installed. The SciPy
@@ -199,9 +201,17 @@ class OptimizationConfig(BaseModel, extra="forbid"): # type: ignore
     @model_validator(mode="after")
     def validate_backend_and_algorithm(self): # pylint: disable=E0213
         method = "default" if self.algorithm is None else self.algorithm
-        backend = "dakota" if self.backend is None else self.backend
-        if not get_ropt_plugin_manager().is_supported(
-            "optimizer", f"{backend}/{method}"
-        ):
-            raise ValueError(f"Optimizer algorithm '{backend}/{method}' not found")
+        if not get_ropt_plugin_manager().is_supported("optimizer", method):
+            raise ValueError(f"Optimizer algorithm '{method}' not found")
+
+        if self.backend is not None:
+            message = (
+                "optimization.backend is deprecated. "
+                "The correct backend will be inferred by the algorithm. "
+                "If several backends have an algorithm named A and you want to pick "
+                "a specific backend B, put B/A in optimization.algorithm."
+            )
+            print(message)
+            logging.getLogger(EVEREST).warning(message)
 
         return self
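
The deprecation message above spells out the migration path: fold the backend into the algorithm name. A minimal before/after sketch, constructing OptimizationConfig directly; "scipy" and "slsqp" are illustrative names, assuming a SciPy-capable ropt plugin is installed:

from everest.config.optimization_config import OptimizationConfig

# Old style: still accepted, but now prints and logs the deprecation warning;
# the backend value no longer participates in the algorithm lookup.
old = OptimizationConfig(backend="scipy", algorithm="slsqp")

# New style: the backend is inferred from the algorithm name alone...
inferred = OptimizationConfig(algorithm="slsqp")

# ...or pinned explicitly with the "backend/algorithm" form when several
# backends provide an algorithm with the same name.
pinned = OptimizationConfig(algorithm="scipy/slsqp")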
5 changes: 2 additions & 3 deletions src/everest/optimizer/everest2ropt.py
@@ -258,9 +258,8 @@ def _parse_optimization(
     ropt_optimizer = ropt_config["optimizer"]
     ropt_gradient = ropt_config["gradient"]
 
-    backend = ever_opt.backend or "dakota"
-    algorithm = ever_opt.algorithm or "default"
-    ropt_optimizer["method"] = f"{backend}/{algorithm}"
+    algorithm = ever_opt.algorithm or "optpp_q_newton"
+    ropt_optimizer["method"] = f"{algorithm}"
 
     alg_max_iter = ever_opt.max_iterations
     if alg_max_iter:
2 changes: 1 addition & 1 deletion tests/everest/test_config_validation.py
@@ -544,7 +544,7 @@ def test_that_missing_optimization_algorithm_errors():
     with pytest.raises(ValueError) as e:
         EverestConfig.with_defaults(**{"optimization": {"algorithm": "ddlygldt"}})
 
-    assert has_error(e.value, match="Optimizer algorithm 'dakota/ddlygldt' not found")
+    assert has_error(e.value, match="Optimizer algorithm 'ddlygldt' not found")
 
 
 @pytest.mark.parametrize(
4 changes: 2 additions & 2 deletions tests/everest/test_ropt_initialization.py
@@ -132,7 +132,7 @@ def test_everest2ropt_controls_optimizer_setting():
     config = EverestConfig.load_file(config)
     ropt_config = everest2ropt(config)
     assert len(ropt_config.realizations.names) == 15
-    assert ropt_config.optimizer.method == "dakota/conmin_mfd"
+    assert ropt_config.optimizer.method == "conmin_mfd"
     assert ropt_config.gradient.number_of_perturbations == 20
     assert ropt_config.realizations.names == tuple(range(15))
 
@@ -244,4 +244,4 @@ def test_everest2ropt_no_algorithm_name(copy_test_data_to_tmp):

     config.optimization.algorithm = None
     ropt_config = everest2ropt(config)
-    assert ropt_config.optimizer.method == "dakota/default"
+    assert ropt_config.optimizer.method == "optpp_q_newton"
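
The commit itself does not add a test for the deprecation warning path; a hypothetical follow-up test could assert the logged warning with pytest's caplog fixture (the test name is invented here, and the EverestConfig.with_defaults usage is modeled on the validation test above):

import logging

def test_that_deprecated_backend_warns(caplog):
    with caplog.at_level(logging.WARNING):
        EverestConfig.with_defaults(
            **{"optimization": {"backend": "dakota", "algorithm": "optpp_q_newton"}}
        )
    assert "optimization.backend is deprecated" in caplog.text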
