# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""Sbplx (Subplex) optimizer."""

from .nloptimizer import NLoptOptimizer, NLoptOptimizerType


class SBPLX(NLoptOptimizer):
    """
    Subplex optimizer.

    'Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
    is claimed to be much more efficient and robust than the original Nelder-Mead,
    while retaining the latter's facility with discontinuous objectives. While these
    claims seem to be true in many cases, we could not find any proof that Subplex is
    globally convergent, and perhaps it may fail for some objective functions like
    Nelder-Mead; YMMV.', by Steven G. Johnson, author of the NLopt library.

    NLopt local optimizer, derivative-free.
    For further detail, please refer to
    https://nlopt.readthedocs.io/en/latest/NLopt_Algorithms/#sbplx-based-on-subplex
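
    Examples:
        A minimal usage sketch, assuming the optional ``nlopt`` package is installed.
        The import path, constructor argument, and objective function below are
        illustrative placeholders based on the usual ``NLoptOptimizer`` interface
        (a ``max_evals`` budget and a ``minimize`` entry point) and are not prescribed
        by this module.

        .. code-block:: python

            from qiskit_algorithms.optimizers import SBPLX  # hypothetical import path

            def objective(x):
                # Simple smooth test function; replace with your own objective.
                return (x[0] - 1.0) ** 2 + (x[1] + 2.0) ** 2

            optimizer = SBPLX(max_evals=200)
            result = optimizer.minimize(fun=objective, x0=[0.0, 0.0])
            print(result.x, result.fun)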
    """

    def get_nlopt_optimizer(self) -> NLoptOptimizerType:
        """Return the NLopt optimizer type."""
        return NLoptOptimizerType.LN_SBPLX
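

# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of this module's API): roughly what
# selecting ``NLoptOptimizerType.LN_SBPLX`` corresponds to when calling the
# ``nlopt`` package directly, which the ``NLoptOptimizer`` base class is
# assumed to delegate to. The objective, starting point, and evaluation
# budget are placeholders; running this requires the optional ``nlopt`` and
# ``numpy`` dependencies.
if __name__ == "__main__":
    import nlopt
    import numpy as np

    def _objective(x, grad):
        # NLopt passes a gradient buffer even to derivative-free algorithms;
        # Sbplx never uses it, so it is left untouched here.
        return float((x[0] - 1.0) ** 2 + (x[1] + 2.0) ** 2)

    opt = nlopt.opt(nlopt.LN_SBPLX, 2)  # Subplex on a 2-dimensional problem
    opt.set_min_objective(_objective)
    opt.set_maxeval(200)
    x_opt = opt.optimize(np.array([0.0, 0.0]))
    print("optimum:", x_opt, "value:", opt.last_optimum_value())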