Skip to content

Commit 9725ade

Browse files
authored
Add SBPLX optimizer (#924)
* Port optimizer update from Algorithms * Fix indentation html * Fix indentation and polish up inits * Fix copyright
1 parent ff55d2c commit 9725ade

File tree

7 files changed

+115
-48
lines changed

7 files changed

+115
-48
lines changed

.pylintdict

+5-1
Original file line numberDiff line numberDiff line change
@@ -253,6 +253,7 @@ izaac
253253
izz
254254
jac
255255
jacobian
256+
johnson
256257
jm
257258
jonathan
258259
jones
@@ -487,6 +488,7 @@ sanjiv
487488
sashank
488489
satisfiability
489490
satyen
491+
sbplx
490492
scalability
491493
schroediger
492494
schroedinger
@@ -529,13 +531,15 @@ stdout
529531
stefano
530532
steppable
531533
stepsize
534+
steven
532535
str
533536
stratifications
534537
stratification
535538
subcircuits
536539
subclassed
537540
subclasses
538541
subcomponents
542+
subplex
539543
submodules
540544
subobjects
541545
subseteq
@@ -627,4 +631,4 @@ zz
627631
ω
628632
φ_i
629633
φ_ij
630-
Δ
634+
Δ

qiskit_machine_learning/optimizers/__init__.py

+56-42
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# This code is part of a Qiskit project.
22
#
3-
# (C) Copyright IBM 2018, 2024.
3+
# (C) Copyright IBM 2018, 2025.
44
#
55
# This code is licensed under the Apache License, Version 2.0. You may
66
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -32,56 +32,57 @@
3232
----------------------
3333
3434
.. autosummary::
35-
:toctree: ../stubs/
36-
:nosignatures:
35+
:toctree: ../stubs/
36+
:nosignatures:
3737
38-
OptimizerResult
39-
Optimizer
40-
Minimizer
38+
OptimizerResult
39+
Optimizer
40+
Minimizer
4141
4242
Steppable optimization
4343
----------------------
4444
4545
.. autosummary::
46-
:toctree: ../stubs/
46+
:toctree: ../stubs/
4747
48-
optimizer_utils
48+
optimizer_utils
4949
5050
.. autosummary::
51-
:toctree: ../stubs/
52-
:nosignatures:
51+
:toctree: ../stubs/
52+
:nosignatures:
5353
54-
SteppableOptimizer
55-
AskData
56-
TellData
57-
OptimizerState
54+
SteppableOptimizer
55+
AskData
56+
TellData
57+
OptimizerState
5858
5959
6060
Local optimizers
6161
----------------
6262
6363
.. autosummary::
64-
:toctree: ../stubs/
65-
:nosignatures:
66-
67-
ADAM
68-
AQGD
69-
CG
70-
COBYLA
71-
L_BFGS_B
72-
GSLS
73-
GradientDescent
74-
GradientDescentState
75-
NELDER_MEAD
76-
NFT
77-
P_BFGS
78-
POWELL
79-
SLSQP
80-
SPSA
81-
QNSPSA
82-
TNC
83-
SciPyOptimizer
84-
UMDA
64+
:toctree: ../stubs/
65+
:nosignatures:
66+
67+
ADAM
68+
AQGD
69+
CG
70+
COBYLA
71+
L_BFGS_B
72+
GSLS
73+
GradientDescent
74+
GradientDescentState
75+
NELDER_MEAD
76+
NFT
77+
P_BFGS
78+
POWELL
79+
SLSQP
80+
SPSA
81+
QNSPSA
82+
TNC
83+
SciPyOptimizer
84+
UMDA
85+
8586
8687
The optimizers from
8788
`scikit-quant <https://scikit-quant.readthedocs.io/en/latest/>`_ are not included in the
@@ -91,21 +92,32 @@
9192
https://github.com/qiskit-community/qiskit-algorithms/issues/84.
9293
9394
95+
Qiskit also provides local optimizers based on
96+
`NLOpt <https://nlopt.readthedocs.io/en/latest/>`_.
97+
See the Global optimizers section below for the optional NLopt installation instructions.
98+
99+
.. autosummary::
100+
:toctree: ../stubs/
101+
:nosignatures:
102+
103+
SBPLX
104+
105+
94106
Global optimizers
95107
-----------------
96108
The global optimizers here all use `NLOpt <https://nlopt.readthedocs.io/en/latest/>`_ for their
97109
core function and can only be used if the optional dependent ``NLOpt`` package is installed.
98110
To install the ``NLOpt`` dependent package you can use ``pip install nlopt``.
99111
100112
.. autosummary::
101-
:toctree: ../stubs/
102-
:nosignatures:
113+
:toctree: ../stubs/
114+
:nosignatures:
103115
104-
CRS
105-
DIRECT_L
106-
DIRECT_L_RAND
107-
ESCH
108-
ISRES
116+
CRS
117+
DIRECT_L
118+
DIRECT_L_RAND
119+
ESCH
120+
ISRES
109121
110122
"""
111123

@@ -123,6 +135,7 @@
123135
from .nlopts.direct_l_rand import DIRECT_L_RAND
124136
from .nlopts.esch import ESCH
125137
from .nlopts.isres import ISRES
138+
from .nlopts.sbplx import SBPLX
126139
from .steppable_optimizer import SteppableOptimizer, AskData, TellData, OptimizerState
127140
from .optimizer import Minimizer, Optimizer, OptimizerResult, OptimizerSupportLevel
128141
from .p_bfgs import P_BFGS
@@ -165,5 +178,6 @@
165178
"DIRECT_L_RAND",
166179
"ESCH",
167180
"ISRES",
181+
"SBPLX",
168182
"UMDA",
169183
]
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# This code is part of a Qiskit project.
22
#
3-
# (C) Copyright IBM 2018, 2024.
3+
# (C) Copyright IBM 2018, 2025.
44
#
55
# This code is licensed under the Apache License, Version 2.0. You may
66
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -10,11 +10,12 @@
1010
# copyright notice, and modified files need to carry a notice indicating
1111
# that they have been altered from the originals.
1212

13-
"""NLopt based global optimizers"""
13+
"""NLopt-based global and local optimizers"""
1414

1515
from .crs import CRS
1616
from .direct_l import DIRECT_L
1717
from .direct_l_rand import DIRECT_L_RAND
1818
from .esch import ESCH
1919
from .isres import ISRES
20+
from .sbplx import SBPLX
2021
from .nloptimizer import NLoptOptimizer

qiskit_machine_learning/optimizers/nlopts/nloptimizer.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# This code is part of a Qiskit project.
22
#
3-
# (C) Copyright IBM 2018, 2024.
3+
# (C) Copyright IBM 2018, 2025.
44
#
55
# This code is licensed under the Apache License, Version 2.0. You may
66
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -33,12 +33,13 @@ class NLoptOptimizerType(Enum):
3333
GN_DIRECT_L = 3
3434
GN_ESCH = 4
3535
GN_ISRES = 5
36+
LN_SBPLX = 6
3637

3738

3839
@_optionals.HAS_NLOPT.require_in_instance
3940
class NLoptOptimizer(Optimizer):
4041
"""
41-
NLopt global optimizer base class
42+
NLopt local and global optimizer base class
4243
"""
4344

4445
_OPTIONS = ["max_evals"]
@@ -64,6 +65,7 @@ def __init__(self, max_evals: int = 1000) -> None: # pylint: disable=unused-arg
6465
NLoptOptimizerType.GN_DIRECT_L: nlopt.GN_DIRECT_L,
6566
NLoptOptimizerType.GN_ESCH: nlopt.GN_ESCH,
6667
NLoptOptimizerType.GN_ISRES: nlopt.GN_ISRES,
68+
NLoptOptimizerType.LN_SBPLX: nlopt.LN_SBPLX,
6769
}
6870

6971
@abstractmethod
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
# This code is part of a Qiskit project.
2+
#
3+
# (C) Copyright IBM 2025.
4+
#
5+
# This code is licensed under the Apache License, Version 2.0. You may
6+
# obtain a copy of this license in the LICENSE.txt file in the root directory
7+
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8+
#
9+
# Any modifications or derivative works of this code must retain this
10+
# copyright notice, and modified files need to carry a notice indicating
11+
# that they have been altered from the originals.
12+
13+
"""Sbplx (Subplex) optimizer."""
14+
15+
from .nloptimizer import NLoptOptimizer, NLoptOptimizerType
16+
17+
18+
class SBPLX(NLoptOptimizer):
    """
    Subplex optimizer.

    'Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
    is claimed to be much more efficient and robust than the original Nelder-Mead,
    while retaining the latter's facility with discontinuous objectives.
    While these claims seem to be true in many cases, we could not find any proof that
    Subplex is globally convergent, and perhaps it may fail for some objective functions
    like Nelder-Mead; YMMV.)', by Steven G. Johnson, author of NLopt library.

    NLopt local optimizer, derivative-free.
    For further detail, please refer to
    https://nlopt.readthedocs.io/en/latest/NLopt_Algorithms/#sbplx-based-on-subplex
    """

    def get_nlopt_optimizer(self) -> NLoptOptimizerType:
        """Return NLopt optimizer type."""
        # Map this wrapper class onto the corresponding NLopt algorithm enum.
        optimizer_type = NLoptOptimizerType.LN_SBPLX
        return optimizer_type
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
---
2+
features:
3+
- |
4+
Support for :class:`.SBPLX` optimizer from NLopt library has been added.
5+
SBPLX is a local gradient-free optimizer based on Nelder-Mead and
6+
is expected to show better convergence behavior.
7+
Further information about this optimizer and the others can be found in
8+
the API reference for the :mod:`~qiskit_machine_learning.optimizers` module.

test/optimizers/test_optimizers.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# This code is part of a Qiskit project.
22
#
3-
# (C) Copyright IBM 2018, 2024.
3+
# (C) Copyright IBM 2018, 2025.
44
#
55
# This code is licensed under the Apache License, Version 2.0. You may
66
# obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -39,6 +39,7 @@
3939
Optimizer,
4040
P_BFGS,
4141
POWELL,
42+
SBPLX,
4243
SLSQP,
4344
SPSA,
4445
QNSPSA,
@@ -221,6 +222,7 @@ def test_scipy_optimizer_parse_bounds(self):
221222
(CRS, False),
222223
(DIRECT_L, False),
223224
(DIRECT_L_RAND, False),
225+
(SBPLX, True),
224226
)
225227
@unpack
226228
def test_nlopt(self, optimizer_cls, use_bound):

0 commit comments

Comments
 (0)