• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

qiskit-community / qiskit-algorithms / 15802199099

13 Jun 2025 01:21PM CUT coverage: 90.448%. Coverage remained the same.
15802199099

push

github

web-flow
Fix getter/setter type mismatch (#232)

1 of 1 new or added line in 1 file covered. (100.0%)

6401 of 7077 relevant lines covered (90.45%)

0.9 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

42.62
/qiskit_algorithms/optimizers/nlopts/nloptimizer.py
1
# This code is part of a Qiskit project.
2
#
3
# (C) Copyright IBM 2018, 2025.
4
#
5
# This code is licensed under the Apache License, Version 2.0. You may
6
# obtain a copy of this license in the LICENSE.txt file in the root directory
7
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8
#
9
# Any modifications or derivative works of this code must retain this
10
# copyright notice, and modified files need to carry a notice indicating
11
# that they have been altered from the originals.
12

13
"""Minimize using objective function"""
14
from __future__ import annotations
1✔
15

16
from collections.abc import Callable
1✔
17
from enum import Enum
1✔
18
from abc import abstractmethod
1✔
19
import logging
1✔
20
import numpy as np
1✔
21

22
from qiskit_algorithms.utils import optionals as _optionals
1✔
23
from ..optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT
1✔
24

25
# Module-level logger; callers configure handlers/levels externally.
logger = logging.getLogger(__name__)
26

27

28
class NLoptOptimizerType(Enum):
    """Enumeration of the NLopt algorithms supported by this package.

    Per NLopt's naming convention, ``GN_*`` members are global
    derivative-free algorithms and ``LN_*`` members are local
    derivative-free algorithms.
    """

    GN_CRS2_LM = 1
    GN_DIRECT_L_RAND = 2
    GN_DIRECT_L = 3
    GN_ESCH = 4
    GN_ISRES = 5
    LN_SBPLX = 6

38

39
@_optionals.HAS_NLOPT.require_in_instance
class NLoptOptimizer(Optimizer):
    """
    NLopt global and local optimizer base class.

    Subclasses choose the concrete NLopt algorithm by implementing
    :meth:`get_nlopt_optimizer`.
    """

    _OPTIONS = ["max_evals"]

    def __init__(self, max_evals: int = 1000) -> None:
        """
        Args:
            max_evals: Maximum allowed number of function evaluations.

        Raises:
            MissingOptionalLibraryError: NLopt library not installed.
        """
        # Imported lazily so the module can be imported without nlopt; the
        # class decorator raises if nlopt is missing when instantiating.
        import nlopt

        super().__init__()
        # Store the option directly rather than scraping locals(); the
        # previous locals() loop was fragile and left `max_evals` looking
        # unused (requiring a pylint disable).
        self._options["max_evals"] = max_evals

        # Map our enum members onto the corresponding nlopt algorithm codes.
        self._optimizer_names = {
            NLoptOptimizerType.GN_CRS2_LM: nlopt.GN_CRS2_LM,
            NLoptOptimizerType.GN_DIRECT_L_RAND: nlopt.GN_DIRECT_L_RAND,
            NLoptOptimizerType.GN_DIRECT_L: nlopt.GN_DIRECT_L,
            NLoptOptimizerType.GN_ESCH: nlopt.GN_ESCH,
            NLoptOptimizerType.GN_ISRES: nlopt.GN_ISRES,
            NLoptOptimizerType.LN_SBPLX: nlopt.LN_SBPLX,
        }

    @abstractmethod
    def get_nlopt_optimizer(self) -> NLoptOptimizerType:
        """Return the NLopt optimizer enum type for this subclass."""
        raise NotImplementedError

    def get_support_level(self):
        """Return the support level dictionary for gradient/bounds/initial point."""
        return {
            # Gradients are ignored: the algorithms used here are derivative-free.
            "gradient": OptimizerSupportLevel.ignored,
            "bounds": OptimizerSupportLevel.supported,
            "initial_point": OptimizerSupportLevel.required,
        }

    @property
    def settings(self):
        """Return the settings of this optimizer as a dictionary."""
        return {"max_evals": self._options.get("max_evals", 1000)}

    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Callable[[POINT], POINT] | None = None,
        bounds: list[tuple[float, float]] | None = None,
    ) -> OptimizerResult:
        """Minimize ``fun`` starting from ``x0`` with the configured NLopt algorithm.

        Args:
            fun: Objective function to minimize.
            x0: Initial point.
            jac: Gradient of ``fun``; ignored (the algorithms are derivative-free).
            bounds: Per-parameter ``(lower, upper)`` bounds; ``None`` entries
                are replaced by ``-3*pi`` / ``3*pi`` respectively.

        Returns:
            The optimization result: optimal point, optimal value and the
            number of objective evaluations performed.
        """
        import nlopt

        x0 = np.asarray(x0)

        if bounds is None:
            bounds = [(None, None)] * x0.size

        # Missing bounds are clamped to +/- 3*pi before handing them to NLopt.
        threshold = 3 * np.pi
        low = [(l if l is not None else -threshold) for (l, u) in bounds]
        high = [(u if u is not None else threshold) for (l, u) in bounds]

        name = self._optimizer_names[self.get_nlopt_optimizer()]
        opt = nlopt.opt(name, len(low))
        logger.debug(opt.get_algorithm_name())

        opt.set_lower_bounds(low)
        opt.set_upper_bounds(high)

        eval_count = 0

        def wrap_objfunc_global(x, _grad):
            # NLopt always passes a gradient buffer; it is deliberately
            # ignored here since the algorithms are derivative-free.
            nonlocal eval_count
            eval_count += 1
            return fun(x)

        opt.set_min_objective(wrap_objfunc_global)
        opt.set_maxeval(self._options.get("max_evals", 1000))

        xopt = opt.optimize(x0)
        minf = opt.last_optimum_value()

        logger.debug("Global minimize found %s eval count %s", minf, eval_count)

        result = OptimizerResult()
        result.x = xopt
        result.fun = minf
        result.nfev = eval_count

        return result
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc