
qiskit-community / qiskit-algorithms — Coveralls build 13954979262
19 Mar 2025 07:21PM CUT — coverage: 90.454% (-0.02%) from 90.477%

Pull Request #226: Improve custom equals fns (merge 4f82d7263 into ca48697e8, via github / web-flow)

4 of 6 new or added lines in 2 files covered (66.67%).
6396 of 7071 relevant lines covered (90.45%); 0.9 hits per line.

Source file: /qiskit_algorithms/optimizers/adam_amsgrad.py — 61.76% of its lines covered. The uncovered lines are concentrated in the snapshot_dir CSV handling (the snapshot block in __init__, save_params, load_params) and in the AMSGRAD branches of minimize.
# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2019, 2024.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""The Adam and AMSGRAD optimizers."""
from __future__ import annotations

from collections.abc import Callable
from typing import Any
import os

import csv
import numpy as np
from .optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT

# pylint: disable=invalid-name


class ADAM(Optimizer):
    """Adam and AMSGRAD optimizers.

    Adam [1] is a gradient-based optimization algorithm that relies on adaptive estimates of
    lower-order moments. The algorithm requires little memory and is invariant to diagonal
    rescaling of the gradients. Furthermore, it is able to cope with non-stationary objective
    functions and noisy and/or sparse gradients.

    AMSGRAD [2] (a variant of Adam) uses a 'long-term memory' of past gradients and, thereby,
    improves convergence properties.

    References:

        [1]: Kingma, Diederik & Ba, Jimmy (2014), Adam: A Method for Stochastic Optimization.
             `arXiv:1412.6980 <https://arxiv.org/abs/1412.6980>`_

        [2]: Sashank J. Reddi and Satyen Kale and Sanjiv Kumar (2018),
             On the Convergence of Adam and Beyond.
             `arXiv:1904.09237 <https://arxiv.org/abs/1904.09237>`_
    """

    _OPTIONS = [
        "maxiter",
        "tol",
        "lr",
        "beta_1",
        "beta_2",
        "noise_factor",
        "eps",
        "amsgrad",
        "snapshot_dir",
    ]

    # pylint: disable=too-many-positional-arguments
    def __init__(
        self,
        maxiter: int = 10000,
        tol: float = 1e-6,
        lr: float = 1e-3,
        beta_1: float = 0.9,
        beta_2: float = 0.99,
        noise_factor: float = 1e-8,
        eps: float = 1e-10,
        amsgrad: bool = False,
        snapshot_dir: str | None = None,
    ) -> None:
        """
        Args:
            maxiter: Maximum number of iterations.
            tol: Tolerance for termination.
            lr: Value >= 0, learning rate.
            beta_1: Value in range 0 to 1, generally close to 1.
            beta_2: Value in range 0 to 1, generally close to 1.
            noise_factor: Value >= 0, noise factor.
            eps: Value >= 0, epsilon to be used for finite differences if no analytic
                gradient method is given.
            amsgrad: True to use AMSGRAD, False if not.
            snapshot_dir: If not None, save the optimizer's parameters
                after every step to the given directory.
        """
        super().__init__()
        for k, v in list(locals().items()):
            if k in self._OPTIONS:
                self._options[k] = v
        self._maxiter = maxiter
        self._snapshot_dir = snapshot_dir
        self._tol = tol
        self._lr = lr
        self._beta_1 = beta_1
        self._beta_2 = beta_2
        self._noise_factor = noise_factor
        self._eps = eps
        self._amsgrad = amsgrad

        # runtime variables
        self._t = 0  # time steps
        self._m = np.zeros(1)
        self._v = np.zeros(1)
        if self._amsgrad:
            self._v_eff = np.zeros(1)

        if self._snapshot_dir:
            # pylint: disable=unspecified-encoding
            with open(os.path.join(self._snapshot_dir, "adam_params.csv"), mode="w") as csv_file:
                if self._amsgrad:
                    fieldnames = ["v", "v_eff", "m", "t"]
                else:
                    fieldnames = ["v", "m", "t"]
                writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                writer.writeheader()
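                # The snapshot file is created (truncated) here with just the header row;
                # ``save_params`` then appends one row per iteration while ``minimize`` runs.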

    @property
    def settings(self) -> dict[str, Any]:
        return {
            "maxiter": self._maxiter,
            "tol": self._tol,
            "lr": self._lr,
            "beta_1": self._beta_1,
            "beta_2": self._beta_2,
            "noise_factor": self._noise_factor,
            "eps": self._eps,
            "amsgrad": self._amsgrad,
            "snapshot_dir": self._snapshot_dir,
        }

    def get_support_level(self):
        """Return support level dictionary"""
        return {
            "gradient": OptimizerSupportLevel.supported,
            "bounds": OptimizerSupportLevel.ignored,
            "initial_point": OptimizerSupportLevel.supported,
        }
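        # Note: ``bounds`` is reported as ignored, so bounds passed to ``minimize`` have no effect.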

    def save_params(self, snapshot_dir: str) -> None:
        """Save the current iteration parameters to a file called ``adam_params.csv``.

        Note:

            The current parameters are appended to the file, if it exists already.
            The file is not overwritten.

        Args:
            snapshot_dir: The directory to store the file in.
        """
        if self._amsgrad:
            # pylint: disable=unspecified-encoding
            with open(os.path.join(snapshot_dir, "adam_params.csv"), mode="a") as csv_file:
                fieldnames = ["v", "v_eff", "m", "t"]
                writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                writer.writerow({"v": self._v, "v_eff": self._v_eff, "m": self._m, "t": self._t})
        else:
            # pylint: disable=unspecified-encoding
            with open(os.path.join(snapshot_dir, "adam_params.csv"), mode="a") as csv_file:
                fieldnames = ["v", "m", "t"]
                writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                writer.writerow({"v": self._v, "m": self._m, "t": self._t})

    def load_params(self, load_dir: str) -> None:
        """Load iteration parameters from a file called ``adam_params.csv``.

        Args:
            load_dir: The directory containing ``adam_params.csv``.
        """
        # pylint: disable=unspecified-encoding
        with open(os.path.join(load_dir, "adam_params.csv")) as csv_file:
            if self._amsgrad:
                fieldnames = ["v", "v_eff", "m", "t"]
            else:
                fieldnames = ["v", "m", "t"]
            reader = csv.DictReader(csv_file, fieldnames=fieldnames)
            for line in reader:
                v = line["v"]
                if self._amsgrad:
                    v_eff = line["v_eff"]
                m = line["m"]
                t = line["t"]

        v = v[1:-1]
        self._v = np.fromstring(v, dtype=float, sep=" ")
        if self._amsgrad:
            v_eff = v_eff[1:-1]
            self._v_eff = np.fromstring(v_eff, dtype=float, sep=" ")
        m = m[1:-1]
        self._m = np.fromstring(m, dtype=float, sep=" ")
        t = t[1:-1]
        self._t = int(np.fromstring(t, dtype=int, sep=" "))
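    # Snapshot format note: ``save_params`` writes each numpy array through its string
    # representation (e.g. "[0.1 0.2]"), so ``load_params`` strips the surrounding brackets
    # with ``[1:-1]`` and recovers the values via ``np.fromstring(..., sep=" ")``.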

    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Callable[[POINT], POINT] | None = None,
        bounds: list[tuple[float, float]] | None = None,
    ) -> OptimizerResult:
        """Minimize the scalar function.

        Args:
            fun: The scalar function to minimize.
            x0: The initial point for the minimization.
            jac: The gradient of the scalar function ``fun``.
            bounds: Bounds for the variables of ``fun``. This argument might be ignored if the
                optimizer does not support bounds.

        Returns:
            The result of the optimization, containing e.g. the result as attribute ``x``.
        """
        if jac is None:
            jac = Optimizer.wrap_function(
                Optimizer.gradient_num_diff, (fun, self._eps, self._max_evals_grouped)
            )

        derivative = jac(x0)
        self._t = 0
        self._m = np.zeros(np.shape(derivative))
        self._v = np.zeros(np.shape(derivative))
        if self._amsgrad:
            self._v_eff = np.zeros(np.shape(derivative))

        params = params_new = x0
        while self._t < self._maxiter:
            if self._t > 0:
                derivative = jac(params)
            self._t += 1
            self._m = self._beta_1 * self._m + (1 - self._beta_1) * derivative
            self._v = self._beta_2 * self._v + (1 - self._beta_2) * derivative * derivative
            lr_eff = self._lr * np.sqrt(1 - self._beta_2**self._t) / (1 - self._beta_1**self._t)
            if not self._amsgrad:
                params_new = params - lr_eff * self._m.flatten() / (
                    np.sqrt(self._v.flatten()) + self._noise_factor
                )
            else:
                self._v_eff = np.maximum(self._v_eff, self._v)
                params_new = params - lr_eff * self._m.flatten() / (
                    np.sqrt(self._v_eff.flatten()) + self._noise_factor
                )

            if self._snapshot_dir:
                self.save_params(self._snapshot_dir)

            # check termination
            if np.linalg.norm(params - params_new) < self._tol:
                break

            params = params_new

        result = OptimizerResult()
        result.x = params_new
        result.fun = fun(params_new)
        result.nfev = self._t
        return result
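For context beyond the report, a minimal usage sketch (not part of the file above): it assumes qiskit-algorithms is installed and that ADAM is re-exported from ``qiskit_algorithms.optimizers`` (otherwise import it from ``qiskit_algorithms.optimizers.adam_amsgrad``); the objective and gradient below are illustrative.

import numpy as np

from qiskit_algorithms.optimizers import ADAM


def objective(x):
    # simple convex quadratic with minimum at (1, -2)
    return (x[0] - 1.0) ** 2 + (x[1] + 2.0) ** 2


def gradient(x):
    # analytic gradient of the quadratic, passed as ``jac``
    return np.array([2.0 * (x[0] - 1.0), 2.0 * (x[1] + 2.0)])


optimizer = ADAM(maxiter=2000, lr=0.1, tol=1e-8, amsgrad=True)
result = optimizer.minimize(fun=objective, x0=np.array([5.0, 5.0]), jac=gradient)
print(result.x, result.fun, result.nfev)

If ``jac`` is omitted, ``minimize`` falls back to finite differences via ``Optimizer.gradient_num_diff`` with step size ``eps``, as shown at the top of the method.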