
qiskit-community / qiskit-algorithms / build 7891798718 (push, via GitHub web-flow)

13 Feb 2024 07:33PM UTC. Coverage: 90.066% (remained the same); 6446 of 7157 relevant lines covered (90.07%), 0.9 hits per line.

Remove remaining qiskit.org links (backport #152) (#154)

Co-authored-by: Eric Arellano <14852634+Eric-Arellano@users.noreply.github.com>
Co-authored-by: Steve Wood <40241007+woodsp-ibm@users.noreply.github.com>

Source File: /qiskit_algorithms/optimizers/adam_amsgrad.py (62.14% of lines covered)
# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2019, 2023.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""The Adam and AMSGRAD optimizers."""
from __future__ import annotations

from collections.abc import Callable
from typing import Any
import os

import csv
import numpy as np
from .optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT

# pylint: disable=invalid-name


class ADAM(Optimizer):
    """Adam and AMSGRAD optimizers.

    Adam [1] is a gradient-based optimization algorithm that relies on adaptive estimates of
    lower-order moments. The algorithm requires little memory and is invariant to diagonal
    rescaling of the gradients. Furthermore, it is able to cope with non-stationary objective
    functions and noisy and/or sparse gradients.

    AMSGRAD [2] (a variant of Adam) uses a 'long-term memory' of past gradients and, thereby,
    improves convergence properties.

    References:

        [1]: Kingma, Diederik & Ba, Jimmy (2014), Adam: A Method for Stochastic Optimization.
             `arXiv:1412.6980 <https://arxiv.org/abs/1412.6980>`_

        [2]: Sashank J. Reddi and Satyen Kale and Sanjiv Kumar (2018),
             On the Convergence of Adam and Beyond.
             `arXiv:1904.09237 <https://arxiv.org/abs/1904.09237>`_
    """

    _OPTIONS = [
        "maxiter",
        "tol",
        "lr",
        "beta_1",
        "beta_2",
        "noise_factor",
        "eps",
        "amsgrad",
        "snapshot_dir",
    ]

    def __init__(
        self,
        maxiter: int = 10000,
        tol: float = 1e-6,
        lr: float = 1e-3,
        beta_1: float = 0.9,
        beta_2: float = 0.99,
        noise_factor: float = 1e-8,
        eps: float = 1e-10,
        amsgrad: bool = False,
        snapshot_dir: str | None = None,
    ) -> None:
        """
        Args:
            maxiter: Maximum number of iterations.
            tol: Tolerance for termination.
            lr: Value >= 0, learning rate.
            beta_1: Value in range 0 to 1, generally close to 1.
            beta_2: Value in range 0 to 1, generally close to 1.
            noise_factor: Value >= 0, noise factor.
            eps: Value >= 0, epsilon to be used for finite differences if no analytic
                gradient method is given.
            amsgrad: True to use AMSGRAD, False if not.
            snapshot_dir: If not None, save the optimizer's parameters
                after every step to the given directory.
        """
        super().__init__()
        for k, v in list(locals().items()):
            if k in self._OPTIONS:
                self._options[k] = v
        self._maxiter = maxiter
        self._snapshot_dir = snapshot_dir
        self._tol = tol
        self._lr = lr
        self._beta_1 = beta_1
        self._beta_2 = beta_2
        self._noise_factor = noise_factor
        self._eps = eps
        self._amsgrad = amsgrad

        # runtime variables
        self._t = 0  # time steps
        self._m = np.zeros(1)
        self._v = np.zeros(1)
        if self._amsgrad:
            self._v_eff = np.zeros(1)

        if self._snapshot_dir:
            # pylint: disable=unspecified-encoding
            with open(os.path.join(self._snapshot_dir, "adam_params.csv"), mode="w") as csv_file:
                if self._amsgrad:
                    fieldnames = ["v", "v_eff", "m", "t"]
                else:
                    fieldnames = ["v", "m", "t"]
                writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                writer.writeheader()

    @property
    def settings(self) -> dict[str, Any]:
        return {
            "maxiter": self._maxiter,
            "tol": self._tol,
            "lr": self._lr,
            "beta_1": self._beta_1,
            "beta_2": self._beta_2,
            "noise_factor": self._noise_factor,
            "eps": self._eps,
            "amsgrad": self._amsgrad,
            "snapshot_dir": self._snapshot_dir,
        }

    def get_support_level(self):
        """Return support level dictionary"""
        return {
            "gradient": OptimizerSupportLevel.supported,
            "bounds": OptimizerSupportLevel.ignored,
            "initial_point": OptimizerSupportLevel.supported,
        }

    def save_params(self, snapshot_dir: str) -> None:
        """Save the current iteration parameters to a file called ``adam_params.csv``.

        Note:

            The current parameters are appended to the file, if it exists already.
            The file is not overwritten.

        Args:
            snapshot_dir: The directory to store the file in.
        """
        if self._amsgrad:
            # pylint: disable=unspecified-encoding
            with open(os.path.join(snapshot_dir, "adam_params.csv"), mode="a") as csv_file:
                fieldnames = ["v", "v_eff", "m", "t"]
                writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                writer.writerow({"v": self._v, "v_eff": self._v_eff, "m": self._m, "t": self._t})
        else:
            # pylint: disable=unspecified-encoding
            with open(os.path.join(snapshot_dir, "adam_params.csv"), mode="a") as csv_file:
                fieldnames = ["v", "m", "t"]
                writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
                writer.writerow({"v": self._v, "m": self._m, "t": self._t})

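    # Note on the snapshot format (added commentary, not part of the original file):
    # ``save_params`` appends one CSV row per iteration whose cells are the ``str()``
    # renderings of the numpy arrays ``v`` (and ``v_eff`` when AMSGRAD is enabled) and
    # ``m``, plus the step counter ``t``. ``load_params`` below reverses this by trimming
    # the enclosing brackets from each cell and re-parsing it with ``np.fromstring``,
    # keeping only the last row of the file.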
    def load_params(self, load_dir: str) -> None:
        """Load iteration parameters from a file called ``adam_params.csv``.

        Args:
            load_dir: The directory containing ``adam_params.csv``.
        """
        # pylint: disable=unspecified-encoding
        with open(os.path.join(load_dir, "adam_params.csv")) as csv_file:
            if self._amsgrad:
                fieldnames = ["v", "v_eff", "m", "t"]
            else:
                fieldnames = ["v", "m", "t"]
            reader = csv.DictReader(csv_file, fieldnames=fieldnames)
            for line in reader:
                v = line["v"]
                if self._amsgrad:
                    v_eff = line["v_eff"]
                m = line["m"]
                t = line["t"]

        v = v[1:-1]
        self._v = np.fromstring(v, dtype=float, sep=" ")
        if self._amsgrad:
            v_eff = v_eff[1:-1]
            self._v_eff = np.fromstring(v_eff, dtype=float, sep=" ")
        m = m[1:-1]
        self._m = np.fromstring(m, dtype=float, sep=" ")
        t = t[1:-1]
        self._t = np.fromstring(t, dtype=int, sep=" ")

    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Callable[[POINT], POINT] | None = None,
        bounds: list[tuple[float, float]] | None = None,
    ) -> OptimizerResult:
        """Minimize the scalar function.

        Args:
            fun: The scalar function to minimize.
            x0: The initial point for the minimization.
            jac: The gradient of the scalar function ``fun``.
            bounds: Bounds for the variables of ``fun``. This argument might be ignored if the
                optimizer does not support bounds.

        Returns:
            The result of the optimization, containing e.g. the result as attribute ``x``.
        """
        if jac is None:
            # fall back to a finite-difference gradient with step size ``eps``
            jac = Optimizer.wrap_function(Optimizer.gradient_num_diff, (fun, self._eps))

        derivative = jac(x0)
        self._t = 0
        self._m = np.zeros(np.shape(derivative))
        self._v = np.zeros(np.shape(derivative))
        if self._amsgrad:
            self._v_eff = np.zeros(np.shape(derivative))

        params = params_new = x0
        while self._t < self._maxiter:
            if self._t > 0:
                derivative = jac(params)
            self._t += 1
            # exponential moving averages of the gradient and the squared gradient
            self._m = self._beta_1 * self._m + (1 - self._beta_1) * derivative
            self._v = self._beta_2 * self._v + (1 - self._beta_2) * derivative * derivative
            # bias-corrected effective learning rate
            lr_eff = self._lr * np.sqrt(1 - self._beta_2**self._t) / (1 - self._beta_1**self._t)
            if not self._amsgrad:
                params_new = params - lr_eff * self._m.flatten() / (
                    np.sqrt(self._v.flatten()) + self._noise_factor
                )
            else:
                # AMSGRAD keeps a running maximum of the second-moment estimate
                self._v_eff = np.maximum(self._v_eff, self._v)
                params_new = params - lr_eff * self._m.flatten() / (
                    np.sqrt(self._v_eff.flatten()) + self._noise_factor
                )

            if self._snapshot_dir:
                self.save_params(self._snapshot_dir)

            # check termination
            if np.linalg.norm(params - params_new) < self._tol:
                break

            params = params_new

        result = OptimizerResult()
        result.x = params_new
        result.fun = fun(params_new)
        result.nfev = self._t
        return result
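
For orientation, here is a minimal usage sketch (not part of the file above; it assumes qiskit-algorithms is installed and uses an illustrative objective function and starting point). It minimizes a simple quadratic with ADAM and reads the fields that minimize() sets on the returned OptimizerResult. Per the coverage report, the uncovered lines in this file are concentrated in the snapshot_dir save/load helpers and the AMSGRAD branches, so a run with amsgrad=True as below also exercises one of those branches.

import numpy as np

from qiskit_algorithms.optimizers import ADAM


def objective(x):
    # f(x) = sum of squares, minimized at the origin (illustrative only)
    return float(np.sum(np.asarray(x) ** 2))


optimizer = ADAM(maxiter=200, lr=0.1, amsgrad=True)
result = optimizer.minimize(fun=objective, x0=np.array([1.0, -0.5]))

print(result.x)     # parameters found by the optimizer
print(result.fun)   # objective value at result.x
print(result.nfev)  # iteration count recorded by this implementation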