qiskit-community / qiskit-algorithms, build 16793992272 (push, github, web-flow)
06 Aug 2025 07:44PM UTC. Coverage: 90.135% (-0.3%) from 90.475%
Commit: Update copyright dates following #197 merge (#236)
6624 of 7349 relevant lines covered (90.13%), 0.9 hits per line

Source file: /qiskit_algorithms/gradients/finite_diff/finite_diff_sampler_gradient.py (92.47% of lines covered)
Uncovered in this file: the two validation raises in __init__, the exception handler around the Sampler job, and the per-circuit shots fallback loop in _run.
# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2022, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""Gradient of a Sampler with the finite difference method."""

from __future__ import annotations

from collections import defaultdict
from typing import Literal, Sequence, Any

import numpy as np

from qiskit.circuit import Parameter, QuantumCircuit
from qiskit.primitives import BaseSamplerV2

from ..base.base_sampler_gradient import BaseSamplerGradient
from ..base.sampler_gradient_result import SamplerGradientResult
from ...custom_types import Transpiler

from ...exceptions import AlgorithmError

class FiniteDiffSamplerGradient(BaseSamplerGradient):
    """
    Compute the gradients of the sampling probability by the finite difference method [1].

    **Reference:**
    [1] `Finite difference method <https://en.wikipedia.org/wiki/Finite_difference_method>`_
    """

    def __init__(
        self,
        sampler: BaseSamplerV2,
        epsilon: float,
        shots: int | None = None,
        *,
        method: Literal["central", "forward", "backward"] = "central",
        transpiler: Transpiler | None = None,
        transpiler_options: dict[str, Any] | None = None,
    ):
        r"""
        Args:
            sampler: The sampler used to compute the gradients.
            epsilon: The offset size for the finite difference gradients.
            shots: Number of shots to be used by the underlying Sampler. If provided, this number
                takes precedence over the default precision of the primitive. If None, the default
                number of shots of the primitive is used.
            method: The computation method of the gradients.

                    - ``central`` computes :math:`\frac{f(x+e)-f(x-e)}{2e}`,
                    - ``forward`` computes :math:`\frac{f(x+e) - f(x)}{e}`,
                    - ``backward`` computes :math:`\frac{f(x)-f(x-e)}{e}`

                where :math:`e` is epsilon.
            transpiler: An optional object with a `run` method used to transpile the circuits
                that this algorithm executes. If set to `None`, the circuits are not transpiled.
            transpiler_options: A dictionary of options to be passed to the transpiler's `run`
                method as keyword arguments.

        Raises:
            ValueError: If ``epsilon`` is not positive.
            TypeError: If ``method`` is invalid.
        """
        if epsilon <= 0:
            raise ValueError(f"epsilon ({epsilon}) should be positive.")
        self._epsilon = epsilon
        if method not in ("central", "forward", "backward"):
            raise TypeError(
                f"The argument method should be central, forward, or backward: {method} was given."
            )
        self._method = method
        super().__init__(
            sampler, shots, transpiler=transpiler, transpiler_options=transpiler_options
        )

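    # Note: the public entry point run() is provided by BaseSamplerGradient and
    # delegates to _run below, which batches all finite-difference evaluations
    # into a single Sampler job before differencing the resulting distributions.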
    def _run(
        self,
        circuits: Sequence[QuantumCircuit],
        parameter_values: Sequence[Sequence[float]],
        parameters: Sequence[Sequence[Parameter] | None] | None,
        *,
        shots: int | Sequence[int] | None,
    ) -> SamplerGradientResult:
        """Compute the sampler gradients on the given circuits."""
        metadata = []
        all_n = []
        has_transformed_shots = False

        if isinstance(shots, int) or shots is None:
            shots = [shots] * len(circuits)
            has_transformed_shots = True

        if self._transpiler is not None:
            circuits = self._transpiler.run(circuits, **self._transpiler_options)

        pubs = []
        for circuit, parameter_values_, parameters_, shots_ in zip(
            circuits, parameter_values, parameters, shots
        ):
            # Indices of parameters to be differentiated
            indices = [circuit.parameters.data.index(p) for p in parameters_]
            metadata.append({"parameters": parameters_})
            # Combine inputs into a single job to reduce overhead.
            offset = np.identity(circuit.num_parameters)[indices, :]
            if self._method == "central":
                plus = parameter_values_ + self._epsilon * offset
                minus = parameter_values_ - self._epsilon * offset
                n = 2 * len(indices)
                all_n.append(n)
                pubs.append((circuit, plus.tolist() + minus.tolist(), shots_))
            elif self._method == "forward":
                plus = parameter_values_ + self._epsilon * offset
                n = len(indices) + 1
                pubs.append((circuit, [parameter_values_] + plus.tolist(), shots_))
                all_n.append(n)
            elif self._method == "backward":
                minus = parameter_values_ - self._epsilon * offset
                n = len(indices) + 1
                pubs.append((circuit, [parameter_values_] + minus.tolist(), shots_))
                all_n.append(n)
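        # Hypothetical shape check: with method="central" and three differentiated
        # parameters, `plus` and `minus` each hold three shifted parameter rows, so
        # the pub carries six parameter sets and n == 6; for "forward" and
        # "backward", the unshifted values are prepended and n == len(indices) + 1.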

        # Run the single job with all circuits.
        job = self._sampler.run(pubs)
        try:
            results = job.result()
        except Exception as exc:
            raise AlgorithmError("Sampler job failed.") from exc

        # Compute the gradients.
        gradients = []

        for n, result_n in zip(all_n, results):
            gradient = []
            result = [
                {label: value / res.num_shots for label, value in res.get_int_counts().items()}
                for res in getattr(result_n.data, next(iter(result_n.data)))
            ]
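            # `result` now holds one probability dictionary per executed parameter
            # set: integer-keyed counts divided by the shot count of that execution.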
            if self._method == "central":
                for dist_plus, dist_minus in zip(result[: n // 2], result[n // 2 :]):
                    grad_dist: dict[int, float] = defaultdict(float)
                    for key, value in dist_plus.items():
                        grad_dist[key] += value / (2 * self._epsilon)
                    for key, value in dist_minus.items():
                        grad_dist[key] -= value / (2 * self._epsilon)
                    gradient.append(dict(grad_dist))
            elif self._method == "forward":
                dist_zero = result[0]
                for dist_plus in result[1:]:
                    grad_dist = defaultdict(float)
                    for key, value in dist_plus.items():
                        grad_dist[key] += value / self._epsilon
                    for key, value in dist_zero.items():
                        grad_dist[key] -= value / self._epsilon
                    gradient.append(dict(grad_dist))
            elif self._method == "backward":
                dist_zero = result[0]
                for dist_minus in result[1:]:
                    grad_dist = defaultdict(float)
                    for key, value in dist_zero.items():
                        grad_dist[key] += value / self._epsilon
                    for key, value in dist_minus.items():
                        grad_dist[key] -= value / self._epsilon
                    gradient.append(dict(grad_dist))

            gradients.append(gradient)
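            # Hypothetical numeric check of the central rule above: with
            # epsilon = 0.1, dist_plus = {0: 0.6, 1: 0.4} and
            # dist_minus = {0: 0.5, 1: 0.5} give
            # grad_dist = {0: (0.6 - 0.5) / 0.2, 1: (0.4 - 0.5) / 0.2}
            #           = {0: 0.5, 1: -0.5}.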

        if has_transformed_shots:
            shots = shots[0]

            if shots is None:
                shots = results[0].metadata["shots"]
        else:
            for i, (shots_, result) in enumerate(zip(shots, results)):
                if shots_ is None:
                    shots[i] = result.metadata["shots"]

        return SamplerGradientResult(gradients=gradients, metadata=metadata, shots=shots)
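
For orientation, here is a minimal usage sketch of the class above. It is not part of the covered file: it assumes qiskit and qiskit-algorithms are installed, uses qiskit's StatevectorSampler as the BaseSamplerV2 implementation, relies on the run() method inherited from BaseSamplerGradient, and picks the one-qubit circuit and epsilon value purely for illustration.

# Minimal usage sketch; StatevectorSampler, the circuit, and epsilon=1e-2 are
# illustrative assumptions, and run() is inherited from BaseSamplerGradient.
import numpy as np

from qiskit.circuit import Parameter, QuantumCircuit
from qiskit.primitives import StatevectorSampler

from qiskit_algorithms.gradients import FiniteDiffSamplerGradient

theta = Parameter("theta")
circuit = QuantumCircuit(1)
circuit.rx(theta, 0)
circuit.measure_all()

# Central differences: each gradient entry is built from f(x + e) and f(x - e).
gradient = FiniteDiffSamplerGradient(StatevectorSampler(), epsilon=1e-2, method="central")
job = gradient.run([circuit], [[np.pi / 4]], parameters=[[theta]])
result = job.result()

# One list per circuit, one dict per differentiated parameter, mapping integer
# measurement outcomes to d(probability)/d(theta).
print(result.gradients)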