qiskit-community / qiskit-algorithms
Build 15778287374 (github / web-flow)

20 Jun 2025 11:50AM UTC coverage: 89.98% (-0.5%) from 90.448%

Pull Request #197: Added V2 and ISA support
Merge 0fdef580b into bc071ca43

558 of 609 new or added lines in 51 files covered (91.63%).

11 existing lines in 3 files now uncovered.

6439 of 7156 relevant lines covered (89.98%).

0.9 hits per line.

Source file: /qiskit_algorithms/gradients/finite_diff/finite_diff_sampler_gradient.py (92.22% covered)

# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2022, 2025.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""Gradient of Sampler with Finite difference method."""

from __future__ import annotations

from collections import defaultdict
from typing import Literal, Sequence

import numpy as np

from qiskit.circuit import Parameter, QuantumCircuit
from qiskit.primitives import BaseSamplerV2

from ..base.base_sampler_gradient import BaseSamplerGradient
from ..base.sampler_gradient_result import SamplerGradientResult

from ...exceptions import AlgorithmError


class FiniteDiffSamplerGradient(BaseSamplerGradient):
    """
    Compute the gradients of the sampling probability by finite difference method [1].

    **Reference:**
    [1] `Finite difference method <https://en.wikipedia.org/wiki/Finite_difference_method>`_
    """

    def __init__(
        self,
        sampler: BaseSamplerV2,
        epsilon: float,
        shots: int | None = None,
        *,
        method: Literal["central", "forward", "backward"] = "central",
    ):
        r"""
        Args:
            sampler: The sampler used to compute the gradients.
            epsilon: The offset size for the finite difference gradients.
            shots: Number of shots to be used by the underlying Sampler. If provided, this number
                takes precedence over the default precision of the primitive. If None, the default
                number of shots of the primitive is used.
            method: The computation method of the gradients.

                    - ``central`` computes :math:`\frac{f(x+e)-f(x-e)}{2e}`,
                    - ``forward`` computes :math:`\frac{f(x+e) - f(x)}{e}`,
                    - ``backward`` computes :math:`\frac{f(x)-f(x-e)}{e}`

                where :math:`e` is epsilon.

        Raises:
            ValueError: If ``epsilon`` is not positive.
            TypeError: If ``method`` is invalid.
        """
        if epsilon <= 0:
            raise ValueError(f"epsilon ({epsilon}) should be positive.")
        self._epsilon = epsilon
        if method not in ("central", "forward", "backward"):
            raise TypeError(
                f"The argument method should be central, forward, or backward: {method} is given."
            )
        self._method = method
        super().__init__(sampler, shots)

    def _run(
        self,
        circuits: Sequence[QuantumCircuit],
        parameter_values: Sequence[Sequence[float]],
        parameters: Sequence[Sequence[Parameter] | None] | None,
        *,
        shots: int | Sequence[int] | None,
    ) -> SamplerGradientResult:
        """Compute the sampler gradients on the given circuits."""
        metadata = []
        all_n = []
        has_transformed_shots = False

        if isinstance(shots, int) or shots is None:
            shots = [shots] * len(circuits)
            has_transformed_shots = True

        pubs = []
        for circuit, parameter_values_, parameters_, shots_ in zip(
            circuits, parameter_values, parameters, shots
        ):
            # Indices of parameters to be differentiated
            indices = [circuit.parameters.data.index(p) for p in parameters_]
            metadata.append({"parameters": parameters_})
            # Combine inputs into a single job to reduce overhead.
            offset = np.identity(circuit.num_parameters)[indices, :]
            if self._method == "central":
                plus = parameter_values_ + self._epsilon * offset
                minus = parameter_values_ - self._epsilon * offset
                n = 2 * len(indices)
                all_n.append(n)
                pubs.append((circuit, plus.tolist() + minus.tolist(), shots_))
            elif self._method == "forward":
                plus = parameter_values_ + self._epsilon * offset
                n = len(indices) + 1
                pubs.append((circuit, [parameter_values_] + plus.tolist(), shots_))
                all_n.append(n)
            elif self._method == "backward":
                minus = parameter_values_ - self._epsilon * offset
                n = len(indices) + 1
                pubs.append((circuit, [parameter_values_] + minus.tolist(), shots_))
                all_n.append(n)

        # Run the single job with all circuits.
        job = self._sampler.run(pubs)
        try:
            results = job.result()
        except Exception as exc:
            raise AlgorithmError("Sampler job failed.") from exc

        # Compute the gradients.
        gradients = []

        for n, result_n in zip(all_n, results):
            gradient = []
            result = [
                {label: value / res.num_shots for label, value in res.get_int_counts().items()}
                for res in getattr(result_n.data, next(iter(result_n.data)))
            ]
            if self._method == "central":
                for dist_plus, dist_minus in zip(result[: n // 2], result[n // 2 :]):
                    grad_dist: dict[int, float] = defaultdict(float)
                    for key, value in dist_plus.items():
                        grad_dist[key] += value / (2 * self._epsilon)
                    for key, value in dist_minus.items():
                        grad_dist[key] -= value / (2 * self._epsilon)
                    gradient.append(dict(grad_dist))
            elif self._method == "forward":
                dist_zero = result[0]
                for dist_plus in result[1:]:
                    grad_dist = defaultdict(float)
                    for key, value in dist_plus.items():
                        grad_dist[key] += value / self._epsilon
                    for key, value in dist_zero.items():
                        grad_dist[key] -= value / self._epsilon
                    gradient.append(dict(grad_dist))
            elif self._method == "backward":
                dist_zero = result[0]
                for dist_minus in result[1:]:
                    grad_dist = defaultdict(float)
                    for key, value in dist_zero.items():
                        grad_dist[key] += value / self._epsilon
                    for key, value in dist_minus.items():
                        grad_dist[key] -= value / self._epsilon
                    gradient.append(dict(grad_dist))

            gradients.append(gradient)

        if has_transformed_shots:
            shots = shots[0]

            if shots is None:
                shots = results[0].metadata["shots"]
        else:
            for i, (shots_, result) in enumerate(zip(shots, results)):
                if shots_ is None:
                    shots[i] = result.metadata["shots"]

        return SamplerGradientResult(gradients=gradients, metadata=metadata, shots=shots)
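
For context, a minimal usage sketch of the class above follows. It is not part of the covered source. It assumes qiskit.primitives.StatevectorSampler as the BaseSamplerV2 implementation and that the public run method inherited from BaseSamplerGradient forwards to _run and returns a job whose result() is a SamplerGradientResult; the circuit, epsilon value, and printed gradient are illustrative only.

# Minimal usage sketch (assumptions: StatevectorSampler as the V2 sampler;
# the inherited run() accepts circuits and parameter values and returns a job
# whose result() is a SamplerGradientResult). Illustrative only.
import numpy as np

from qiskit.circuit import Parameter, QuantumCircuit
from qiskit.primitives import StatevectorSampler

from qiskit_algorithms.gradients import FiniteDiffSamplerGradient

# One-qubit circuit whose outcome probabilities depend on a single parameter.
theta = Parameter("theta")
circuit = QuantumCircuit(1)
circuit.rx(theta, 0)
circuit.measure_all()

sampler = StatevectorSampler()
gradient = FiniteDiffSamplerGradient(sampler, epsilon=0.01, method="central")

# Gradient of the sampling probabilities with respect to theta at theta = pi/2:
# analytically dP(0)/d(theta) = -0.5 and dP(1)/d(theta) = +0.5 at this point.
result = gradient.run([circuit], [[np.pi / 2]]).result()
print(result.gradients)  # roughly [[{0: -0.5, 1: 0.5}]], up to shot noise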