
qiskit-community / qiskit-machine-learning / build 8545395904

03 Apr 2024 08:55PM CUT coverage: 92.711% (+0.08%) from 92.636%

Pull Request #793: Patches einsum dimensionality in `torch_connector` - #716 (merge 240d02fb3 into 97513d377, via GitHub web-flow)

25 of 25 new or added lines in 1 file covered. (100.0%)

10 existing lines in 1 file now uncovered.

1908 of 2058 relevant lines covered (92.71%)

0.93 hits per line

Source File: /qiskit_machine_learning/algorithms/trainable_model.py (89.09% covered)
# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2021, 2024.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""A base ML model with a Scikit-Learn like interface."""
from __future__ import annotations

from abc import abstractmethod
from typing import Callable

import numpy as np
from qiskit_algorithms.optimizers import Optimizer, SLSQP, OptimizerResult, Minimizer
from qiskit_algorithms.utils import algorithm_globals

from qiskit_machine_learning import QiskitMachineLearningError
from qiskit_machine_learning.neural_networks import NeuralNetwork
from qiskit_machine_learning.utils.loss_functions import (
    Loss,
    L1Loss,
    L2Loss,
    CrossEntropyLoss,
)

from .objective_functions import ObjectiveFunction
from .serializable_model import SerializableModelMixin


class TrainableModel(SerializableModelMixin):
    """Base class for ML models that defines a scikit-learn-like interface for Estimators."""

    def __init__(
        self,
        neural_network: NeuralNetwork,
        loss: str | Loss = "squared_error",
        optimizer: Optimizer | Minimizer | None = None,
        warm_start: bool = False,
        initial_point: np.ndarray | None = None,
        callback: Callable[[np.ndarray, float], None] | None = None,
    ):
        """
48
        Args:
49
            neural_network: An instance of an quantum neural network. If the neural network has a
50
                one-dimensional output, i.e., `neural_network.output_shape=(1,)`, then it is
51
                expected to return values in [-1, +1] and it can only be used for binary
52
                classification. If the output is multi-dimensional, it is assumed that the result
53
                is a probability distribution, i.e., that the entries are non-negative and sum up
54
                to one. Then there are two options, either one-hot encoding or not. In case of
55
                one-hot encoding, each probability vector resulting a neural network is considered
56
                as one sample and the loss function is applied to the whole vector. Otherwise, each
57
                entry of the probability vector is considered as an individual sample and the loss
58
                function is applied to the index and weighted with the corresponding probability.
59
            loss: A target loss function to be used in training. Default is `squared_error`,
60
                i.e. L2 loss. Can be given either as a string for 'absolute_error' (i.e. L1 Loss),
61
                'squared_error', 'cross_entropy', or as a loss function
62
                implementing the Loss interface.
63
            optimizer: An instance of an optimizer or a callable to be used in training.
64
                Refer to :class:`~qiskit_algorithms.optimizers.Minimizer` for more information on
65
                the callable protocol. When `None` defaults to
66
                :class:`~qiskit_algorithms.optimizers.SLSQP`.
67
            warm_start: Use weights from previous fit to start next fit.
68
            initial_point: Initial point for the optimizer to start from.
69
            callback: A reference to a user's callback function that has two parameters and
70
                returns ``None``. The callback can access intermediate data during training.
71
                On each iteration an optimizer invokes the callback and passes current weights
72
                as an array and a computed value as a float of the objective function being
73
                optimized. This allows to track how well optimization / training process is going on.
74
        Raises:
75
            QiskitMachineLearningError: unknown loss, invalid neural network
76
        """
77
        self._neural_network = neural_network
        if len(neural_network.output_shape) > 1:
            raise QiskitMachineLearningError("Invalid neural network output shape!")
        if isinstance(loss, Loss):
            self._loss = loss
        else:
            loss = loss.lower()
            if loss == "absolute_error":
                self._loss = L1Loss()
            elif loss == "squared_error":
                self._loss = L2Loss()
            elif loss == "cross_entropy":
                self._loss = CrossEntropyLoss()
            else:
                raise QiskitMachineLearningError(f"Unknown loss {loss}!")

        # call the setter that has some additional checks
        self.optimizer = optimizer

        self._warm_start = warm_start
        self._fit_result: OptimizerResult | None = None
        self._initial_point = initial_point
        self._callback = callback

    @property
    def neural_network(self):
        """Returns the underlying neural network."""
        return self._neural_network

    @property
    def loss(self):
        """Returns the underlying loss function."""
        return self._loss

    @property
    def optimizer(self) -> Optimizer | Minimizer:
        """Returns an optimizer to be used in training."""
        return self._optimizer

    @optimizer.setter
    def optimizer(self, optimizer: Optimizer | Minimizer | None = None):
        """Sets the optimizer to use in the training process."""
        if optimizer is None:
            optimizer = SLSQP()
        self._optimizer = optimizer

    @property
    def warm_start(self) -> bool:
        """Returns the warm start flag."""
        return self._warm_start

    @warm_start.setter
    def warm_start(self, warm_start: bool) -> None:
        """Sets the warm start flag."""
        self._warm_start = warm_start

    @property
    def initial_point(self) -> np.ndarray:
        """Returns the current initial point."""
        return self._initial_point

    @initial_point.setter
    def initial_point(self, initial_point: np.ndarray) -> None:
        """Sets the initial point."""
        self._initial_point = initial_point

    @property
    def weights(self) -> np.ndarray:
        """Returns trained weights as a numpy array. The weights can also be queried by calling
        `model.fit_result.x`, but in this case their representation depends on the optimizer used.

        Raises:
            QiskitMachineLearningError: If the model has not been fit.
        """
        self._check_fitted()
        return np.asarray(self._fit_result.x)

    @property
    def fit_result(self) -> OptimizerResult:
        """Returns a resulting object from the optimization procedure. Please refer to the
        documentation of the `OptimizerResult
        <https://qiskit-community.github.io/qiskit-algorithms/stubs/qiskit_algorithms.optimizers.OptimizerResult.html>`_
        class for more details.

        Raises:
            QiskitMachineLearningError: If the model has not been fit.
        """
        self._check_fitted()
        return self._fit_result

    @property
    def callback(self) -> Callable[[np.ndarray, float], None] | None:
        """Return the callback."""
        return self._callback

    @callback.setter
    def callback(self, callback: Callable[[np.ndarray, float], None] | None) -> None:
        """Set the callback."""
        self._callback = callback

    def _check_fitted(self) -> None:
        if self._fit_result is None:
            raise QiskitMachineLearningError("The model has not been fitted yet")

    # pylint: disable=invalid-name
    def fit(self, X: np.ndarray, y: np.ndarray) -> TrainableModel:
        """
        Fit the model to data matrix X and target(s) y.

        Args:
            X: The input data.
            y: The target values.

        Returns:
            self: returns a trained model.

        Raises:
            QiskitMachineLearningError: In case of invalid data (e.g. incompatible with network)
        """
        if not self._warm_start:
            self._fit_result = None

        self._fit_result = self._fit_internal(X, y)
        return self

    @abstractmethod
    # pylint: disable=invalid-name
    def _fit_internal(self, X: np.ndarray, y: np.ndarray) -> OptimizerResult:
        raise NotImplementedError

    @abstractmethod
    def predict(self, X: np.ndarray) -> np.ndarray:
        """
        Predict using the network specified to the model.

        Args:
            X: The input data.
        Raises:
            QiskitMachineLearningError: Model needs to be fit to some training data first
        Returns:
            The predicted classes.
        """
        raise NotImplementedError

    @abstractmethod
    # pylint: disable=invalid-name
    def score(self, X: np.ndarray, y: np.ndarray, sample_weight: np.ndarray | None = None) -> float:
        """
        Returns a score of this model given samples and true values for the samples. In case of
        classification this should be mean accuracy, in case of regression the coefficient of
        determination :math:`R^2` of the prediction.

        Args:
            X: Test samples.
            y: True values for ``X``.
            sample_weight: Sample weights. Default is ``None``.

        Returns:
            a float score of the model.
        """
        raise NotImplementedError

    def _choose_initial_point(self) -> np.ndarray:
        """Choose an initial point for the optimizer. If warm start is set and the model is
        already trained, then the previous fit result is used as the initial point. If an
        initial point is passed, that value is used; otherwise a random location is picked.

        Returns:
            An array as an initial point
        """
        if self._warm_start and self._fit_result is not None:
            self._initial_point = self._fit_result.x
        elif self._initial_point is None:
            self._initial_point = algorithm_globals.random.random(self._neural_network.num_weights)
        return self._initial_point

    def _get_objective(
        self,
        function: ObjectiveFunction,
    ) -> Callable:
        """
        Wraps the given `ObjectiveFunction` to add callback calls, if `callback` is not None, along
        with evaluating the objective value. The returned objective function is passed to
        `Optimizer.minimize()`.
        Args:
            function: The objective function whose value is to be evaluated.

        Returns:
            Objective function to evaluate objective value and optionally invoke callback calls.
        """
        if self._callback is None:
            return function.objective

        def objective(objective_weights):
            objective_value = function.objective(objective_weights)
            self._callback(objective_weights, objective_value)
            return objective_value

        return objective

    def _minimize(self, function: ObjectiveFunction) -> OptimizerResult:
        """
        Minimizes the objective function.

        Args:
            function: a function to minimize.

        Returns:
            An optimization result.
        """
        objective = self._get_objective(function)

        initial_point = self._choose_initial_point()
        if callable(self._optimizer):
            optimizer_result = self._optimizer(
                fun=objective, x0=initial_point, jac=function.gradient
            )
        else:
            optimizer_result = self._optimizer.minimize(
                fun=objective,
                x0=initial_point,
                jac=function.gradient,
            )
        return optimizer_result
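A few usage sketches for the file above follow. `TrainableModel` itself is abstract (`_fit_internal`, `predict`, and `score` are implemented by subclasses), so in practice training goes through a concrete model such as `NeuralNetworkClassifier` or `NeuralNetworkRegressor`. The sketch below illustrates the scikit-learn-like workflow the base class defines; the feature map, ansatz, and exact constructor details are illustrative assumptions from the wider qiskit-machine-learning API, not something fixed by this file.

import numpy as np
from qiskit.circuit.library import ZZFeatureMap, RealAmplitudes
from qiskit_algorithms.optimizers import COBYLA
from qiskit_machine_learning.algorithms.classifiers import NeuralNetworkClassifier
from qiskit_machine_learning.neural_networks import EstimatorQNN

# Build a small QNN: a 2-qubit feature map followed by a trainable ansatz.
feature_map = ZZFeatureMap(2)
ansatz = RealAmplitudes(2, reps=1)
qnn = EstimatorQNN(
    circuit=feature_map.compose(ansatz),
    input_params=feature_map.parameters,
    weight_params=ansatz.parameters,
)

# NeuralNetworkClassifier is a TrainableModel subclass, so it takes the
# constructor arguments documented above (loss, optimizer, warm_start, ...).
model = NeuralNetworkClassifier(
    neural_network=qnn,
    loss="squared_error",
    optimizer=COBYLA(maxiter=40),
)

X = np.random.random((20, 2))
y = 2 * np.random.randint(0, 2, 20) - 1   # labels in {-1, +1} for the (1,)-output QNN

model.fit(X, y)                 # runs _fit_internal via _minimize
print(model.weights)            # np.asarray(fit_result.x)
print(model.fit_result.fun)     # final objective value from the OptimizerResult
print(model.score(X, y))        # mean accuracy for a classifier

Since `fit` returns `self`, calls can also be chained, e.g. `NeuralNetworkClassifier(qnn).fit(X, y).predict(X)`.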
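The `callback` argument documented in `__init__` is invoked by the wrapper created in `_get_objective`: on every objective evaluation it receives the current weight array and the objective value. A minimal sketch for recording a training curve, reusing `qnn`, `X`, and `y` from the first sketch:

objective_history: list[float] = []

def record_objective(weights: np.ndarray, objective_value: float) -> None:
    # Called by the wrapped objective in _get_objective on each evaluation.
    objective_history.append(objective_value)

model = NeuralNetworkClassifier(
    neural_network=qnn,
    optimizer=COBYLA(maxiter=40),
    callback=record_objective,
)
model.fit(X, y)
print(f"{len(objective_history)} objective evaluations recorded")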
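`_minimize` checks `callable(self._optimizer)` and, for a callable, calls it directly as `optimizer(fun=..., x0=..., jac=...)`, expecting back an object whose `x` (and `fun`) fields the `weights` and `fit_result` properties read. SciPy's `minimize` matches that calling convention, so a pre-configured SciPy minimizer can stand in for an `Optimizer` instance via the `Minimizer` protocol. A sketch, again assuming the setup from the first sketch:

from functools import partial
from scipy.optimize import minimize

# A Minimizer-style callable: accepts fun, x0 and jac keyword arguments and
# returns a SciPy OptimizeResult, whose .x and .fun fields are what the
# properties above read from the stored fit result.
bfgs = partial(minimize, method="L-BFGS-B", options={"maxiter": 50})

model = NeuralNetworkClassifier(neural_network=qnn, optimizer=bfgs)
model.fit(X, y)
print(model.fit_result.x)       # the object returned by the callable in _minimize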

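`_choose_initial_point` is what gives `warm_start` its meaning: when the flag is set and a `fit_result` already exists, the next fit starts from the previously optimized weights rather than from `initial_point` or a random vector. A short sketch of training in stages under the same assumptions as above:

model = NeuralNetworkClassifier(
    neural_network=qnn,
    optimizer=COBYLA(maxiter=10),   # deliberately few iterations per stage
    warm_start=True,
)

model.fit(X, y)
stage_one = model.weights.copy()

# Because warm_start is True and fit_result is populated, _choose_initial_point
# resumes from stage_one instead of drawing a new random initial point.
model.fit(X, y)
print(np.allclose(stage_one, model.weights))   # typically False: optimization continued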