/psyneulink/core/components/functions/nonstateful/optimizationfunctions.py

#
# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.  You may obtain a copy of the License at:
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
#
# ******************************************   OPTIMIZATION FUNCTIONS **************************************************
"""
Contents
--------

* `OptimizationFunction`
* `GradientOptimization`
* `GridSearch`
* `GaussianProcess`

Overview
--------

Functions that return the sample of a variable yielding the optimized value of an objective_function.

"""

import contextlib
# from fractions import Fraction
import itertools
import warnings
from numbers import Number

import numpy as np

# Conditionally import torch
try:
    import torch
except ImportError:
    torch = None

from beartype import beartype

from psyneulink._typing import Optional, Union, Callable, Literal

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.functions.function import (
    DEFAULT_SEED, Function_Base, FunctionError, _random_state_getter,
    _seed_setter, is_function_type,
)
from psyneulink.core.globals.context import ContextFlags, handle_external_context
from psyneulink.core.globals.defaults import MPI_IMPLEMENTATION
from psyneulink.core.globals.keywords import (
    BOUNDS, GRADIENT_OPTIMIZATION_FUNCTION, GRID_SEARCH_FUNCTION, GAUSSIAN_PROCESS_FUNCTION,
    OPTIMIZATION_FUNCTION_TYPE, OWNER, VALUE, DEFAULT,
)
from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.sampleiterator import SampleIterator
from psyneulink.core.globals.utilities import call_with_pruned_args, convert_to_np_array

__all__ = ['OptimizationFunction', 'GradientOptimization', 'GridSearch', 'GaussianProcess',
           'ASCENT', 'DESCENT', 'DIRECTION', 'MAXIMIZE', 'MINIMIZE', 'OBJECTIVE_FUNCTION', 'SEARCH_FUNCTION',
           'SEARCH_SPACE', 'RANDOMIZATION_DIMENSION', 'SEARCH_TERMINATION_FUNCTION', 'SIMULATION_PROGRESS'
           ]

OBJECTIVE_FUNCTION = 'objective_function'
AGGREGATION_FUNCTION = 'aggregation_function'
SEARCH_FUNCTION = 'search_function'
SEARCH_SPACE = 'search_space'
RANDOMIZATION_DIMENSION = 'randomization_dimension'
SEARCH_TERMINATION_FUNCTION = 'search_termination_function'
DIRECTION = 'direction'
SIMULATION_PROGRESS = 'simulation_progress'

class OptimizationFunctionError(FunctionError):
    pass


def _num_estimates_getter(owning_component, context):
    if owning_component.parameters.randomization_dimension._get(context) is None:
        return np.array(1)
    else:
        return np.array(
            owning_component.parameters.search_space._get(context)[owning_component.randomization_dimension].num
        )


class OptimizationFunction(Function_Base):
    """
    OptimizationFunction(                            \
    default_variable=None,                           \
    objective_function=lambda x:0,                   \
    search_function=lambda x:x,                      \
    search_space=[0],                                \
    randomization_dimension=None,                    \
    search_termination_function=lambda x,y,z:True,   \
    save_samples=False,                              \
    save_values=False,                               \
    max_iterations=None,                             \
    params=None,                                     \
    owner=None,                                      \
    prefs=None)

    Provides an interface to subclasses and external optimization functions. The default `function
    <OptimizationFunction.function>` raises a `NotImplementedError`. The `_evaluate
    <OptimizationFunction._evaluate>` method implements the default procedure
    of generating samples from `search_space <OptimizationFunction.search_space>` using
    `search_function <OptimizationFunction.search_function>`, evaluating them using
    `objective_function <OptimizationFunction.objective_function>`, and reporting the value of each using
    `report_value <OptimizationFunction.report_value>` until terminated by
    `search_termination_function <OptimizationFunction.search_termination_function>`. Subclasses must override
    `function <OptimizationFunction.function>` to implement their own optimization function or call an external one.
    The base class method `_evaluate <OptimizationFunction._evaluate>` may be used to implement the optimization
    procedure.

    Samples in `search_space <OptimizationFunction.search_space>` are assumed to be a list of one or more
    `SampleIterator` objects.

    .. _OptimizationFunction_Procedure:

    **Default Optimization Procedure**

    When `_evaluate <OptimizationFunction._evaluate>` is executed, it iterates over the following steps
    (see the sketch below):

        - get sample from `search_space <OptimizationFunction.search_space>` by calling `search_function
          <OptimizationFunction.search_function>`;
        ..
        - estimate the value of `objective_function <OptimizationFunction.objective_function>` for the sample
          by calling `objective_function <OptimizationFunction.objective_function>` the number of times
          specified in its `num_estimates <OptimizationFunction.num_estimates>` attribute;
        ..
        - aggregate value of the estimates using `aggregation_function <OptimizationFunction.aggregation_function>`
          (the default is to average the values; if `aggregation_function <OptimizationFunction.aggregation_function>`
          is not specified, the entire list of estimates is returned);
        ..
        - report the aggregated value for the sample by calling `report_value <OptimizationFunction.report_value>`;
        ..
        - evaluate `search_termination_function <OptimizationFunction.search_termination_function>`.

    The current iteration number is contained in `iteration <OptimizationFunction.iteration>`. Iteration continues
    until all values of `search_space <OptimizationFunction.search_space>` have been evaluated and/or
    `search_termination_function <OptimizationFunction.search_termination_function>` returns `True`.  The `function
    <OptimizationFunction.function>` returns:

    - the last sample evaluated (which may or may not be the optimal value, depending on the `objective_function
      <OptimizationFunction.objective_function>`);

    - the value of `objective_function <OptimizationFunction.objective_function>` associated with the last sample;

    - two lists, that may contain all of the samples evaluated and their values, depending on whether `save_samples
      <OptimizationFunction.save_samples>` and/or `save_values <OptimizationFunction.save_values>` are `True`,
      respectively.

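    The following is a minimal sketch of that loop (illustrative only, not the actual implementation of
    `_evaluate <OptimizationFunction._evaluate>`; the names refer to the arguments and attributes described
    below)::

        sample, value, iteration = initial_sample, initial_value, 0
        while not search_termination_function(sample, value, iteration):
            sample = search_function(sample, iteration)                         # next sample from search_space
            estimates = [objective_function(sample) for _ in range(num_estimates)]
            value = aggregation_function(estimates)                             # default: average the estimates
            report_value(value)
            iteration += 1
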
    .. _OptimizationFunction_Defaults:

    .. note::

        An OptimizationFunction or any of its subclasses can be created by calling its constructor.  This provides
        runnable defaults for all of its arguments (see below). However, these do not yield useful results, and are
        meant simply to allow the constructor of the OptimizationFunction to be used to specify some but not all of
        its parameters when specifying the OptimizationFunction in the constructor for another Component. For
        example, an OptimizationFunction may use for its `objective_function <OptimizationFunction.objective_function>`
        or `search_function <OptimizationFunction.search_function>` a method of the Component to which it is being
        assigned;  however, those methods will not yet be available, as the Component itself has not yet been
        constructed. This can be handled by calling the OptimizationFunction's `reset
        <OptimizationFunction.reset>` method after the Component has been instantiated, with a parameter
        specification dictionary with a key for each entry that is the name of a parameter and its value the value to
        be assigned to the parameter.  This is done automatically for Mechanisms that take an OptimizationFunction as
        their `function <Mechanism_Base.function>` (such as the `OptimizationControlMechanism`), but must be done
        explicitly for Components for which that is not the case. A warning is issued if defaults are used for
        the arguments of an OptimizationFunction or its subclasses;  this can be suppressed by specifying the
        relevant argument(s) as `NotImplemented`.

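    A minimal sketch of that pattern (``MyOptimizer``, ``SomeMechanism``, and its ``evaluate`` method are
    hypothetical names, used only for illustration)::

        opt = MyOptimizer(max_iterations=100)      # remaining args use runnable defaults; a warning is issued
        mech = SomeMechanism(function=opt)         # construct the owning Component
        opt.reset(objective_function=mech.evaluate,
                  search_space=[SampleIterator([0.1, 0.5, 0.9])])
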
    .. technical_note::
       - Constructors of subclasses should include ``**kwargs`` in their constructor method, to accommodate arguments
         required by some subclasses but not others (e.g., search_space, needed by `GridSearch` but not
         `GradientOptimization`) so that subclasses can be used interchangeably by OptimizationControlMechanism.

       - Subclasses with attributes that depend on one of the OptimizationFunction's parameters should implement the
         `reset <OptimizationFunction.reset>` method, that calls ``super().reset(*args)`` and then
         reassigns the values of the dependent attributes accordingly.  If an argument is not needed for the subclass,
         `NotImplemented` should be passed as the argument's value in the call to super (i.e., the
         OptimizationFunction's constructor).

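    A minimal sketch of a subclass that follows those conventions (``MySearch`` and ``_dependent_attribute`` are
    hypothetical, for illustration only)::

        class MySearch(OptimizationFunction):
            def __init__(self, default_variable=None, objective_function=None, **kwargs):
                # **kwargs accommodates arguments needed by other subclasses
                super().__init__(default_variable=default_variable,
                                 objective_function=objective_function,
                                 **kwargs)

            def reset(self, *args, context=None, **kwargs):
                super().reset(*args, context=context, **kwargs)
                # reassign attributes that depend on the parameters just reset
                self._dependent_attribute = len(self.search_space)

            def _function(self, variable=None, context=None, params=None, **kwargs):
                # use the default procedure to evaluate samples, then return its result
                return self._evaluate(variable=variable, context=context, params=params)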

    Arguments
    ---------

    default_variable : list or ndarray : default None
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
        `objective_function <OptimizationFunction.objective_function>`.

    objective_function : function or method : default None
        specifies function used to make a single estimate for a sample, `num_estimates
        <OptimizationFunction.num_estimates>` of which are made for a given sample in each iteration of the
        `optimization process <OptimizationFunction_Procedure>`; if it is not specified, a default function is used
        that simply returns the value passed as its `variable <OptimizationFunction.variable>` parameter (see `note
        <OptimizationFunction_Defaults>`).

    aggregation_function : function or method : default None
        specifies function used to aggregate the values returned over the `num_estimates
        <OptimizationFunction.num_estimates>` calls to the `objective_function
        <OptimizationFunction.objective_function>` for a given sample in each iteration of the `optimization
        process <OptimizationFunction_Procedure>`; if it is not specified, a default function is used that simply
        returns the value passed as its `variable <OptimizationFunction.variable>` parameter (see `note
        <OptimizationFunction_Defaults>`).

    search_function : function or method : default None
        specifies function used to select a sample for `objective_function <OptimizationFunction.objective_function>`
        in each iteration of the `optimization process <OptimizationFunction_Procedure>`.  It **must be specified**
        if the `objective_function <OptimizationFunction.objective_function>` does not generate samples on its own
        (e.g., as does `GradientOptimization`).  If it is required and not specified, the optimization process
        executes exactly once using the value passed as its `variable <OptimizationFunction.variable>` parameter
        (see `note <OptimizationFunction_Defaults>`).

    search_space : list or array of SampleIterators : default None
        specifies iterators used by `search_function <OptimizationFunction.search_function>` to generate samples
        evaluated by `objective_function <OptimizationFunction.objective_function>` in each iteration of the
        `optimization process <OptimizationFunction_Procedure>`. It **must be specified**
        if the `objective_function <OptimizationFunction.objective_function>` does not generate samples on its own
        (e.g., as does `GradientOptimization`). If it is required and not specified, the optimization process
        executes exactly once using the value passed as its `variable <OptimizationFunction.variable>` parameter
        (see `note <OptimizationFunction_Defaults>`).

    randomization_dimension : int
        specifies the index of `search_space <OptimizationFunction.search_space>` containing the seeds for use in
        randomization over each estimate of a sample (see `num_estimates <OptimizationFunction.num_estimates>`).

    search_termination_function : function or method : None
        specifies function used to terminate iterations of the `optimization process <OptimizationFunction_Procedure>`.
        It must return a boolean value, and it **must be specified** if the
        `objective_function <OptimizationFunction.objective_function>` is not overridden.  If it is required and not
        specified, the optimization process executes exactly once (see `note <OptimizationFunction_Defaults>`).

    save_samples : bool
        specifies whether or not to save and return the values of the samples used to evaluate `objective_function
        <OptimizationFunction.objective_function>` over all iterations of the `optimization process
        <OptimizationFunction_Procedure>`.

    save_values : bool
        specifies whether or not to save and return the values of `objective_function
        <OptimizationFunction.objective_function>` for samples evaluated in all iterations of the
        `optimization process <OptimizationFunction_Procedure>`.

    max_iterations : int : default 1000
        specifies the maximum number of times the `optimization process <OptimizationFunction_Procedure>` is allowed
        to iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.


    Attributes
    ----------

    variable : ndarray
        first sample evaluated by `objective_function <OptimizationFunction.objective_function>` (i.e., one used to
        evaluate it in the first iteration of the `optimization process <OptimizationFunction_Procedure>`).

    objective_function : function or method
        used to evaluate the sample in each iteration of the `optimization process <OptimizationFunction_Procedure>`.

    search_function : function, method or None
        used to select a sample evaluated by `objective_function <OptimizationFunction.objective_function>`
        in each iteration of the `optimization process <OptimizationFunction_Procedure>`.  `NotImplemented` if
        the `objective_function <OptimizationFunction.objective_function>` generates its own samples.

    search_space : list or array of `SampleIterators <SampleIterator>`
        used by `search_function <OptimizationFunction.search_function>` to generate samples evaluated by
        `objective_function <OptimizationFunction.objective_function>` in each iteration of the `optimization process
        <OptimizationFunction_Procedure>`.  The number of SampleIterators in the list determines the dimensionality
        of each sample:  in each iteration of the `optimization process <OptimizationFunction_Procedure>`, each
        SampleIterator is called upon to provide the value for one of the dimensions of the sample.  `NotImplemented`
        if the `objective_function <OptimizationFunction.objective_function>` generates its own samples.  If it is
        required and not specified, the optimization process executes exactly once using the value passed as its
        `variable <OptimizationFunction.variable>` parameter (see `note <OptimizationFunction_Defaults>`).

    randomization_dimension : int or None
        the index of `search_space <OptimizationFunction.search_space>` containing the seeds for use in randomization
        over each estimate of a sample (see `num_estimates <OptimizationFunction.num_estimates>`);  if num_estimates
        is not specified, this is None, and only a single estimate is made for each sample.

    num_estimates : int or None
        the number of independent estimates evaluated (i.e., calls made to the OptimizationFunction's
        `objective_function <OptimizationFunction.objective_function>`) for each sample, aggregated
        by its `aggregation_function <OptimizationFunction.aggregation_function>` to determine the estimated value
        for a given sample.  This is determined from the `search_space <OptimizationFunction.search_space>` by
        accessing its `randomization_dimension <OptimizationFunction.randomization_dimension>` and determining
        the length of (i.e., number of elements specified for) that dimension.

    aggregation_function : function or method
        used to aggregate the values returned over the `num_estimates <OptimizationFunction.num_estimates>` calls to
        the `objective_function <OptimizationFunction.objective_function>` for a given sample in each iteration of
        the `optimization process <OptimizationFunction_Procedure>`.

    search_termination_function : function or method that returns a boolean value
        used to terminate iterations of the `optimization process <OptimizationFunction_Procedure>`; if it is required
        and not specified, the optimization process executes exactly once (see `note <OptimizationFunction_Defaults>`).

    iteration : int
        the current iteration of the `optimization process <OptimizationFunction_Procedure>`.

    max_iterations : int : default 1000
        determines the maximum number of times the `optimization process <OptimizationFunction_Procedure>` is allowed
        to iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.

    save_samples : bool
        determines whether or not to save the values of the samples used to evaluate `objective_function
        <OptimizationFunction.objective_function>` over all iterations of the `optimization process
        <OptimizationFunction_Procedure>`.

    save_values : bool
        determines whether or not to save and return the values of `objective_function
        <OptimizationFunction.objective_function>` for samples evaluated in all iterations of the
        `optimization process <OptimizationFunction_Procedure>`.
    """

    componentType = OPTIMIZATION_FUNCTION_TYPE

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <OptimizationFunction.variable>`

                    :default value: numpy.array([0, 0, 0])
                    :type: ``numpy.ndarray``
                    :read only: True

                max_iterations
                    see `max_iterations <OptimizationFunction.max_iterations>`

                    :default value: None
                    :type:

                num_estimates
                    see `num_estimates <OptimizationFunction.num_estimates>`

                    :default value: None
                    :type: ``int``

                objective_function
                    see `objective_function <OptimizationFunction.objective_function>`

                    :default value: lambda x: 0
                    :type: ``types.FunctionType``

                randomization_dimension
                    see `randomization_dimension <OptimizationFunction.randomization_dimension>`

                    :default value: None
                    :type: ``int``

                save_samples
                    see `save_samples <OptimizationFunction.save_samples>`

                    :default value: False
                    :type: ``bool``

                save_values
                    see `save_values <OptimizationFunction.save_values>`

                    :default value: False
                    :type: ``bool``

                saved_samples
                    see `saved_samples <OptimizationFunction.saved_samples>`

                    :default value: []
                    :type: ``list``
                    :read only: True

                saved_values
                    see `saved_values <OptimizationFunction.saved_values>`

                    :default value: []
                    :type: ``list``
                    :read only: True

                search_function
                    see `search_function <OptimizationFunction.search_function>`

                    :default value: lambda x: x
                    :type: ``types.FunctionType``

                search_space
                    see `search_space <OptimizationFunction.search_space>`

                    :default value: [`SampleIterator`]
                    :type: ``list``

                search_termination_function
                    see `search_termination_function <OptimizationFunction.search_termination_function>`

                    :default value: lambda x, y, z: True
                    :type: ``types.FunctionType``
        """
        variable = Parameter(np.array([0.0, 0.0, 0.0]), read_only=True, pnl_internal=True, constructor_argument='default_variable')

        objective_function = Parameter(lambda x: 0.0, stateful=False, loggable=False)
        aggregation_function = Parameter(lambda x: np.mean(x, axis=1), stateful=False, loggable=False)
        search_function = Parameter(lambda x: x, stateful=False, loggable=False)
        search_termination_function = Parameter(lambda x, y, z: True, stateful=False, loggable=False)
        search_space = Parameter([SampleIterator([0])], stateful=False, loggable=False)
        randomization_dimension = Parameter(None, stateful=False, loggable=False)
        num_estimates = Parameter(None, stateful=True, loggable=True, read_only=True,
                                  dependencies=[randomization_dimension, search_space],
                                  getter=_num_estimates_getter)

        save_samples = Parameter(False, pnl_internal=True)
        save_values = Parameter(False, pnl_internal=True)

        # these are created as parameter ports, but should they be?
        max_iterations = Parameter(None, modulable=True)

        saved_samples = Parameter([], read_only=True, pnl_internal=True)
        saved_values = Parameter([], read_only=True, pnl_internal=True)

        grid = Parameter(None)

    @check_user_specified
    @beartype
    def __init__(
        self,
        default_variable=None,
        objective_function:Optional[Callable] = None,
        aggregation_function:Optional[Callable] = None,
        search_function:Optional[Callable] = None,
        search_space=None,
        randomization_dimension=None,
        search_termination_function:Optional[Callable] = None,
        save_samples:Optional[bool]=None,
        save_values:Optional[bool]=None,
        max_iterations:Optional[int]=None,
        params=None,
        owner=None,
        prefs=None,
        context=None,
        **kwargs
    ):

        self._unspecified_args = []

        if objective_function is None:
            self._unspecified_args.append(OBJECTIVE_FUNCTION)

        if aggregation_function is None:
            self._unspecified_args.append(AGGREGATION_FUNCTION)

        if search_function is None:
            self._unspecified_args.append(SEARCH_FUNCTION)

        if search_termination_function is None:
            self._unspecified_args.append(SEARCH_TERMINATION_FUNCTION)

        if search_space and randomization_dimension is not None:
            # Make randomization dimension of search_space last for standardization of treatment;
            # randomization_dimension is an index into search_space, so pop by index and update it
            # to point at the new (last) position
            search_space.append(search_space.pop(randomization_dimension))
            randomization_dimension = len(search_space) - 1

        super().__init__(
            default_variable=default_variable,
            save_samples=save_samples,
            save_values=save_values,
            max_iterations=max_iterations,
            randomization_dimension=randomization_dimension,
            search_space=search_space,
            objective_function=objective_function,
            aggregation_function=aggregation_function,
            search_function=search_function,
            search_termination_function=search_termination_function,
            params=params,
            owner=owner,
            prefs=prefs,
            context=context,
            **kwargs
        )

    def _validate_params(self, request_set, target_set=None, context=None):

        # super()._validate_params(request_set=request_set, target_set=target_set, context=context)

        if OBJECTIVE_FUNCTION in request_set and request_set[OBJECTIVE_FUNCTION] is not None:
            if not is_function_type(request_set[OBJECTIVE_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(OBJECTIVE_FUNCTION), self.__class__.__name__,
                                                       request_set[OBJECTIVE_FUNCTION].__name__))

        if AGGREGATION_FUNCTION in request_set and request_set[AGGREGATION_FUNCTION] is not None:
            if not is_function_type(request_set[AGGREGATION_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(AGGREGATION_FUNCTION), self.__class__.__name__,
                                                       request_set[AGGREGATION_FUNCTION].__name__))

        if SEARCH_FUNCTION in request_set and request_set[SEARCH_FUNCTION] is not None:
            if not is_function_type(request_set[SEARCH_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(SEARCH_FUNCTION), self.__class__.__name__,
                                                       request_set[SEARCH_FUNCTION].__name__))

        if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
            search_space = request_set[SEARCH_SPACE]
            if not all(isinstance(s, (SampleIterator, type(None), list, tuple, np.ndarray)) for s in search_space):
                raise OptimizationFunctionError("All entries in list specified for {} arg of {} must be a {}".
                                                format(repr(SEARCH_SPACE),
                                                       self.__class__.__name__,
                                                       "SampleIterator, list, tuple, or ndarray"))

        if SEARCH_TERMINATION_FUNCTION in request_set and request_set[SEARCH_TERMINATION_FUNCTION] is not None:
            if not is_function_type(request_set[SEARCH_TERMINATION_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(SEARCH_TERMINATION_FUNCTION), self.__class__.__name__,
                                                       request_set[SEARCH_TERMINATION_FUNCTION].__name__))

            try:
                b = request_set[SEARCH_TERMINATION_FUNCTION]()
                if not isinstance(b, bool):
                    raise OptimizationFunctionError("Function ({}) specified for {} arg of {} must return a boolean value".
                                                    format(request_set[SEARCH_TERMINATION_FUNCTION].__name__,
                                                           repr(SEARCH_TERMINATION_FUNCTION),
                                                           self.__class__.__name__))
            except TypeError as e:
                # we cannot validate arbitrary functions here if they
                # require arguments
                if 'required positional arguments' not in str(e):
                    raise

    @handle_external_context(fallback_most_recent=True)
    def reset(
        self,
        default_variable=None,
        objective_function=None,
        aggregation_function=None,
        search_function=None,
        search_termination_function=None,
        search_space=None,
        randomization_dimension=None,
        context=None
    ):
        """Reset parameters of the OptimizationFunction

        Parameters to be reset should be specified in a parameter specification dictionary, in which the key
        for each entry is the name of one of the following parameters, and its value is the value to be assigned
        to the parameter.  The following parameters can be reset:

            * `default_variable <OptimizationFunction.default_variable>`
            * `objective_function <OptimizationFunction.objective_function>`
            * `search_function <OptimizationFunction.search_function>`
            * `search_termination_function <OptimizationFunction.search_termination_function>`
        """
        self._validate_params(
            request_set={
                'default_variable': default_variable,
                'objective_function': objective_function,
                'aggregation_function': aggregation_function,
                RANDOMIZATION_DIMENSION : randomization_dimension,
                'search_function': search_function,
                'search_termination_function': search_termination_function,
                'search_space': search_space,
            }
        )

        if default_variable is not None:
            self._update_default_variable(default_variable, context)
        if objective_function is not None:
            self.parameters.objective_function._set(objective_function, context)
            if OBJECTIVE_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(OBJECTIVE_FUNCTION)]
        if aggregation_function is not None:
            self.parameters.aggregation_function._set(aggregation_function, context)
            if AGGREGATION_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(AGGREGATION_FUNCTION)]
        if search_function is not None:
            self.parameters.search_function._set(search_function, context)
            if SEARCH_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(SEARCH_FUNCTION)]
        if search_termination_function is not None:
            self.parameters.search_termination_function._set(search_termination_function, context)
            if SEARCH_TERMINATION_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(SEARCH_TERMINATION_FUNCTION)]
        if search_space is not None:
            self.parameters.search_space._set(search_space, context)
            if SEARCH_SPACE in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(SEARCH_SPACE)]
        if randomization_dimension is not None:
            self.parameters.randomization_dimension._set(np.asarray(randomization_dimension), context)

    def _function(self,
                 variable=None,
                 context=None,
                 params=None,
                 **kwargs):
        """Find the sample that yields the optimal value of `objective_function
        <OptimizationFunction.objective_function>`.

        See `optimization process <OptimizationFunction_Procedure>` for details.

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : array, array, list, list
            first array contains sample that yields the optimal value of the `optimization process
            <OptimizationFunction_Procedure>`, and second array contains the value of `objective_function
            <OptimizationFunction.objective_function>` for that sample.  If `save_samples
            <OptimizationFunction.save_samples>` is `True`, first list contains all the values sampled in the order
            they were evaluated; otherwise it is empty.  If `save_values <OptimizationFunction.save_values>` is `True`,
            second list contains the values returned by `objective_function <OptimizationFunction.objective_function>`
            for all the samples in the order they were evaluated; otherwise it is empty.
        """

        raise NotImplementedError("OptimizationFunction._function is not implemented and "
                                  "should be overridden by subclasses.")

    def _evaluate(self, variable=None, context=None, params=None, fit_evaluate=False):
        """
        Evaluate all the samples in a `search_space <OptimizationFunction.search_space>` with the agent_rep. The
        evaluation is done either serially (_sequential_evaluate) or in parallel (_grid_evaluate). This method should
        be invoked by subclasses in their `_function` method to evaluate the samples before searching for the optimal
        value.

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : array, array, list, list
            first array contains sample that yields the optimal value of the `optimization process
            <OptimizationFunction_Procedure>`, and second array contains the value of `objective_function
            <OptimizationFunction.objective_function>` for that sample.  If `save_samples
            <OptimizationFunction.save_samples>` is `True`, first list contains all the values sampled in the order
            they were evaluated; otherwise it is empty.  If `save_values <OptimizationFunction.save_values>` is `True`,
            second list contains the values returned by `objective_function <OptimizationFunction.objective_function>`
            for all the samples in the order they were evaluated; otherwise it is empty.

        """

        if self._unspecified_args and self.initialization_status == ContextFlags.INITIALIZED:
            warnings.warn("The following arg(s) were not specified for {}: {} -- using default(s)".
                          format(self.name, ', '.join(self._unspecified_args)))
            assert all([not getattr(self.parameters, x)._user_specified for x in self._unspecified_args])
            self._unspecified_args = []

        # EVALUATE ALL SAMPLES IN SEARCH SPACE
        # Evaluate all estimates of all samples in search_space

        # Run compiled mode if requested by parameter and everything is initialized
        if self.owner and self.owner.parameters.comp_execution_mode._get(context) != 'Python' and \
          ContextFlags.PROCESSING in context.flags:
            all_samples = list(itertools.product(*self.search_space))
            all_values, num_evals = self._grid_evaluate(self.owner, context, fit_evaluate)
            assert len(all_values) == num_evals
            assert len(all_samples) == num_evals

            if fit_evaluate:
                all_values = np.ctypeslib.as_array(all_values)

                def _get_builtin_dtype(dtype):
                    if dtype.isbuiltin:
                        return dtype

                    if dtype.subdtype is not None:
                        return dtype.base

                    subdtypes = (v[0] for v in dtype.fields.values())
                    first_builtin = _get_builtin_dtype(next(subdtypes))
                    assert all(_get_builtin_dtype(sdt) is first_builtin for sdt in subdtypes)
                    return first_builtin

                dtype = _get_builtin_dtype(all_values.dtype)
                # Ignore the shape of the output structure
                all_values = all_values.view(dtype=dtype).reshape((*all_values.shape[0:2], -1))

                # Re-arrange dimensions to match Python
                all_values = np.transpose(all_values, (1, 2, 0))

            last_sample = last_value = None
        # Otherwise, default sequential sampling
        else:
            # Get initial sample in case it is needed by _search_space_evaluate (e.g., for gradient initialization)
            initial_sample = self._check_args(variable=variable, context=context, params=params)
            try:
                initial_value = self.owner.objective_mechanism.parameters.value._get(context)
            except AttributeError:
                initial_value = np.array(0)

            last_sample, last_value, all_samples, all_values = self._sequential_evaluate(initial_sample,
                                                                                         initial_value,
                                                                                         context)

        # If aggregation_function is specified and there is a randomization dimension specified
        # in the control signals, use the aggregation function to aggregate over the samples generated
        # for different randomized values of the control signal
        if self.aggregation_function and \
                self.parameters.randomization_dimension._get(context) and \
                self.parameters.num_estimates._get(context) is not None:

            # Reshape all_values so that aggregation can be performed over randomization dimension
            num_estimates = np.array(int(self.parameters.num_estimates._get(context)))
            num_evals = np.prod([d.num for d in self.search_space])
            num_param_combs = num_evals // num_estimates

            # if in compiled mode, all_values comes from _grid_evaluate, so convert ctype double array to numpy
            if self.owner and self.owner.parameters.comp_execution_mode._get(context) != 'Python':
                num_outcomes = len(all_values) // num_evals
                all_values = np.ctypeslib.as_array(all_values).reshape((num_outcomes, num_evals))
                all_samples = np.array(all_samples).transpose()
            else:
                num_outcomes = all_values.shape[0]

            all_values = np.reshape(all_values.transpose(), (num_param_combs, num_estimates, num_outcomes))

            # Since we are aggregating over the randomized value of the control allocation, we also need to drop the
            # randomized dimension from the samples. That is, we don't want to return num_estimates samples for each
            # control allocation. The line below just grabs the first one (seed == 1) for each control allocation.
            all_samples = all_samples[:, all_samples[self.randomization_dimension, :] == all_samples[self.randomization_dimension, 0]]

            # If num_estimates is not None, then one of the control signals is modulating the random seed. We will
            # aggregate over this dimension.
            aggregated_values = np.atleast_2d(self.aggregation_function(all_values))

            # Transpose the aggregated values matrix so it is (num_outcomes, num_param_combs), matching all_samples
            returned_values = np.transpose(aggregated_values)

        else:
            returned_values = all_values

        # Return list of unique samples and aggregated values over them
        return last_sample, last_value, all_samples, returned_values

    def _sequential_evaluate(self, initial_sample, initial_value, context):
        """Sequentially evaluate every sample in search_space.
        Return arrays with all samples evaluated, and array with all values of those samples.
        """

        # Initialize variables used in while loop
        iteration = 0
        current_sample = initial_sample
        current_value = initial_value
        all_samples = []
        all_values = []

        # Set up progress bar
        _show_progress = False
        if hasattr(self, OWNER) and self.owner and self.owner.prefs.reportOutputPref is SIMULATION_PROGRESS:
            _show_progress = True
            _progress_bar_char = '.'
            _progress_bar_rate_str = ""
            _search_space_size = len(self.search_space)
            _progress_bar_rate = int(10**(np.log10(_search_space_size) - 2))
            if _progress_bar_rate > 1:
                _progress_bar_rate_str = str(_progress_bar_rate) + " "
            print("\n{} executing optimization process (one {} for each {}of {} samples): ".
                  format(self.owner.name, repr(_progress_bar_char), _progress_bar_rate_str, _search_space_size))
            _progress_bar_count = 0

        # Iterate over samples until search_termination_function returns True
        evaluated_samples = []
        estimated_values = []
        while not call_with_pruned_args(self.search_termination_function,
                                        current_sample,
                                        current_value, iteration,
                                        context=context):
            if _show_progress:
                increment_progress_bar = (_progress_bar_rate < 1) or not (_progress_bar_count % _progress_bar_rate)
                if increment_progress_bar:
                    print(_progress_bar_char, end='', flush=True)
                _progress_bar_count += 1

            # Get next sample
            current_sample = call_with_pruned_args(self.search_function, current_sample, iteration, context=context)
            # Get value of sample
            current_value = call_with_pruned_args(self.objective_function, current_sample, context=context)

            # If the value returned by the objective function is a tuple, then we are using PEC and the
            # evaluate_agent_rep function is returning the (net_outcome, results) tuple. We want the results
            # in this case.
            if type(current_value) is tuple:
                current_value = np.squeeze(np.array(current_value[1]))

            # Convert the sample and values to numpy arrays even if they are scalars
            current_sample = np.atleast_1d(current_sample)
            current_value = np.atleast_1d(current_value)

            evaluated_samples.append(current_sample)
            estimated_values.append(current_value)

            # self._report_value(current_value)
            iteration += 1
            max_iterations = self.parameters.max_iterations._get(context)
            if max_iterations and iteration > max_iterations:
                warnings.warn(f"{self.name} of {self.owner.name} exceeded max iterations {max_iterations}.")
                break

            # Change randomization for next sample if specified (relies on randomization being last dimension)
            if self.owner and self.owner.parameters.same_seed_for_all_allocations is False:
                self.search_space[self.parameters.randomization_dimension._get(context)].start += 1
                self.search_space[self.parameters.randomization_dimension._get(context)].stop += 1

        if self.parameters.save_samples._get(context):
            self.parameters.saved_samples._set(all_samples, context)
        if self.parameters.save_values._get(context):
            self.parameters.saved_values._set(all_values, context)

        # Convert evaluated_samples and estimated_values to numpy arrays, stacked along the last dimension
        estimated_values = np.stack(estimated_values, axis=-1)
        evaluated_samples = np.stack(evaluated_samples, axis=-1)

        # FIX: 11/3/21: ??MODIFY TO RETURN SAME AS _grid_evaluate
        return current_sample, current_value, evaluated_samples, estimated_values

    def _grid_evaluate(self, ocm, context, get_results:bool):
        """Helper method for evaluation of a grid of samples from search space via LLVM backends."""
        # If execution mode is not Python, the search space has to be static
        def _is_static(it:SampleIterator):
            if isinstance(it.start, Number) and isinstance(it.stop, Number):
                return True

            if isinstance(it.generator, list):
                return True

            return False

        assert all(_is_static(sample_iterator) for sample_iterator in self.search_space)

        assert ocm is ocm.agent_rep.controller

        # Compiled evaluate expects the same variable as composition
        state_features = ocm.parameters.state_feature_values._get(context)
        inputs, num_inputs_sets = ocm.agent_rep._parse_run_inputs(state_features, context)

        num_evals = np.prod([d._num for d in self.search_space])

        # Map allocations to values
        comp_exec = pnlvm.execution.CompExecution(ocm.agent_rep, context)
        execution_mode = ocm.parameters.comp_execution_mode._get(context)
        if execution_mode == "PTX":
            outcomes = comp_exec.cuda_evaluate(inputs, num_inputs_sets, num_evals, get_results)
        elif execution_mode == "LLVM":
            outcomes = comp_exec.thread_evaluate(inputs, num_inputs_sets, num_evals, get_results)
        else:
            assert False, f"Unknown execution mode for {ocm.name}: {execution_mode}."

        return outcomes, num_evals

    def reset_grid(self, context):
        """Reset iterators in `search_space <GridSearch.search_space>`"""
        for s in self.search_space:
            s.reset()
        self.parameters.grid._set((s for s in itertools.product(*[s for s in self.search_space])), context)

    def _traverse_grid(self, variable, sample_num, context=None):
        """Get next sample from grid.
        This is assigned as the `search_function <OptimizationFunction.search_function>` of the `OptimizationFunction`.
        """
        if self.is_initializing:
            return convert_to_np_array([signal._start for signal in self.search_space])
        try:
            sample = np.asarray(next(self.parameters.grid._get(context)))
        except StopIteration:
            raise OptimizationFunctionError("Expired grid in {} run from {} "
                                            "(execution_count: {}; num_iterations: {})".
                format(self.__class__.__name__, self.owner.name,
                       self.owner.parameters.execution_count.get(), self.num_iterations))
        return sample

    def _grid_complete(self, variable, value, iteration, context=None):
        """Return True when search of grid is complete.
        This is assigned as the `search_termination_function <OptimizationFunction.search_termination_function>`
        of the `OptimizationFunction`.
        """
        try:
            return iteration == self.num_iterations
        except AttributeError:
            return True

    def _report_value(self, new_value):
        """Report value returned by `objective_function <OptimizationFunction.objective_function>` for sample."""
        pass

    @property
    def num_estimates(self):
        if self.randomization_dimension is None:
            return 1
        else:
            return self.search_space[self.randomization_dimension].num


ASCENT = 'ascent'
DESCENT = 'descent'


class GradientOptimization(OptimizationFunction):
    """
    GradientOptimization(            \
        default_variable=None,       \
        objective_function=None,     \
        gradient_function=None,      \
        direction=ASCENT,            \
        search_space=None,           \
        step_size=1.0,               \
        annealing_function=None,     \
        convergence_criterion=VALUE, \
        convergence_threshold=.001,  \
        max_iterations=1000,         \
        save_samples=False,          \
        save_values=False,           \
        params=None,                 \
        owner=None,                  \
        prefs=None                   \
        )

    Sample variable by following gradient with respect to the value of `objective_function
    <GradientOptimization.objective_function>` it generates, and return the sample that generates either the
    highest (**direction** = *ASCENT*) or lowest (**direction** = *DESCENT*) value.

    .. _GradientOptimization_Procedure:

    **Optimization Procedure**

    When `function <GradientOptimization.function>` is executed, it iterates over the following steps
    (see the sketch below):

        - `compute gradient <GradientOptimization_Gradient_Calculation>` using the `gradient_function
          <GradientOptimization.gradient_function>`;
        ..
        - adjust `variable <GradientOptimization.variable>` based on the gradient, in the specified
          `direction <GradientOptimization.direction>` and by an amount specified by `step_size
          <GradientOptimization.step_size>` and possibly `annealing_function
          <GradientOptimization.annealing_function>`;
        ..
        - compute value of `objective_function <GradientOptimization.objective_function>` using the adjusted value of
          `variable <GradientOptimization.variable>`;
        ..
        - adjust `step_size <GradientOptimization.step_size>` using `annealing_function
          <GradientOptimization.annealing_function>`, if specified, for use in the next iteration;
        ..
        - evaluate `convergence_criterion <GradientOptimization.convergence_criterion>` and test whether it is below
          the `convergence_threshold <GradientOptimization.convergence_threshold>`.

    The current iteration is contained in `iteration <GradientOptimization.iteration>`. Iteration continues until
    `convergence_criterion <GradientOptimization.convergence_criterion>` falls below `convergence_threshold
    <GradientOptimization.convergence_threshold>` or the number of iterations exceeds `max_iterations
    <GradientOptimization.max_iterations>`.  The `function <GradientOptimization.function>` returns the last sample
    evaluated by `objective_function <GradientOptimization.objective_function>` (presumed to be the optimal one),
    the value of the function, as well as lists that may contain all of the samples evaluated and their values,
    depending on whether `save_samples <OptimizationFunction.save_samples>` and/or `save_values
    <OptimizationFunction.save_values>` are `True`, respectively.
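
    A single iteration of that procedure amounts to the following (a sketch in terms of the arguments described
    below, not the actual implementation)::

        gradient = gradient_function(variable)
        variable = variable + step_size * gradient      # direction = ASCENT; subtracted for DESCENT
        value = objective_function(variable)
        if annealing_function is not None:
            step_size = annealing_function(step_size, iteration)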

    .. _GradientOptimization_Gradient_Calculation:

    **Gradient Calculation**

    The gradient is evaluated by `gradient_function <GradientOptimization.gradient_function>`,
    which should be the derivative of the `objective_function <GradientOptimization.objective_function>`
    with respect to `variable <GradientOptimization.variable>` at its current value:
    :math:`\\frac{d(objective\\_function(variable))}{d(variable)}`.  If the **gradient_function** argument of the
    constructor is not specified, then an attempt is made to use PyTorch functional
    `autograd's <https://pytorch.org/docs/stable/generated/torch.func.grad.html>`_ `grad <torch.func.grad>`
    method to generate `gradient_function <GradientOptimization.gradient_function>`.  If that fails,
    an error occurs.  The **search_space** argument can be used to specify lower and/or upper bounds for each dimension
    of the sample; if the gradient causes a value of the sample to exceed a bound along a dimension, the value of the
    bound is used for that dimension, unless/until the gradient shifts and causes it to return back within the bound.
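
    For example, a gradient function comparable to the one generated automatically can be built by hand with
    `torch.func.grad` (a sketch, assuming PyTorch is available; ``objective`` is a hypothetical scalar-valued
    function of the sample)::

        import torch
        from torch.func import grad

        def objective(x):                    # must return a scalar tensor
            return -(x ** 2).sum()

        gradient_function = grad(objective)  # computes d(objective(x))/d(x), same shape as x
        gradient_function(torch.tensor([1.0, 2.0]))    # -> tensor([-2., -4.])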

964
    Arguments
965
    ---------
966

967
    default_variable : list or ndarray : default None
968
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
969
        `objective_function <GradientOptimization.objective_function>`.
970

971
    objective_function : function or method
972
        specifies function used to evaluate `variable <GradientOptimization.variable>`
973
        in each iteration of the `optimization process  <GradientOptimization_Procedure>`;
974
        it must be specified and it must return a scalar value.
975

976
    gradient_function : function
977
        specifies function used to compute the gradient in each iteration of the `optimization process
978
        <GradientOptimization_Procedure>`;  if it is not specified, an attempt is made to compute it using
979
        `PyTorch autograd's <https://pytorch.org/docs/stable/generated/torch.func.grad.html>`_ `grad <torch.func.grad>`.
980

981
    direction : ASCENT or DESCENT : default ASCENT
982
        specifies the direction of gradient optimization: if *ASCENT*, movement is attempted in the positive direction
983
        (i.e., "up" the gradient);  if *DESCENT*, movement is attempted in the negative direction (i.e. "down"
984
        the gradient).
985

986
    step_size : int or float : default 1.0
987
        specifies the rate at which the `variable <GradientOptimization.variable>` is updated in each
988
        iteration of the `optimization process <GradientOptimization_Procedure>`;  if `annealing_function
989
        <GradientOptimization.annealing_function>` is specified, **step_size** specifies the intial value of
990
        `step_size <GradientOptimization.step_size>`.
991

992
    search_space : list or array : default None
993
        specifies bounds of the samples used to evaluate `objective_function <GaussianProcess.objective_function>`
994
        along each dimension of `variable <GaussianProcess.variable>`;  each item must be a list or tuple,
995
        or a `SampleIterator` that resolves to one.  If the item has two elements, they are used as the lower and
996
        upper bounds respectively, and the lower must be less than the upper;  None can be used in either place,
997
        in which case that bound is ignored.  If an item has more than two elements, the min is used as the lower
998
        bound and the max is used as the upper bound; none of the elements can be None.
999

1000
    annealing_function : function or method : default None
1001
        specifies function used to adapt `step_size <GradientOptimization.step_size>` in each
1002
        iteration of the `optimization process <GradientOptimization_Procedure>`;  must take accept two parameters —
1003
        `step_size <GradientOptimization.step_size>` and `iteration <GradientOptimization_Procedure>`, in that
1004
        order — and return a scalar value, that is used for the next iteration of optimization.
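
        For example, a simple (hypothetical) annealing function could be specified as
        ``lambda step_size, iteration: step_size / (1 + iteration)``.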

    convergence_criterion : *VARIABLE* or *VALUE* : default *VALUE*
        specifies the parameter used to terminate the `optimization process <GradientOptimization_Procedure>`.
        *VARIABLE*: process terminates when the most recent sample differs from the previous one by less than
        `convergence_threshold <GradientOptimization.convergence_threshold>`;  *VALUE*: process terminates when the
        last value returned by `objective_function <GradientOptimization.objective_function>` differs from the
        previous one by less than `convergence_threshold <GradientOptimization.convergence_threshold>`.

    convergence_threshold : int or float : default 0.001
        specifies the change in value of `convergence_criterion` below which the optimization process is terminated.

    max_iterations : int : default 1000
        specifies the maximum number of times the `optimization process <GradientOptimization_Procedure>` is allowed to
        iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.

    save_samples : bool
        specifies whether or not to save and return all of the samples used to evaluate `objective_function
        <GradientOptimization.objective_function>` in the `optimization process <GradientOptimization_Procedure>`.

    save_values : bool
        specifies whether or not to save and return the values of `objective_function
        <GradientOptimization.objective_function>` for all samples evaluated in the `optimization
        process <GradientOptimization_Procedure>`.

    Attributes
    ----------

    variable : ndarray
        sample used as the starting point for the `optimization process <GradientOptimization_Procedure>` (i.e., one
        used to evaluate `objective_function <GradientOptimization.objective_function>` in the first iteration).

    objective_function : function or method
        function used to evaluate `variable <GradientOptimization.variable>`
        in each iteration of the `optimization process <GradientOptimization_Procedure>`;
        it must be specified and it must return a scalar value.

    gradient_function : function
        function used to compute the gradient in each iteration of the `optimization process
        <GradientOptimization_Procedure>` (see `Gradient Calculation <GradientOptimization_Gradient_Calculation>` for
        details).

    direction : ASCENT or DESCENT
        direction of gradient optimization:  if *ASCENT*, movement is attempted in the positive direction
        (i.e., "up" the gradient);  if *DESCENT*, movement is attempted in the negative direction (i.e., "down"
        the gradient).

    step_size : int or float
        determines the rate at which the `variable <GradientOptimization.variable>` is updated in each
        iteration of the `optimization process <GradientOptimization_Procedure>`;  if `annealing_function
        <GradientOptimization.annealing_function>` is specified, `step_size <GradientOptimization.step_size>`
        determines the initial value.

    search_space : list or array
        contains tuples specifying bounds within which each dimension of `variable <GradientOptimization.variable>` is
        sampled, and used to evaluate `objective_function <GradientOptimization.objective_function>` in iterations of
        the `optimization process <GradientOptimization_Procedure>`.

    bounds : tuple
        contains two 2d arrays; the 1st contains the lower bounds for each dimension of the sample (`variable
        <GradientOptimization.variable>`), and the 2nd the upper bound of each.

    annealing_function : function or method
        function used to adapt `step_size <GradientOptimization.step_size>` in each iteration of the `optimization
        process <GradientOptimization_Procedure>`;  if `None`, no call is made and the same `step_size
        <GradientOptimization.step_size>` is used in each iteration.

    iteration : int
        the current iteration of the `optimization process <GradientOptimization_Procedure>`.

    convergence_criterion : VARIABLE or VALUE
        determines the parameter used to terminate the `optimization process <GradientOptimization_Procedure>`.
        *VARIABLE*: process terminates when the most recent sample differs from the previous one by less than
        `convergence_threshold <GradientOptimization.convergence_threshold>`;  *VALUE*: process terminates when the
        last value returned by `objective_function <GradientOptimization.objective_function>` differs from the
        previous one by less than `convergence_threshold <GradientOptimization.convergence_threshold>`.

    convergence_threshold : int or float
        determines the change in value of `convergence_criterion` below which the `optimization process
        <GradientOptimization_Procedure>` is terminated.

    max_iterations : int
        determines the maximum number of times the `optimization process <GradientOptimization_Procedure>` is allowed
        to iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.

    save_samples : bool
        determines whether or not to save and return all of the samples used to evaluate `objective_function
        <GradientOptimization.objective_function>` in the `optimization process <GradientOptimization_Procedure>`.

    save_values : bool
        determines whether or not to save and return the values of `objective_function
        <GradientOptimization.objective_function>` for all samples evaluated in the `optimization
        process <GradientOptimization_Procedure>`.
    """

    componentName = GRADIENT_OPTIMIZATION_FUNCTION
    bounds = None

    class Parameters(OptimizationFunction.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <GradientOptimization.variable>`

                    :default value: [[0], [0]]
                    :type: ``list``
                    :read only: True

                annealing_function
                    see `annealing_function <GradientOptimization.annealing_function>`

                    :default value: None
                    :type:

                convergence_criterion
                    see `convergence_criterion <GradientOptimization.convergence_criterion>`

                    :default value: `VALUE`
                    :type: ``str``

                convergence_threshold
                    see `convergence_threshold <GradientOptimization.convergence_threshold>`

                    :default value: 0.001
                    :type: ``float``

                direction
                    see `direction <GradientOptimization.direction>`

                    :default value: `ASCENT`
                    :type: ``str``

                gradient_function
                    see `gradient_function <GradientOptimization.gradient_function>`

                    :default value: None
                    :type:

                max_iterations
                    see `max_iterations <GradientOptimization.max_iterations>`

                    :default value: 1000
                    :type: ``int``

                previous_value
                    see `previous_value <GradientOptimization.previous_value>`

                    :default value: [[0], [0]]
                    :type: ``list``
                    :read only: True

                previous_variable
                    see `previous_variable <GradientOptimization.previous_variable>`

                    :default value: [[0], [0]]
                    :type: ``list``
                    :read only: True

                step_size
                    see `step_size <GradientOptimization.step_size>`

                    :default value: 1.0
                    :type: ``float``
        """
        variable = Parameter([[0], [0]], read_only=True, pnl_internal=True, constructor_argument='default_variable')

        # these should be removed and use switched to .get_previous()
        previous_variable = Parameter([[0], [0]], read_only=True, pnl_internal=True, constructor_argument='default_variable')
        previous_value = Parameter([[0], [0]], read_only=True, initializer='initializer')

        gradient_function = Parameter(None, stateful=False, loggable=False)
        step_size = Parameter(1.0, modulable=True)
        annealing_function = Parameter(None, stateful=False, loggable=False)
        convergence_threshold = Parameter(.001, modulable=True)
        max_iterations = Parameter(1000, modulable=True)
        search_space = Parameter([SampleIterator([0, 0])], stateful=False, loggable=False)

        direction = ASCENT
        convergence_criterion = Parameter(VALUE, pnl_internal=True)

        def _parse_direction(self, direction):
            if direction == ASCENT:
                return np.array(1)
            else:
                return np.array(-1)

    @check_user_specified
    @beartype
    def __init__(self,
                 default_variable=None,
                 objective_function: Optional[Callable] = None,
                 gradient_function: Optional[Callable] = None,
                 direction: Optional[Literal['ascent', 'descent']] = None,
                 search_space=None,
                 step_size: Optional[Union[int, float]] = None,
                 annealing_function: Optional[Callable] = None,
                 convergence_criterion: Optional[Literal['variable', 'value']] = None,
                 convergence_threshold: Optional[Union[int, float]] = None,
                 max_iterations: Optional[int] = None,
                 save_samples: Optional[bool] = None,
                 save_values: Optional[bool] = None,
                 params=None,
                 owner=None,
                 prefs=None):

        search_function = self._follow_gradient
        search_termination_function = self._convergence_condition

        super().__init__(
            default_variable=default_variable,
            objective_function=objective_function,
            search_function=search_function,
            search_space=search_space,
            search_termination_function=search_termination_function,
            max_iterations=max_iterations,
            save_samples=save_samples,
            save_values=save_values,
            step_size=step_size,
            convergence_criterion=convergence_criterion,
            convergence_threshold=convergence_threshold,
            gradient_function=gradient_function,
            annealing_function=annealing_function,
            params=params,
            owner=owner,
            prefs=prefs,
        )

    def _validate_params(self, request_set, target_set=None, context=None):

        super()._validate_params(request_set=request_set, target_set=target_set, context=context)

        if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
            search_space = request_set[SEARCH_SPACE]
            if all(s is None for s in search_space):
                return
            # If search space is a single 2-item list or tuple with numbers (i.e., bounds),
            #     wrap in list for handling below
            if len(search_space)==2 and all(isinstance(i, Number) for i in search_space):
                search_space = [search_space]
            for s in search_space:
                if isinstance(s, SampleIterator):
                    s = s()
                if len(s) != 2:
                    owner_str = ''
                    if self.owner:
                        owner_str = f' of {self.owner.name}'
                    raise OptimizationFunctionError(f"All items in {repr(SEARCH_SPACE)} arg for {self.name}{owner_str} "
                                                    f"must be or resolve to a 2-item list or tuple; this doesn't: {s}.")

    @handle_external_context(fallback_most_recent=True)
    def reset(self, default_variable=None, objective_function=None, context=None, **kwargs):
        super().reset(
            objective_function=objective_function,
            context=context,
            **kwargs
        )

        # Differentiate objective_function using torch.func.grad()
        if objective_function is not None and not self.gradient_function:

            if torch is None:
                raise ValueError("PyTorch is not installed. Please install PyTorch to use GradientOptimization without "
                                 "specifying a gradient_function.")

            if 'func' not in dir(torch):
                raise ValueError("torch.func.grad not found. PyTorch version is probably too old. Please upgrade "
                                 "PyTorch to >= 2.0 to use GradientOptimization without specifying a "
                                 "gradient_function.")

            try:
                # Wrap objective_function so that it returns a torch tensor, as required by torch.func.grad
                def func_wrapper(x, context):
                    return torch.tensor(self.objective_function(x, context))

                # Get the gradient of the objective function with pytorch autograd
                gradient_func = torch.func.grad(func_wrapper)

                # Wrap the gradient function as well, to convert its input from (and output back to) numpy
                def gradient_func_wrapper(x, context):
                    return gradient_func(torch.from_numpy(x), context).detach().numpy()

                self.parameters.gradient_function._set(gradient_func_wrapper, context)

            except Exception as ex:

                raise OptimizationFunctionError("Unable to use PyTorch autograd with {} specified for {} Function: {}.".
                                                format(repr(OBJECTIVE_FUNCTION), self.__class__.__name__,
                                                       objective_function.__name__)) from ex

        search_space = self.search_space
        bounds = None

        if self.owner:
            owner_str = f' of {self.owner.name}'
        else:
            owner_str = ''

        # Get bounds from search_space if it has any non-None entries
        if any(i is not None for i in self.search_space):
            # Get min and max of each dimension of search space
            #    and assign to corresponding elements of lower and upper items of bounds
            lower = []
            upper = []
            bounds = (lower, upper)
            for i in search_space:
                if i is None:
                    lower.append(None)
                    upper.append(None)
                else:
                    if isinstance(i, SampleIterator):
                        i = i()
                    # Spec is bound (tuple or list with two values: lower and upper)
                    if len(i)==2:
                        lower.append(i[0])
                        upper.append(i[1])
                    else:
                        lower.append(min(i))
                        upper.append(max(i))

        # Validate bounds and reformat into arrays for lower and upper bounds, for use in _follow_gradient
        #     (each should be same length as sample), and replace any None's with + or - inf
        if bounds:
            if bounds[0] is None and bounds[1] is None:
                bounds = None
            else:
                sample_len = len(default_variable)
                lower = np.atleast_1d(bounds[0])
                if len(lower)==1:
                    # Single value specified for lower bound, so distribute over array with length = sample_len
                    lower = np.full(sample_len, lower).reshape(sample_len,1)
                elif len(lower)!=sample_len:
                    raise OptimizationFunctionError(f"Array used for lower value of {repr(BOUNDS)} arg ({lower}) in "
                                                    f"{self.name}{owner_str} must have the same number of elements "
                                                    f"({sample_len}) as the sample over which optimization is being "
                                                    f"performed.")
                # Array specified for lower bound, so replace any None's with -inf
                lower = np.array([[-float('inf')] if n[0] is None else n for n in lower.reshape(sample_len,1)])

                upper = np.atleast_1d(bounds[1])
                if len(upper)==1:
                    # Single value specified for upper bound, so distribute over array with length = sample_len
                    upper = np.full(sample_len, upper).reshape(sample_len,1)
                elif len(upper)!=sample_len:
                    raise OptimizationFunctionError(f"Array used for upper value of {repr(BOUNDS)} arg ({upper}) in "
                                                    f"{self.name}{owner_str} must have the same number of elements "
                                                    f"({sample_len}) as the sample over which optimization is being "
                                                    f"performed.")
                # Array specified for upper bound, so replace any None's with +inf
                upper = np.array([[float('inf')] if n[0] is None else n for n in upper.reshape(sample_len,1)])

                if not all(lower <= upper):
                    raise OptimizationFunctionError(f"Specification of {repr(BOUNDS)} arg ({bounds}) for {self.name}"
                                                    f"{owner_str} resulted in lower > corresponding upper for one or "
                                                    f"more elements (lower: {lower.tolist()}; upper: {upper.tolist()}).")

                bounds = (lower,upper)

        self.bounds = bounds

    def _function(self,
                 variable=None,
                 context=None,
                 params=None,
                 **kwargs):
        """Return the sample that yields the optimal value of `objective_function
        <GradientOptimization.objective_function>`, and possibly all samples evaluated and their corresponding values.

        Optimal value is defined by `direction <GradientOptimization.direction>`:
        - if *ASCENT*, returns greatest value
        - if *DESCENT*, returns least value

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : ndarray, list, list
            first array contains sample that yields the highest or lowest value of `objective_function
            <GradientOptimization.objective_function>`, depending on `direction <GradientOptimization.direction>`,
            and the second array contains the value of the function for that sample.
            If `save_samples <GradientOptimization.save_samples>` is `True`, first list contains all the values
            sampled in the order they were evaluated; otherwise it is empty.  If `save_values
            <GradientOptimization.save_values>` is `True`, second list contains the values returned by
            `objective_function <GradientOptimization.objective_function>` for all the samples in the order they were
            evaluated; otherwise it is empty.
        """

        optimal_sample, optimal_value, all_samples, all_values = super()._evaluate(variable=variable,
                                                                                  context=context,
                                                                                  params=params,
                                                                                  )
        return_all_samples = return_all_values = []
        if self.parameters.save_samples._get(context):
            return_all_samples = all_samples
        if self.parameters.save_values._get(context):
            return_all_values = all_values
        # return last_variable
        return optimal_sample, optimal_value, return_all_samples, return_all_values

    def _follow_gradient(self, sample, sample_num, context=None):

        if self.gradient_function is None:
            return sample

        # Index from 1 rather than 0
        # Update step_size
        step_size = self.parameters.step_size._get(context)
        if sample_num == 0:
            # Start from initial value (specified by user in step_size arg)
            step_size = self.parameters.step_size.default_value
            self.parameters.step_size._set(step_size, context)
        if self.annealing_function:
            step_size = call_with_pruned_args(self.annealing_function, step_size, sample_num, context=context)
            self.parameters.step_size._set(step_size, context)

        # Compute gradients with respect to current sample
        _gradients = call_with_pruned_args(self.gradient_function, sample, context=context)

        # Get new sample based on new gradients
        new_sample = sample + self.parameters.direction._get(context) * step_size * np.array(_gradients)

        # Constrain new sample to be within bounds
        if self.bounds:
            new_sample = np.array(np.maximum(self.bounds[0],
                                             np.minimum(self.bounds[1], new_sample))).reshape(sample.shape)

        return new_sample

    def _convergence_condition(self, variable, value, iteration, context=None):
        previous_variable = self.parameters.previous_variable._get(context)
        previous_value = self.parameters.previous_value._get(context)

        if iteration == 0:
            # self._convergence_metric = self.convergence_threshold + EPSILON
            self.parameters.previous_variable._set(variable, context)
            self.parameters.previous_value._set(value, context)
            return False

        # Evaluate for convergence
        if self.convergence_criterion == VALUE:
            convergence_metric = np.abs(value - previous_value)
        else:
            convergence_metric = np.max(np.abs(np.array(variable) -
                                               np.array(previous_variable)))

        self.parameters.previous_variable._set(variable, context)
        self.parameters.previous_value._set(value, context)

        return convergence_metric <= self.parameters.convergence_threshold._get(context)


MAXIMIZE = 'maximize'
MINIMIZE = 'minimize'


class GridSearch(OptimizationFunction):
    """
    GridSearch(                      \
        default_variable=None,       \
        objective_function=None,     \
        direction=MAXIMIZE,          \
        max_iterations=1000,         \
        save_samples=False,          \
        save_values=False,           \
        params=None,                 \
        owner=None,                  \
        prefs=None                   \
        )

    Search over all samples generated by `search_space <GridSearch.search_space>` for the one that optimizes the
    value of `objective_function <GridSearch.objective_function>`.

    .. _GridSearch_Procedure:

    **Grid Search Procedure**

    When `function <GridSearch.function>` is executed, it iterates over the following steps:

        - get next sample from `search_space <GridSearch.search_space>`;
        ..
        - compute value of `objective_function <GridSearch.objective_function>` for that sample;

    The current iteration is contained in `iteration <GridSearch.iteration>` and the total number comprising the
    `search_space <GridSearch.search_space>` is contained in `num_iterations <GridSearch.num_iterations>`.
    Iteration continues until all values in `search_space <GridSearch.search_space>` have been evaluated (i.e.,
    `num_iterations <GridSearch.num_iterations>` is reached), or `max_iterations <GridSearch.max_iterations>` is
    exceeded.  The function returns the sample that yielded either the highest (if `direction <GridSearch.direction>`
    is *MAXIMIZE*) or lowest (if `direction <GridSearch.direction>` is *MINIMIZE*) value of the `objective_function
    <GridSearch.objective_function>`, along with the value for that sample, as well as lists containing all of the
    samples evaluated and their values if `save_samples <GridSearch.save_samples>` and/or `save_values
    <GridSearch.save_values>` are `True`, respectively.
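
    The procedure is equivalent to the following sketch (for illustration only, assuming finite,
    already-materialized dimensions and a `direction <GridSearch.direction>` of *MAXIMIZE*)::

        import itertools

        def grid_search(objective, search_space):
            best_sample, best_value = None, float('-inf')
            # iterate over the Cartesian product of all dimensions (cf. `grid <GridSearch.grid>`)
            for sample in itertools.product(*search_space):
                value = objective(sample)
                if value > best_value:
                    best_sample, best_value = sample, value
            return best_sample, best_value

        grid_search(lambda s: -sum(x ** 2 for x in s), [[-1, 0, 1], [2, 3]])   # -> ((0, 2), -4)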

    Arguments
    ---------

    default_variable : list or ndarray : default None
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
        `objective_function <GridSearch.objective_function>`.

    objective_function : function or method
        specifies function used to evaluate sample in each iteration of the `optimization process <GridSearch_Procedure>`;
        it must be specified and must return a scalar value.

    search_space : list or array of SampleIterators
        specifies `SampleIterators <SampleIterator>` used to generate samples evaluated by `objective_function
        <GridSearch.objective_function>`;  all of the iterators must be finite (i.e., must have a `num <SampleIterator>`
        attribute;  see `SampleSpec` for additional details).
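
        For example, a (hypothetical) two-dimensional grid could be specified as
        ``search_space=[SampleIterator([0.1, 0.5, 0.9]), SampleIterator([1, 2])]``,
        which generates 3 x 2 = 6 samples.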

    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
        specifies the direction of optimization:  if *MAXIMIZE*, the highest value of `objective_function
        <GridSearch.objective_function>` is sought;  if *MINIMIZE*, the lowest value is sought.

    max_iterations : int : default 1000
        specifies the maximum number of times the `optimization process <GridSearch_Procedure>` is allowed to iterate;
        if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.

    save_samples : bool
        specifies whether or not to save and return all of the samples used to evaluate `objective_function
        <GridSearch.objective_function>` in the `optimization process <GridSearch_Procedure>`
        (i.e., a copy of the samples generated from the `search_space <GridSearch.search_space>`).

    save_values : bool
        specifies whether or not to save and return the values of `objective_function <GridSearch.objective_function>`
        for all samples evaluated in the `optimization process <GridSearch_Procedure>`.

    Attributes
    ----------

    variable : ndarray
        first sample evaluated by `objective_function <GridSearch.objective_function>` (i.e., one used to evaluate it
        in the first iteration of the `optimization process <GridSearch_Procedure>`).

    objective_function : function or method
        function used to evaluate sample in each iteration of the `optimization process <GridSearch_Procedure>`.

    search_space : list or array of SampleIterators
        contains `SampleIterators <SampleIterator>` for generating samples evaluated by `objective_function
        <GridSearch.objective_function>` in iterations of the `optimization process <GridSearch_Procedure>`.

    grid : iterator
        generates samples from the Cartesian product of the `SampleIterators <SampleIterator>` in `search_space
        <GridSearch.search_space>`.

    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
        determines the direction of optimization:  if *MAXIMIZE*, the greatest value of `objective_function
        <GridSearch.objective_function>` is sought;  if *MINIMIZE*, the least value is sought.

    iteration : int
        the current iteration of the `optimization process <GridSearch_Procedure>`.

    num_iterations : int
        number of iterations required to complete the entire grid search;  equal to the product of all the `num
        <SampleIterator.num>` attributes of the `SampleIterators <SampleIterator>` in the `search_space
        <GridSearch.search_space>`.

    max_iterations : int
        determines the maximum number of times the `optimization process <GridSearch_Procedure>` is allowed to iterate;
        if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.

    save_samples : bool
        determines whether or not to save and return all samples generated from `search_space <GridSearch.search_space>`
        and evaluated by the `objective_function <GridSearch.objective_function>` in the `optimization process
        <GridSearch_Procedure>`.

    save_values : bool
        determines whether or not to save and return the value of `objective_function
        <GridSearch.objective_function>` for all samples evaluated in the `optimization process <GridSearch_Procedure>`.
    """

    componentName = GRID_SEARCH_FUNCTION

    class Parameters(OptimizationFunction.Parameters):
        """
            Attributes
            ----------

                direction
                    see `direction <GridSearch.direction>`

                    :default value: `MAXIMIZE`
                    :type: ``str``

                grid
                    see `grid <GridSearch.grid>`

                    :default value: None
                    :type:

                random_state
                    see `random_state <GridSearch.random_state>`

                    :default value: None
                    :type: ``numpy.random.RandomState``

                save_samples
                    see `save_samples <GridSearch.save_samples>`

                    :default value: False
                    :type: ``bool``

                save_values
                    see `save_values <GridSearch.save_values>`

                    :default value: False
                    :type: ``bool``
        """
        save_samples = Parameter(False, pnl_internal=True)
        save_values = Parameter(False, pnl_internal=True)
        random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
        seed = Parameter(DEFAULT_SEED(), modulable=True, fallback_value=DEFAULT, setter=_seed_setter)
        select_randomly_from_optimal_values = Parameter(False)

        direction = MAXIMIZE

    # TODO: should save_values be in the constructor if it's ignored?
    # is False or True the correct value?
    @check_user_specified
    @beartype
    def __init__(self,
                 default_variable=None,
                 objective_function: Optional[Callable] = None,
                 search_space=None,
                 direction: Optional[Literal['maximize', 'minimize']] = None,
                 save_samples: Optional[bool] = None,
                 save_values: Optional[bool] = None,
                 # tolerance=0.,
                 select_randomly_from_optimal_values=None,
                 seed=None,
                 params=None,
                 owner=None,
                 prefs=None,
                 **kwargs):

        search_function = self._traverse_grid
        search_termination_function = self._grid_complete
        self._return_values = save_values
        self._return_samples = save_samples
        try:
            search_space = [x if isinstance(x, SampleIterator) else SampleIterator(x) for x in search_space]
        except TypeError:
            pass

        self.num_iterations = 1 if search_space is None else np.prod([i.num for i in search_space])
        # self.tolerance = tolerance

        super().__init__(
            default_variable=default_variable,
            objective_function=objective_function,
            search_function=search_function,
            search_termination_function=search_termination_function,
            search_space=search_space,
            select_randomly_from_optimal_values=select_randomly_from_optimal_values,
            save_samples=save_samples,
            save_values=save_values,
            seed=seed,
            direction=direction,
            params=params,
            owner=owner,
            prefs=prefs,
        )

    def _validate_params(self, request_set, target_set=None, context=None):

        super()._validate_params(request_set=request_set, target_set=target_set, context=context)
        if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
            search_space = request_set[SEARCH_SPACE]

            # Check that all iterators are finite (i.e., with num!=None)
            if not all(s.num is not None for s in search_space if (s is not None and s.num)):
                raise OptimizationFunctionError("All {}s in {} arg of {} must be finite (i.e., SampleIterator.num!=None)".
                                                format(SampleIterator.__name__,
                                                       repr(SEARCH_SPACE),
                                                       self.__class__.__name__))

            # # Check that all finite iterators (i.e., with num!=None) are of the same length:
            # finite_iterators = [s.num for s in search_space if s.num is not None]
            # if not all(l==finite_iterators[0] for l in finite_iterators):
            #     raise OptimizationFunctionError("All finite {}s in {} arg of {} must have the same number of steps".
            #                                     format(SampleIterator.__name__,
            #                                            repr(SEARCH_SPACE),
            #                                            self.__class__.__name__,
            #                                            ))

    @handle_external_context(fallback_most_recent=True)
    def reset(self, search_space, context=None, **kwargs):
        """Assign size of `search_space <GridSearch.search_space>`"""
        super(GridSearch, self).reset(search_space=search_space, context=context, **kwargs)
        sample_iterators = search_space
        owner_str = ''
        if self.owner:
            owner_str = f' of {self.owner.name}'
        for i in sample_iterators:
            if i is None:
                raise OptimizationFunctionError(f"Invalid {repr(SEARCH_SPACE)} arg for {self.name}{owner_str}; "
                                                f"every dimension must be assigned a {SampleIterator.__name__}.")
            if i.num is None:
                raise OptimizationFunctionError(f"Invalid {repr(SEARCH_SPACE)} arg for {self.name}{owner_str}; each "
                                                f"{SampleIterator.__name__} must have a value for its 'num' attribute.")

        self.num_iterations = np.prod([i.num for i in sample_iterators])

    def _get_optimized_controller(self):
        # self.objective_function may be a bound method of
        # OptimizationControlMechanism
        return getattr(self.objective_function, '__self__', None)

    def _gen_llvm_function(self, *, ctx:pnlvm.LLVMBuilderContext, tags:frozenset):
        if "select_min" in tags:
            return self._gen_llvm_select_min_function(ctx=ctx, tags=tags)
        ocm = self._get_optimized_controller()
        if ocm is not None:
            # self.objective_function may be a bound method of
            # OptimizationControlMechanism
            extra_args = [ctx.get_param_struct_type(ocm.agent_rep).as_pointer(),
                          ctx.get_state_struct_type(ocm.agent_rep).as_pointer(),
                          ctx.get_data_struct_type(ocm.agent_rep).as_pointer()]
        else:
            extra_args = []

        f = super()._gen_llvm_function(ctx=ctx, extra_args=extra_args, tags=tags)
        if len(extra_args) > 0:
            for a in f.args[-len(extra_args):]:
                a.attributes.add('nonnull')

        return f

    def _get_input_struct_type(self, ctx):
        if self.owner is not None:
            variable = [port.defaults.value for port in self.owner.input_ports]
            # Python list does not care about ndarrays of different lengths
            # we do care, so convert to tuple to create struct
            if all(type(x) == np.ndarray for x in variable) and not all(len(x) == len(variable[0]) for x in variable):
                variable = tuple(variable)

            warnings.warn("Shape mismatch: {} variable expected: {} vs. got: {}".format(
                          self, variable, self.defaults.variable),
                          pnlvm.PNLCompilerWarning)

        else:
            variable = self.defaults.variable

        return ctx.convert_python_struct_to_llvm_ir(variable)

    def _get_output_struct_type(self, ctx):
        val = self.defaults.value
        # compiled version should never return 'all values'
        if len(val[0]) != len(self.search_space):
            val = list(val)
            val[0] = [0.0] * len(self.search_space)
        return ctx.convert_python_struct_to_llvm_ir((val[0], val[1]))

    def _gen_llvm_select_min_function(self, *, ctx:pnlvm.LLVMBuilderContext, tags:frozenset):
        assert "select_min" in tags
        ocm = self._get_optimized_controller()
        if ocm is not None:
            assert ocm.function is self
            sample_t = ocm._get_evaluate_alloc_struct_type(ctx)
            value_t = ocm._get_evaluate_output_struct_type(ctx, tags=tags)
        else:
            obj_func = ctx.import_llvm_function(self.objective_function)
            sample_t = obj_func.args[2].type.pointee
            value_t = obj_func.args[3].type.pointee

        args = [ctx.get_param_struct_type(self).as_pointer(),
                ctx.get_state_struct_type(self).as_pointer(),
                sample_t.as_pointer(),
                sample_t.as_pointer(),
                value_t.as_pointer(),
                value_t.as_pointer(),
                ctx.float_ty.as_pointer(),
                ctx.int32_ty,
                ctx.int32_ty]
        builder = ctx.create_llvm_function(args, self, tags=tags)

        params, state, min_sample_ptr, samples_ptr, min_value_ptr, values_ptr, opt_count_ptr, start, stop = builder.function.args
        for p in builder.function.args[:-2]:
            p.attributes.add('noalias')

        # The creation helper function sets all pointers to non-null
        # remove the attribute for 'samples_ptr'.
        samples_ptr.attributes.remove('nonnull')

        random_state = ctx.get_random_state_ptr(builder, self, state, params)
        select_random_ptr = ctx.get_param_or_state_ptr(builder, self, self.parameters.select_randomly_from_optimal_values, param_struct_ptr=params)

        select_random_val = builder.load(select_random_ptr)
        select_random = builder.fcmp_ordered("!=", select_random_val,
                                             select_random_val.type(0))

        rand_out_ptr = builder.alloca(ctx.float_ty)

        # KDM 8/22/19: nonstateful direction here - OK?
        direction = "<" if self.direction == MINIMIZE else ">"
        replace_ptr = builder.alloca(ctx.bool_ty)

        min_idx_ptr = builder.alloca(stop.type)
        builder.store(stop.type(-1), min_idx_ptr)

        # Check the value against current min
        with pnlvm.helpers.for_loop(builder, start, stop, stop.type(1), "compare_loop") as (b, idx):
            value_ptr = b.gep(values_ptr, [idx])
            value = b.load(value_ptr)
            min_value = b.load(min_value_ptr)

            replace = b.fcmp_unordered(direction, value, min_value)
            b.store(replace, replace_ptr)

            # Python does "is_close" check first.
            # This implements reservoir sampling
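            # (A sketch of the idea: among k equally-optimal values seen so far,
            # the j-th tie replaces the incumbent with probability 1/j and survives
            # the remaining ties with probability j/k, so each of the k tied optima
            # is ultimately selected with probability 1/k.)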
            with b.if_then(select_random):
                close = pnlvm.helpers.is_close(ctx, b, value, min_value)
                with b.if_else(close) as (tb, eb):
                    with tb:
                        opt_count = b.load(opt_count_ptr)
                        opt_count = b.fadd(opt_count, opt_count.type(1))
                        b.store(opt_count, opt_count_ptr)

                        # Roll a dice to see if we should replace the current min
                        prob = b.fdiv(opt_count.type(1), opt_count)
                        rand_f = ctx.get_uniform_dist_function_by_state(random_state)
                        b.call(rand_f, [random_state, rand_out_ptr])
                        rand_out = b.load(rand_out_ptr)
                        replace = b.fcmp_ordered("<", rand_out, prob)
                        b.store(replace, replace_ptr)
                    with eb:
                        # Reset the counter if we are replacing with new best value
                        with b.if_then(b.load(replace_ptr)):
                            b.store(opt_count_ptr.type.pointee(1), opt_count_ptr)

            with b.if_then(b.load(replace_ptr)):
                b.store(idx, min_idx_ptr)
                b.store(b.load(value_ptr), min_value_ptr)

        min_idx = builder.load(min_idx_ptr)
        found_min = builder.icmp_signed("!=", min_idx, min_idx.type(-1))

        with builder.if_then(found_min):
            gen_samples = builder.icmp_signed("==", samples_ptr, samples_ptr.type(None))
            with builder.if_else(gen_samples) as (b_true, b_false):
                with b_true:
                    search_space = ctx.get_param_or_state_ptr(builder, self, self.parameters.search_space.name, param_struct_ptr=params)
                    pnlvm.helpers.create_sample(b, min_sample_ptr, search_space, min_idx)
                with b_false:
                    sample_ptr = builder.gep(samples_ptr, [min_idx])
                    builder.store(b.load(sample_ptr), min_sample_ptr)

        builder.ret_void()
        return builder.function

    def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, *, tags:frozenset):
        controller = self._get_optimized_controller()
        if controller is not None:
            assert controller.function is self
            obj_func = ctx.import_llvm_function(controller, tags=tags.union({"evaluate", "evaluate_type_objective"}))
            comp_args = builder.function.args[-3:]
            obj_param_ptr = comp_args[0]
            obj_state_ptr = comp_args[1]

            # Construct input
            comp_input = builder.alloca(obj_func.args[4].type.pointee, name="sim_input")

            input_initialized = [False] * len(comp_input.type.pointee)
            for src_idx, ip in enumerate(controller.input_ports):
                if ip.shadow_inputs is None:
                    continue

                # shadow inputs point to an input port of a node.
                # If that node takes direct input, it will have an associated
                # (input_port, output_port) in the input_CIM.
                # Take the former as an index to composition input variable.
                cim_in_port = controller.agent_rep.input_CIM_ports[ip.shadow_inputs][0]
                dst_idx = controller.agent_rep.input_CIM.input_ports.index(cim_in_port)

                # Check that all inputs are unique
                assert not input_initialized[dst_idx], "Double initialization of input {}".format(dst_idx)
                input_initialized[dst_idx] = True

                src = builder.gep(arg_in, [ctx.int32_ty(0), ctx.int32_ty(src_idx)])
                # Destination is a struct of 2d arrays
                dst = builder.gep(comp_input, [ctx.int32_ty(0),
                                               ctx.int32_ty(dst_idx),
                                               ctx.int32_ty(0)])
                builder.store(builder.load(src), dst)

            # Assert that we have populated all inputs
            assert all(input_initialized), \
              "Not all inputs to the simulated composition are initialized: {}".format(input_initialized)

            num_inputs = builder.alloca(obj_func.args[6].type.pointee, name="num_sim_inputs")
            builder.store(num_inputs.type.pointee(1), num_inputs)

            # Extra args: input, data, number of inputs
            extra_args = [comp_input, comp_args[2], num_inputs]
        else:
            obj_func = ctx.import_llvm_function(self.objective_function)
            obj_param_ptr, obj_state_ptr = ctx.get_param_or_state_ptr(builder, self, "objective_function",
                                                                      param_struct_ptr=params, state_struct_ptr=state)
            extra_args = []

        sample_t = obj_func.args[2].type.pointee
        value_t = obj_func.args[3].type.pointee
        min_sample_ptr = builder.alloca(sample_t)
        min_value_ptr = builder.alloca(value_t)
        sample_ptr = builder.alloca(sample_t)
        value_ptr = builder.alloca(value_t)

        search_space_ptr = ctx.get_param_or_state_ptr(builder, self, self.parameters.search_space, param_struct_ptr=params)

        opt_count_ptr = builder.alloca(ctx.float_ty)
        builder.store(opt_count_ptr.type.pointee(0), opt_count_ptr)

        # Use NaN here. fcmp_unordered below returns true if one of the
        # operands is a NaN. This makes sure we always set min_*
        # in the first iteration
        builder.store(min_value_ptr.type.pointee(float("NaN")), min_value_ptr)

        b = builder
        with contextlib.ExitStack() as stack:
            for i in range(len(search_space_ptr.type.pointee)):
                dimension = b.gep(search_space_ptr, [ctx.int32_ty(0), ctx.int32_ty(i)])
                arg_elem = b.gep(sample_ptr, [ctx.int32_ty(0), ctx.int32_ty(i)])
                if isinstance(dimension.type.pointee,  pnlvm.ir.ArrayType):
                    b, idx = stack.enter_context(pnlvm.helpers.array_ptr_loop(b, dimension, "loop_" + str(i)))
                    alloc_elem = b.gep(dimension, [ctx.int32_ty(0), idx])
                    b.store(b.load(alloc_elem), arg_elem)
                elif isinstance(dimension.type.pointee, pnlvm.ir.LiteralStructType):
                    assert len(dimension.type.pointee) == 3
                    start_ptr = b.gep(dimension, [ctx.int32_ty(0), ctx.int32_ty(0)])
                    step_ptr = b.gep(dimension, [ctx.int32_ty(0), ctx.int32_ty(1)])
                    num_ptr = b.gep(dimension, [ctx.int32_ty(0), ctx.int32_ty(2)])
                    start = b.load(start_ptr)
                    step = b.load(step_ptr)
                    num = b.load(num_ptr)
                    b, idx = stack.enter_context(pnlvm.helpers.for_loop_zero_inc(b, num, "loop_" + str(i)))
                    val = b.uitofp(idx, start.type)
                    val = b.fmul(val, step)
                    val = b.fadd(val, start)
                    b.store(val, arg_elem)
                else:
                    assert False, "Unknown dimension type: {}".format(dimension.type)

            # We are now in the innermost loop, with sample_ptr set up for execution
            b.call(obj_func, [obj_param_ptr, obj_state_ptr, sample_ptr,
                              value_ptr] + extra_args)

            # Check if smaller than current best.
            # The argument pointers are already offset, so use range <0,1)
            min_tags = tags.union({"select_min", "evaluate_type_objective"})
            select_min_f = ctx.import_llvm_function(self, tags=min_tags)
            b.call(select_min_f, [params, state, min_sample_ptr, sample_ptr,
                                  min_value_ptr, value_ptr, opt_count_ptr,
                                  ctx.int32_ty(0), ctx.int32_ty(1)])

            builder = b

        # Produce output
        out_sample_ptr = builder.gep(arg_out, [ctx.int32_ty(0), ctx.int32_ty(0)])
        out_value_ptr = builder.gep(arg_out, [ctx.int32_ty(0), ctx.int32_ty(1)])
        builder.store(builder.load(min_sample_ptr), out_sample_ptr)
        builder.store(builder.load(min_value_ptr), out_value_ptr)
        return builder

    def _function(self,
                 variable=None,
                 context=None,
                 params=None,
                 **kwargs):
        """Return the sample that yields the optimal value of `objective_function <GridSearch.objective_function>`,
        and possibly all samples evaluated and their corresponding values.

        Optimal value is defined by `direction <GridSearch.direction>`:
        - if *MAXIMIZE*, returns greatest value
        - if *MINIMIZE*, returns least value

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : ndarray, list, list
            first array contains sample that yields the highest or lowest value of `objective_function
            <GridSearch.objective_function>`, depending on `direction <GridSearch.direction>`, and the
            second array contains the value of the function for that sample. If `save_samples
            <GridSearch.save_samples>` is `True`, first list contains all the values sampled in the order they were
            evaluated; otherwise it is empty.  If `save_values <GridSearch.save_values>` is `True`, second list
            contains the values returned by `objective_function <GridSearch.objective_function>` for all the samples
            in the order they were evaluated; otherwise it is empty.
        """

        self.reset_grid(context)
        return_all_samples = return_all_values = []

        direction = self.parameters.direction._get(context)
        if MPI_IMPLEMENTATION:

            from mpi4py import MPI
×
1996

1997
            Comm = MPI.COMM_WORLD
×
1998
            rank = Comm.Get_rank()
×
1999
            size = Comm.Get_size()
×
2000

2001
            self.search_space = np.atleast_2d(self.search_space)
×
2002

2003
            chunk_size = (len(self.search_space) + (size - 1)) // size
×
2004
            start = chunk_size * rank
×
2005
            stop = chunk_size * (rank + 1)
×
2006
            if start > len(self.search_space):
×
2007
                start = len(self.search_space)
×
2008
            if stop > len(self.search_space):
×
2009
                stop = len(self.search_space)
×
2010

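            # Worked example of the chunking above (illustrative, assuming
            # len(search_space) == 10 and size == 4 ranks): chunk_size == 3,
            # so the ranks scan slices [0:3], [3:6], [6:9] and [9:10] -- the
            # last rank's stop is clamped to the search-space length.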
            # FIX:  INITIALIZE TO FULL LENGTH AND ASSIGN DEFAULT VALUES (MORE EFFICIENT):
            samples = np.array([[]])
            optimal_sample = np.empty_like(self.search_space[0])
            values = np.array([])
            # Initialize to the worst value for the given direction, so the first
            # sample evaluated always replaces it (initializing to -inf is
            # correct only for MAXIMIZE)
            optimal_value = float('-inf') if direction == MAXIMIZE else float('inf')
            sample_value_max_tuple = (optimal_sample, optimal_value)

            # Set up progress bar
            _show_progress = False
            if hasattr(self, OWNER) and self.owner and self.owner.prefs.reportOutputPref is SIMULATION_PROGRESS:
                _show_progress = True
                _progress_bar_char = '.'
                _progress_bar_rate_str = ""
                _search_space_size = len(self.search_space)
                # print roughly one progress character per 1% of the search space
                _progress_bar_rate = int(10**(np.log10(_search_space_size) - 2))
                if _progress_bar_rate > 1:
                    _progress_bar_rate_str = str(_progress_bar_rate) + " "
                print("\n{} executing optimization process (one {} for each {}of {} samples): ".
                      format(self.owner.name, repr(_progress_bar_char), _progress_bar_rate_str, _search_space_size))
                _progress_bar_count = 0

            for sample in self.search_space[start:stop, :]:

                if _show_progress:
                    increment_progress_bar = (_progress_bar_rate < 1) or not (_progress_bar_count % _progress_bar_rate)
                    if increment_progress_bar:
                        print(_progress_bar_char, end='', flush=True)
                    _progress_bar_count += 1

                # Evaluate objective_function for current sample
                value = self.objective_function(sample, context=context)

                # Evaluate for optimal value
                if direction == MAXIMIZE:
                    optimal_value = max(value, optimal_value)
                elif direction == MINIMIZE:
                    optimal_value = min(value, optimal_value)
                else:
                    assert False, "PROGRAM ERROR: bad value for {} arg of {}: {}".\
                        format(repr(DIRECTION), self.name, direction)

                # FIX: PUT ERROR HERE IF value AND/OR value_max ARE EMPTY (E.G., WHEN EXECUTION_ID IS WRONG)
                # If value is optimal, store corresponding sample
                if value == optimal_value:
                    # Keep track of port values and allocation policy associated with EVC max
                    optimal_sample = sample
                    sample_value_max_tuple = (optimal_sample, optimal_value)

                # Save samples and/or values if specified
                if self.save_values:
                    # FIX:  ASSIGN BY INDEX (MORE EFFICIENT)
                    values = np.append(values, np.atleast_1d(value), axis=0)
                if self.save_samples:
                    if len(samples[0]) == 0:
                        samples = np.atleast_2d(sample)
                    else:
                        samples = np.append(samples, np.atleast_2d(sample), axis=0)

            # Aggregate, reduce and assign global results:
            # combine the best (sample, value) tuples from all processes and distribute them to all processes
            max_tuples = Comm.allgather(sample_value_max_tuple)
            # get the tuple with the best value across processes ("value_max of maxes"
            # for MAXIMIZE; for MINIMIZE the reduction must use min instead)
            reduce_across_ranks = max if direction == MAXIMIZE else min
            max_value_of_max_tuples = reduce_across_ranks(max_tuples, key=lambda max_tuple: max_tuple[1])
            # assign the globally reduced optimal sample and value, so the common
            # return at the end of this method reports the global (not rank-local) result
            optimal_sample = max_value_of_max_tuples[0]
            optimal_value = max_value_of_max_tuples[1]

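            # Illustrative example of the reduction above (hypothetical values):
            # with three ranks reporting (s0, 3.2), (s1, 4.1) and (s2, 4.1) under
            # MAXIMIZE, allgather gives every rank the same list, and the
            # reduction selects (s1, 4.1) -- ties are broken by gather order.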
            if self.parameters.save_samples._get(context):
                return_all_samples = np.concatenate(Comm.allgather(samples), axis=0)
            if self.parameters.save_values._get(context):
                return_all_values = np.concatenate(Comm.allgather(values), axis=0)

        else:
            assert direction == MAXIMIZE or direction == MINIMIZE, \
                "PROGRAM ERROR: bad value for {} arg of {}: {}". \
                    format(repr(DIRECTION), self.name, direction)

            # Evaluate objective_function for each sample
            last_sample, last_value, all_samples, all_values = self._evaluate(
                variable=variable,
                context=context,
                params=params,
            )

            # Compiled version
            ocm = self._get_optimized_controller()
            if ocm is not None and ocm.parameters.comp_execution_mode._get(context) in {"PTX", "LLVM"}:

                # Reduce array of values to min/max
                # select_min params are:
                # params, state, min_sample_ptr, sample_ptr, min_value_ptr, value_ptr, opt_count_ptr, start, stop
                min_tags = frozenset({"select_min", "evaluate_type_objective"})
                bin_func = pnlvm.LLVMBinaryFunction.from_obj(self, tags=min_tags, ctype_ptr_args=(0, 1, 3), dynamic_size_args=(5,))

                ct_param = bin_func.byref_arg_types[0](*self._get_param_initializer(context))
                ct_state = bin_func.byref_arg_types[1](*self._get_state_initializer(context))
                optimal_sample = bin_func.np_buffer_for_arg(2)
                optimal_value = bin_func.np_buffer_for_arg(4)
                number_of_optimal_values = bin_func.np_buffer_for_arg(6, fill_value=0)

                bin_func(ct_param,
                         ct_state,
                         optimal_sample,
                         None,                      # samples. NULL, it's generated by the function.
                         optimal_value,
                         all_values,
                         number_of_optimal_values,
                         0,                         # start
                         len(all_values))           # stop

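                # Note (per the comments in the Python branch below, which this
                # compiled path is stated to match): select_min scans all_values
                # over [start, stop), writes the optimum into optimal_sample /
                # optimal_value, and counts ties in number_of_optimal_values,
                # choosing among tied optima uniformly at random.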
            # Python version
            else:

                if all_values.shape[-1] != all_samples.shape[-1]:
                    raise ValueError(f"GridSearch Error: {self}._evaluate returned mismatched sizes for "
                                     f"samples and values. This is likely due to a bug in the implementation of "
                                     f"{self.__class__} _evaluate method.")

                if all_values.shape[0] > 1:
                    raise ValueError(f"GridSearch Error: {self}._evaluate returned values with more than one element. "
                                     "GridSearch currently does not support optimizing over multiple output values.")

                # Find the optimal value(s)
                optimal_value_count = 1
                value_sample_pairs = zip(all_values.flatten(),
                                         [all_samples[:, i] for i in range(all_samples.shape[1])])
                optimal_value, optimal_sample = next(value_sample_pairs)

                # The algorithm below implements "Reservoir sampling"[0]. This
                # matches the compiled implementation of "select_min". The
                # advantage of reservoir sampling is constant memory requirements
                # and a single pass over the evaluated values.
                # The disadvantage is multiple calls to the PRNG.
                # [0] https://en.wikipedia.org/wiki/Reservoir_sampling
                select_randomly = self.parameters.select_randomly_from_optimal_values._get(context)
                for value, sample in value_sample_pairs:
                    if select_randomly and np.allclose(value, optimal_value):
                        optimal_value_count += 1

                        # swap with probability = 1/optimal_value_count in order to achieve
                        # uniformly random selection from identical outcomes
                        probability = 1 / optimal_value_count
                        random_state = self._get_current_parameter_value("random_state", context)
                        random_value = random_state.rand()

                        if random_value < probability:
                            optimal_value, optimal_sample = value, sample

                    elif (value > optimal_value and direction == MAXIMIZE) or \
                            (value < optimal_value and direction == MINIMIZE):
                        optimal_value, optimal_sample = value, sample
                        optimal_value_count = 1

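                # Invariant of the loop above (illustrative check): after k tied
                # optima have been seen, each one is the retained incumbent with
                # probability 1/k.  E.g. for k = 3: the third tie is kept with
                # p = 1/3, while each earlier tie survives with (1/2) * (2/3) = 1/3.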
            if self.parameters.save_samples._get(context):
                self.parameters.saved_samples._set(all_samples, context)
                return_all_samples = all_samples
            if self.parameters.save_values._get(context):
                self.parameters.saved_values._set(all_values, context)
                return_all_values = all_values

        return optimal_sample, optimal_value, return_all_samples, return_all_values


class GaussianProcess(OptimizationFunction):
    """
    GaussianProcess(                 \
        default_variable=None,       \
        objective_function=None,     \
        search_space=None,           \
        direction=MAXIMIZE,          \
        max_iterations=1000,         \
        save_samples=False,          \
        save_values=False,           \
        params=None,                 \
        owner=None,                  \
        prefs=None                   \
        )

    Draw samples with dimensionality and bounds specified by `search_space <GaussianProcess.search_space>` and
    return one that optimizes the value of `objective_function <GaussianProcess.objective_function>`.

    .. _GaussianProcess_Procedure:

    **Gaussian Process Procedure**

    The number of items (`SampleIterators <SampleIterator>`) in `search_space <GaussianProcess.search_space>` determines
    the dimensionality of each sample evaluated by `objective_function <GaussianProcess.objective_function>`,
    with the `start <SampleIterator.start>` and `stop <SampleIterator.stop>` attributes of each `SampleIterator`
    specifying the bounds for sampling along the corresponding dimension.

    When `function <GaussianProcess.function>` is executed, it iterates over the following steps:

        - draw sample along each dimension of `search_space <GaussianProcess.search_space>`, within bounds
          specified by `start <SampleIterator.start>` and `stop <SampleIterator.stop>` attributes of each
          `SampleIterator` in the `search_space <GaussianProcess.search_space>` list.
        ..
        - compute value of `objective_function <GaussianProcess.objective_function>` for that sample;

    The current iteration is contained in `iteration <GaussianProcess.iteration>`. Iteration continues until [
    FRED: FILL IN THE BLANK], or `max_iterations <GaussianProcess.max_iterations>` is exceeded.  The function
    returns the sample that yielded either the highest (if `direction <GaussianProcess.direction>`
    is *MAXIMIZE*) or lowest (if `direction <GaussianProcess.direction>` is *MINIMIZE*) value of the `objective_function
    <GaussianProcess.objective_function>`, along with the value for that sample, as well as lists containing all of the
    samples evaluated and their values if either `save_samples <GaussianProcess.save_samples>` or `save_values
    <GaussianProcess.save_values>` is `True`, respectively.

    Arguments
    ---------

    default_variable : list or ndarray : default None
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
        `objective_function <GaussianProcess.objective_function>`.

    objective_function : function or method
        specifies function used to evaluate sample in each iteration of the `optimization process
        <GaussianProcess_Procedure>`; it must be specified and must return a scalar value.

    search_space : list or array
        specifies bounds of the samples used to evaluate `objective_function <GaussianProcess.objective_function>`
        along each dimension of `variable <GaussianProcess.variable>`;  each item must be a tuple the first element
        of which specifies the lower bound and the second of which specifies the upper bound.

    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
        specifies the direction of optimization:  if *MAXIMIZE*, the highest value of `objective_function
        <GaussianProcess.objective_function>` is sought;  if *MINIMIZE*, the lowest value is sought.

    max_iterations : int : default 1000
        specifies the maximum number of times the `optimization process <GaussianProcess_Procedure>` is allowed to
        iterate; if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.

    save_samples : bool
        specifies whether or not to return all of the samples used to evaluate `objective_function
        <GaussianProcess.objective_function>` in the `optimization process <GaussianProcess_Procedure>`
        (i.e., a copy of the `search_space <GaussianProcess.search_space>`).

    save_values : bool
        specifies whether or not to save and return the values of `objective_function <GaussianProcess.objective_function>`
        for all samples evaluated in the `optimization process <GaussianProcess_Procedure>`.

    Attributes
    ----------

    variable : ndarray
        template for sample evaluated by `objective_function <GaussianProcess.objective_function>`.

    objective_function : function or method
        function used to evaluate sample in each iteration of the `optimization process <GaussianProcess_Procedure>`.

    search_space : list or array
        contains tuples specifying bounds within which each dimension of `variable <GaussianProcess.variable>` is
        sampled, and used to evaluate `objective_function <GaussianProcess.objective_function>` in iterations of the
        `optimization process <GaussianProcess_Procedure>`.

    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
        determines the direction of optimization:  if *MAXIMIZE*, the greatest value of `objective_function
        <GaussianProcess.objective_function>` is sought;  if *MINIMIZE*, the least value is sought.

    iteration : int
        the current iteration of the `optimization process <GaussianProcess_Procedure>`.

    max_iterations : int
        determines the maximum number of times the `optimization process <GaussianProcess_Procedure>` is allowed to
        iterate; if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.

    save_samples : bool
        determines whether or not to save and return all samples evaluated by the `objective_function
        <GaussianProcess.objective_function>` in the `optimization process <GaussianProcess_Procedure>` (if the process
        completes, this should be identical to `search_space <GaussianProcess.search_space>`).

    save_values : bool
        determines whether or not to save and return the value of `objective_function
        <GaussianProcess.objective_function>` for all samples evaluated in the `optimization process <GaussianProcess_Procedure>`.
    """

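    # Minimal usage sketch (hypothetical, since this class is still a shell;
    # `quadratic_loss` is a placeholder for any callable returning a scalar):
    #
    #     def quadratic_loss(sample, context=None):
    #         return -float(np.sum(np.asarray(sample) ** 2))
    #
    #     gp = GaussianProcess(objective_function=quadratic_loss,
    #                          search_space=[(-1.0, 1.0), (0.0, 5.0)],
    #                          direction=MAXIMIZE)
    #
    # Executing the instance would then run the draw-sample / evaluate loop
    # described in the Procedure section above.
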
    componentName = GAUSSIAN_PROCESS_FUNCTION

    class Parameters(OptimizationFunction.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <GaussianProcess.variable>`

                    :default value: [[0], [0]]
                    :type: ``list``
                    :read only: True

                direction
                    see `direction <GaussianProcess.direction>`

                    :default value: `MAXIMIZE`
                    :type: ``str``

                save_samples
                    see `save_samples <GaussianProcess.save_samples>`

                    :default value: True
                    :type: ``bool``

                save_values
                    see `save_values <GaussianProcess.save_values>`

                    :default value: True
                    :type: ``bool``
        """
        variable = Parameter([[0], [0]], read_only=True, pnl_internal=True, constructor_argument='default_variable')

        save_samples = True
        save_values = True

        direction = MAXIMIZE

    # TODO: should save_values be in the constructor if it's ignored?
    # is False or True the correct value?
    @check_user_specified
    @beartype
    def __init__(self,
                 default_variable=None,
                 objective_function: Optional[Callable] = None,
                 search_space=None,
                 direction: Optional[Literal['maximize', 'minimize']] = None,
                 save_values: Optional[bool] = None,
                 params=None,
                 owner=None,
                 prefs=None,
                 **kwargs):

        search_function = self._gaussian_process_sample
        search_termination_function = self._gaussian_process_satisfied
        self._return_values = save_values
        self._return_samples = save_values
        self.direction = direction

        super().__init__(
            default_variable=default_variable,
            objective_function=objective_function,
            search_function=search_function,
            search_space=search_space,
            search_termination_function=search_termination_function,
            save_samples=True,
            save_values=save_values,
            params=params,
            owner=owner,
            prefs=prefs,
        )

    def _validate_params(self, request_set, target_set=None, context=None):
        super()._validate_params(request_set=request_set, target_set=target_set, context=context)
        # if SEARCH_SPACE in request_set:
        #     search_space = request_set[SEARCH_SPACE]
        #     # search_space must be specified
        #     if search_space is None:
        #         raise OptimizationFunctionError("The {} arg must be specified for a {}".
        #                                         format(repr(SEARCH_SPACE), self.__class__.__name__))
        #     # must be a list or array
        #     if not isinstance(search_space, (list, np.ndarray)):
        #         raise OptimizationFunctionError("The specification for the {} arg of {} must be a list or array".
        #                                         format(repr(SEARCH_SPACE), self.__class__.__name__))
        #     # must have same number of items as variable
        #     if len(search_space) != len(self.defaults.variable):
        #         raise OptimizationFunctionError("The number of items in {} for {} ([]) must equal that of its {} ({})".
        #                                         format(repr(SEARCH_SPACE), self.__class__.__name__, len(search_space),
        #                                                repr(VARIABLE), len(self.defaults.variable)))
        #     # every item must be a tuple with two elements, both of which are scalars, and first must be <= second
        #     for i in search_space:
        #         if not isinstance(i, tuple) or len(i) != 2:
        #             raise OptimizationFunctionError("Item specified for {} of {} ({}) is not a tuple with two items".
        #                                             format(repr(SEARCH_SPACE), self.__class__.__name__, i))
        #         if not all([np.isscalar(j) for j in i]):
        #             raise OptimizationFunctionError("Both elements of item specified for {} of {} ({}) must be scalars".
        #                                             format(repr(SEARCH_SPACE), self.__class__.__name__, i))
        #         if not i[0] < i[1]:
        #             raise OptimizationFunctionError("First element of item in {} specified for {} ({}) "
        #                                             "must be less than or equal to its second element".
        #                                             format(repr(SEARCH_SPACE), self.__class__.__name__, i))

    def _function(self,
                 variable=None,
                 context=None,
                 params=None,
                 **kwargs):
        """Return the sample that yields the optimal value of `objective_function <GaussianProcess.objective_function>`,
        and possibly all samples evaluated and their corresponding values.

        Optimal value is defined by `direction <GaussianProcess.direction>`:
        - if *MAXIMIZE*, returns greatest value
        - if *MINIMIZE*, returns least value

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : ndarray, ndarray, list, list
            first array contains the sample that yields the highest or lowest value of `objective_function
            <GaussianProcess.objective_function>`, depending on `direction <GaussianProcess.direction>`, and the
            second array contains the value of the function for that sample. If `save_samples
            <GaussianProcess.save_samples>` is `True`, first list contains all the samples evaluated, in the order
            they were evaluated; otherwise it is empty.  If `save_values <GaussianProcess.save_values>` is `True`,
            second list contains the values returned by `objective_function <GaussianProcess.objective_function>` for
            all the samples in the order they were evaluated; otherwise it is empty.
        """

        return_all_samples = return_all_values = []

        # Enforce no MPI for now
        MPI_IMPLEMENTATION = False
        if MPI_IMPLEMENTATION:
            # FIX: WORRY ABOUT THIS LATER
            pass

        else:
            last_sample, last_value, all_samples, all_values = super()._function(
                    variable=variable,
                    context=context,
                    params=params,
            )

            return_optimal_value = max(all_values)
            return_optimal_sample = all_samples[all_values.index(return_optimal_value)]
            if self._return_samples:
                return_all_samples = all_samples
            if self._return_values:
                return_all_values = all_values

        return return_optimal_sample, return_optimal_value, return_all_samples, return_all_values

    # FRED: THESE ARE THE SHELLS FOR THE METHODS I BELIEVE YOU NEED:
    def _gaussian_process_sample(self, variable, sample_num, context=None):
        """Draw and return sample from search_space."""
        # FRED: YOUR CODE HERE;  THIS IS THE search_function METHOD OF OptimizationControlMechanism (i.e., PARENT)
        # NOTES:
        #   This method is assigned as the search function of GaussianProcess,
        #     and should return a sample that will be evaluated in the call to GaussianProcess' `objective_function`
        #     (in the context of use with an OptimizationControlMechanism, a sample is a control_allocation,
        #     and the objective_function is the evaluate method of the agent_rep).
        #   You have accessible:
        #     variable arg:  the last sample evaluated
        #     sample_num:  number of current iteration in the search/sampling process
        #     self.search_space:  self.parameters.search_space._get(context), which you can assume will be a
        #                         list of tuples, each of which contains the sampling bounds for each dimension;
        #                         so its length = length of a sample
        #     (the extra stuff in getting the search space is to support statefulness in parallelization of sims)
        # return self._opt.ask() # [SAMPLE:  VECTOR SAME SHAPE AS VARIABLE]
        return variable
2458
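    # A hypothetical stand-in for the shell above (uniform draws within each
    # dimension's (low, high) bounds; a real implementation would instead pick
    # the next sample from a Gaussian-process acquisition function):
    #
    #     def _gaussian_process_sample(self, variable, sample_num, context=None):
    #         bounds = self.parameters.search_space._get(context)
    #         rng = np.random.default_rng()
    #         return np.array([rng.uniform(low, high) for low, high in bounds])
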
    def _gaussian_process_satisfied(self, variable, value, iteration, context=None):
1✔
2459
        """Determine whether search should be terminated;  return `True` if so, `False` if not."""
2460
        # FRED: YOUR CODE HERE;    THIS IS THE search_termination_function METHOD OF OptimizationControlMechanism (
2461
        # i.e., PARENT)
2462
        return iteration==2# [BOOLEAN, SPECIFIYING WHETHER TO END THE SEARCH/SAMPLING PROCESS]
×