PrincetonUniversity / PsyNeuLink / build 4057691242 (pending completion)

Pull Request #2594: ParameterEstimationComposition
github-actions (GitHub): merge b7ca74955 into 2e55a2e5a

11843 of 14787 branches covered (80.09%); branch coverage included in aggregate %.
493 of 493 new or added lines in 16 files covered (100.0%).
29569 of 34287 relevant lines covered (86.24%); 0.86 hits per line.

Source file (73.27% covered):
/psyneulink/core/components/functions/nonstateful/optimizationfunctions.py
#
# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.  You may obtain a copy of the License at:
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
#
# ******************************************   OPTIMIZATION FUNCTIONS **************************************************
"""
Contents
--------

* `OptimizationFunction`
* `GradientOptimization`
* `GridSearch`
* `GaussianProcess`

Overview
--------

Functions that return the sample of a variable yielding the optimized value of an objective_function.

"""

import contextlib
# from fractions import Fraction
import itertools
import warnings
from numbers import Number

import numpy as np
import typecheck as tc

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.functions.function import (
    DEFAULT_SEED, Function_Base, _random_state_getter,
    _seed_setter, is_function_type,
)
from psyneulink.core.globals.context import ContextFlags, handle_external_context
from psyneulink.core.globals.defaults import MPI_IMPLEMENTATION
from psyneulink.core.globals.keywords import \
    BOUNDS, GRADIENT_OPTIMIZATION_FUNCTION, GRID_SEARCH_FUNCTION, GAUSSIAN_PROCESS_FUNCTION, \
    OPTIMIZATION_FUNCTION_TYPE, OWNER, VALUE, VARIABLE
from psyneulink.core.globals.parameters import Parameter, check_user_specified
from psyneulink.core.globals.sampleiterator import SampleIterator
from psyneulink.core.globals.utilities import call_with_pruned_args

__all__ = ['OptimizationFunction', 'GradientOptimization', 'GridSearch', 'GaussianProcess',
           'ASCENT', 'DESCENT', 'DIRECTION', 'MAXIMIZE', 'MINIMIZE', 'OBJECTIVE_FUNCTION', 'SEARCH_FUNCTION',
           'SEARCH_SPACE', 'RANDOMIZATION_DIMENSION', 'SEARCH_TERMINATION_FUNCTION', 'SIMULATION_PROGRESS'
           ]

OBJECTIVE_FUNCTION = 'objective_function'
AGGREGATION_FUNCTION = 'aggregation_function'
SEARCH_FUNCTION = 'search_function'
SEARCH_SPACE = 'search_space'
RANDOMIZATION_DIMENSION = 'randomization_dimension'
SEARCH_TERMINATION_FUNCTION = 'search_termination_function'
DIRECTION = 'direction'
SIMULATION_PROGRESS = 'simulation_progress'

class OptimizationFunctionError(Exception):
    def __init__(self, error_value):
        self.error_value = error_value


def _num_estimates_getter(owning_component, context):
    if owning_component.parameters.randomization_dimension._get(context) is None:
        return 1
    else:
        return owning_component.parameters.search_space._get(context)[owning_component.randomization_dimension].num
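

# Illustrative sketch (hypothetical helper, not part of the library): how
# num_estimates is derived from the search space. Assuming the last dimension
# of search_space holds the randomization seeds, the number of estimates made
# for each sample is just the number of values in that dimension; with no
# randomization dimension, a single estimate is made per sample. Plain lists
# stand in for SampleIterators here.
def _example_num_estimates(search_space, randomization_dimension=None):
    # one estimate per sample when randomization is not used
    if randomization_dimension is None:
        return 1
    # otherwise, one estimate per seed in the randomization dimension
    return len(search_space[randomization_dimension])

# e.g., two control values crossed with three seeds -> 3 estimates per sample:
# _example_num_estimates([[0.1, 0.2], [1, 2, 3]], randomization_dimension=1)
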

class OptimizationFunction(Function_Base):
    """
    OptimizationFunction(                            \
    default_variable=None,                           \
    objective_function=lambda x:0,                   \
    search_function=lambda x:x,                      \
    search_space=[0],                                \
    randomization_dimension=None,                    \
    search_termination_function=lambda x,y,z:True,   \
    save_samples=False,                              \
    save_values=False,                               \
    max_iterations=None,                             \
    params=None,                                     \
    owner=None,                                      \
    prefs=None)

    Provides an interface to subclasses and external optimization functions. The default `function
    <OptimizationFunction.function>` raises a not implemented exception. Subclasses must implement
    the default function. The `_evaluate <OptimizationFunction._evaluate>` method implements the default procedure
    of generating samples from `search_space <OptimizationFunction.search_space>` using
    `search_function <OptimizationFunction.search_function>`, evaluating them using
    `objective_function <OptimizationFunction.objective_function>`, and reporting the value of each using
    `report_value <OptimizationFunction.report_value>` until terminated by
    `search_termination_function <OptimizationFunction.search_termination_function>`. Subclasses must override
    `function <OptimizationFunction.function>` to implement their own optimization function or call an external one.
    The base class method `_evaluate <OptimizationFunction._evaluate>` may be used to implement the optimization
    procedure.

    Samples in `search_space <OptimizationFunction.search_space>` are assumed to be a list of one or more
    `SampleIterator` objects.

    .. _OptimizationFunction_Procedure:

    **Default Optimization Procedure**

    When `_evaluate <OptimizationFunction._evaluate>` is executed, it iterates over the following steps:

        - get sample from `search_space <OptimizationFunction.search_space>` by calling `search_function
          <OptimizationFunction.search_function>`;
        ..
        - estimate the value of `objective_function <OptimizationFunction.objective_function>` for the sample
          by calling `objective_function <OptimizationFunction.objective_function>` the number of times
          specified in its `num_estimates <OptimizationFunction.num_estimates>` attribute;
        ..
        - aggregate the values of the estimates using `aggregation_function
          <OptimizationFunction.aggregation_function>` (the default is to average the values; if
          `aggregation_function <OptimizationFunction.aggregation_function>` is not specified, the entire list of
          estimates is returned);
        ..
        - report the aggregated value for the sample by calling `report_value <OptimizationFunction.report_value>`;
        ..
        - evaluate `search_termination_function <OptimizationFunction.search_termination_function>`.

    The current iteration number is contained in `iteration <OptimizationFunction.iteration>`. Iteration continues
    until all values of `search_space <OptimizationFunction.search_space>` have been evaluated and/or
    `search_termination_function <OptimizationFunction.search_termination_function>` returns `True`.  The `function
    <OptimizationFunction.function>` returns:

    - the last sample evaluated (which may or may not be the optimal value, depending on the `objective_function
      <OptimizationFunction.objective_function>`);

    - the value of `objective_function <OptimizationFunction.objective_function>` associated with the last sample;

    - two lists that may contain all of the samples evaluated and their values, depending on whether `save_samples
      <OptimizationFunction.save_samples>` and/or `save_values <OptimizationFunction.save_values>` are `True`,
      respectively.

    .. _OptimizationFunction_Defaults:

    .. note::

        An OptimizationFunction or any of its subclasses can be created by calling its constructor.  This provides
        runnable defaults for all of its arguments (see below). However, these do not yield useful results, and are
        meant simply to allow the constructor of the OptimizationFunction to be used to specify some but not all of
        its parameters when specifying the OptimizationFunction in the constructor for another Component. For
        example, an OptimizationFunction may use for its `objective_function <OptimizationFunction.objective_function>`
        or `search_function <OptimizationFunction.search_function>` a method of the Component to which it is being
        assigned;  however, those methods will not yet be available, as the Component itself has not yet been
        constructed. This can be handled by calling the OptimizationFunction's `reset
        <OptimizationFunction.reset>` method after the Component has been instantiated, with a parameter
        specification dictionary in which the key for each entry is the name of a parameter and its value the value
        to be assigned to the parameter.  This is done automatically for Mechanisms that take an ObjectiveFunction as
        their `function <Mechanism_Base.function>` (such as the `OptimizationControlMechanism`), but will require it be
        done explicitly for Components for which that is not the case. A warning is issued if defaults are used for
        the arguments of an OptimizationFunction or its subclasses;  this can be suppressed by specifying the
        relevant argument(s) as `NotImplemented`.

    .. technical_note::
       - Constructors of subclasses should include **kwargs in their constructor method, to accommodate arguments
         required by some subclasses but not others (e.g., search_space needed by `GridSearch` but not
         `GradientOptimization`) so that subclasses can be used interchangeably by OptimizationControlMechanism.

       - Subclasses with attributes that depend on one of the OptimizationFunction's parameters should implement the
         `reset <OptimizationFunction.reset>` method, that calls super().reset(*args) and then
         reassigns the values of the dependent attributes accordingly.  If an argument is not needed for the subclass,
         `NotImplemented` should be passed as the argument's value in the call to super (i.e., the
         OptimizationFunction's constructor).


    Arguments
    ---------

    default_variable : list or ndarray : default None
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
        `objective_function <OptimizationFunction.objective_function>`.

    objective_function : function or method : default None
        specifies function used to make a single estimate for a sample, `num_estimates
        <OptimizationFunction.num_estimates>` of which are made for a given sample in each iteration of the
        `optimization process <OptimizationFunction_Procedure>`; if it is not specified, a default function is used
        that simply returns the value passed as its `variable <OptimizationFunction.variable>` parameter (see `note
        <OptimizationFunction_Defaults>`).

    aggregation_function : function or method : default None
        specifies function used to aggregate the values returned over the `num_estimates
        <OptimizationFunction.num_estimates>` calls to the `objective_function
        <OptimizationFunction.objective_function>` for a given sample in each iteration of the `optimization
        process <OptimizationFunction_Procedure>`; if it is not specified, a default function is used that simply
        returns the value passed as its `variable <OptimizationFunction.variable>` parameter (see `note
        <OptimizationFunction_Defaults>`).

    search_function : function or method : default None
        specifies function used to select a sample for `objective_function <OptimizationFunction.objective_function>`
        in each iteration of the `optimization process <OptimizationFunction_Procedure>`.  It **must be specified**
        if the `objective_function <OptimizationFunction.objective_function>` does not generate samples on its own
        (e.g., as does `GradientOptimization`).  If it is required and not specified, the optimization process
        executes exactly once using the value passed as its `variable <OptimizationFunction.variable>` parameter
        (see `note <OptimizationFunction_Defaults>`).

    search_space : list or array of SampleIterators : default None
        specifies iterators used by `search_function <OptimizationFunction.search_function>` to generate samples
        evaluated by `objective_function <OptimizationFunction.objective_function>` in each iteration of the
        `optimization process <OptimizationFunction_Procedure>`. It **must be specified**
        if the `objective_function <OptimizationFunction.objective_function>` does not generate samples on its own
        (e.g., as does `GradientOptimization`). If it is required and not specified, the optimization process
        executes exactly once using the value passed as its `variable <OptimizationFunction.variable>` parameter
        (see `note <OptimizationFunction_Defaults>`).

    randomization_dimension : int
        specifies the index of `search_space <OptimizationFunction.search_space>` containing the seeds for use in
        randomization over each estimate of a sample (see `num_estimates <OptimizationFunction.num_estimates>`).

    search_termination_function : function or method : None
        specifies function used to terminate iterations of the `optimization process <OptimizationFunction_Procedure>`.
        It must return a boolean value, and it **must be specified** if the
        `objective_function <OptimizationFunction.objective_function>` is not overridden.  If it is required and not
        specified, the optimization process executes exactly once (see `note <OptimizationFunction_Defaults>`).

    save_samples : bool
        specifies whether or not to save and return the values of the samples used to evaluate `objective_function
        <OptimizationFunction.objective_function>` over all iterations of the `optimization process
        <OptimizationFunction_Procedure>`.

    save_values : bool
        specifies whether or not to save and return the values of `objective_function
        <OptimizationFunction.objective_function>` for samples evaluated in all iterations of the
        `optimization process <OptimizationFunction_Procedure>`.

    max_iterations : int : default 1000
        specifies the maximum number of times the `optimization process <OptimizationFunction_Procedure>` is allowed
        to iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.


    Attributes
    ----------

    variable : ndarray
        first sample evaluated by `objective_function <OptimizationFunction.objective_function>` (i.e., one used to
        evaluate it in the first iteration of the `optimization process <OptimizationFunction_Procedure>`).

    objective_function : function or method
        used to evaluate the sample in each iteration of the `optimization process <OptimizationFunction_Procedure>`.

    search_function : function, method or None
        used to select a sample evaluated by `objective_function <OptimizationFunction.objective_function>`
        in each iteration of the `optimization process <OptimizationFunction_Procedure>`.  `NotImplemented` if
        the `objective_function <OptimizationFunction.objective_function>` generates its own samples.

    search_space : list or array of `SampleIterators <SampleIterator>`
        used by `search_function <OptimizationFunction.search_function>` to generate samples evaluated by
        `objective_function <OptimizationFunction.objective_function>` in each iteration of the `optimization process
        <OptimizationFunction_Procedure>`.  The number of SampleIterators in the list determines the dimensionality
        of each sample:  in each iteration of the `optimization process <OptimizationFunction_Procedure>`, each
        SampleIterator is called upon to provide the value for one of the dimensions of the sample.  `NotImplemented`
        if the `objective_function <OptimizationFunction.objective_function>` generates its own samples.  If it is
        required and not specified, the optimization process executes exactly once using the value passed as its
        `variable <OptimizationFunction.variable>` parameter (see `note <OptimizationFunction_Defaults>`).

    randomization_dimension : int or None
        the index of `search_space <OptimizationFunction.search_space>` containing the seeds for use in randomization
        over each estimate of a sample (see `num_estimates <OptimizationFunction.num_estimates>`);  if num_estimates
        is not specified, this is None, and only a single estimate is made for each sample.

    num_estimates : int or None
        the number of independent estimates evaluated (i.e., calls made to the OptimizationFunction's
        `objective_function <OptimizationFunction.objective_function>`) for each sample, aggregated over
        by its `aggregation_function <OptimizationFunction.aggregation_function>` to determine the estimated value
        for a given sample.  This is determined from the `search_space <OptimizationFunction.search_space>` by
        accessing its `randomization_dimension <OptimizationFunction.randomization_dimension>` and determining
        the length of (i.e., number of elements specified for) that dimension.

    aggregation_function : function or method
        used to aggregate the values returned over the `num_estimates <OptimizationFunction.num_estimates>` calls to
        the `objective_function <OptimizationFunction.objective_function>` for a given sample in each iteration of
        the `optimization process <OptimizationFunction_Procedure>`.

    search_termination_function : function or method that returns a boolean value
        used to terminate iterations of the `optimization process <OptimizationFunction_Procedure>`; if it is required
        and not specified, the optimization process executes exactly once (see `note <OptimizationFunction_Defaults>`).

    iteration : int
        the current iteration of the `optimization process <OptimizationFunction_Procedure>`.

    max_iterations : int : default 1000
        determines the maximum number of times the `optimization process <OptimizationFunction_Procedure>` is allowed
        to iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.

    save_samples : bool
        determines whether or not to save the values of the samples used to evaluate `objective_function
        <OptimizationFunction.objective_function>` over all iterations of the `optimization process
        <OptimizationFunction_Procedure>`.

    save_values : bool
        determines whether or not to save and return the values of `objective_function
        <OptimizationFunction.objective_function>` for samples evaluated in all iterations of the
        `optimization process <OptimizationFunction_Procedure>`.
    """

    componentType = OPTIMIZATION_FUNCTION_TYPE

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <OptimizationFunction.variable>`

                    :default value: numpy.array([0, 0, 0])
                    :type: ``numpy.ndarray``
                    :read only: True

                aggregation_function
                    see `aggregation_function <OptimizationFunction.aggregation_function>`

                    :default value: lambda x: np.mean(x, axis=1)
                    :type: ``types.FunctionType``

                max_iterations
                    see `max_iterations <OptimizationFunction.max_iterations>`

                    :default value: None
                    :type:

                num_estimates
                    see `num_estimates <OptimizationFunction.num_estimates>`

                    :default value: None
                    :type: ``int``

                objective_function
                    see `objective_function <OptimizationFunction.objective_function>`

                    :default value: lambda x: 0
                    :type: ``types.FunctionType``

                randomization_dimension
                    see `randomization_dimension <OptimizationFunction.randomization_dimension>`

                    :default value: None
                    :type: ``int``

                save_samples
                    see `save_samples <OptimizationFunction.save_samples>`

                    :default value: False
                    :type: ``bool``

                save_values
                    see `save_values <OptimizationFunction.save_values>`

                    :default value: False
                    :type: ``bool``

                saved_samples
                    see `saved_samples <OptimizationFunction.saved_samples>`

                    :default value: []
                    :type: ``list``
                    :read only: True

                saved_values
                    see `saved_values <OptimizationFunction.saved_values>`

                    :default value: []
                    :type: ``list``
                    :read only: True

                search_function
                    see `search_function <OptimizationFunction.search_function>`

                    :default value: lambda x: x
                    :type: ``types.FunctionType``

                search_space
                    see `search_space <OptimizationFunction.search_space>`

                    :default value: [`SampleIterator`]
                    :type: ``list``

                search_termination_function
                    see `search_termination_function <OptimizationFunction.search_termination_function>`

                    :default value: lambda x, y, z: True
                    :type: ``types.FunctionType``
        """
        variable = Parameter(np.array([0.0, 0.0, 0.0]), read_only=True, pnl_internal=True, constructor_argument='default_variable')

        objective_function = Parameter(lambda x: 0.0, stateful=False, loggable=False)
        aggregation_function = Parameter(lambda x: np.mean(x, axis=1), stateful=False, loggable=False)
        search_function = Parameter(lambda x: x, stateful=False, loggable=False)
        search_termination_function = Parameter(lambda x, y, z: True, stateful=False, loggable=False)
        search_space = Parameter([SampleIterator([0])], stateful=False, loggable=False)
        randomization_dimension = Parameter(None, stateful=False, loggable=False)
        num_estimates = Parameter(None, stateful=True, loggable=True, read_only=True,
                                  dependencies=[randomization_dimension, search_space],
                                  getter=_num_estimates_getter)

        save_samples = Parameter(False, pnl_internal=True)
        save_values = Parameter(False, pnl_internal=True)

        # these are created as parameter ports, but should they be?
        max_iterations = Parameter(None, modulable=True)

        saved_samples = Parameter([], read_only=True, pnl_internal=True)
        saved_values = Parameter([], read_only=True, pnl_internal=True)
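
    # Note on the default aggregation_function (illustrative): in _evaluate,
    # values are reshaped to (num_param_combs, num_estimates, num_outputs)
    # before aggregation, so the default `lambda x: np.mean(x, axis=1)`
    # averages over the num_estimates axis, yielding one aggregated value per
    # parameter combination and output.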

    @check_user_specified
    @tc.typecheck
    def __init__(
        self,
        default_variable=None,
        objective_function:tc.optional(is_function_type)=None,
        aggregation_function:tc.optional(is_function_type)=None,
        search_function:tc.optional(is_function_type)=None,
        search_space=None,
        randomization_dimension=None,
        search_termination_function:tc.optional(is_function_type)=None,
        save_samples:tc.optional(bool)=None,
        save_values:tc.optional(bool)=None,
        max_iterations:tc.optional(int)=None,
        params=None,
        owner=None,
        prefs=None,
        context=None,
        **kwargs
    ):

        self._unspecified_args = []

        if objective_function is None:
            self._unspecified_args.append(OBJECTIVE_FUNCTION)

        if aggregation_function is None:
            self._unspecified_args.append(AGGREGATION_FUNCTION)

        if search_function is None:
            self._unspecified_args.append(SEARCH_FUNCTION)

        if search_termination_function is None:
            self._unspecified_args.append(SEARCH_TERMINATION_FUNCTION)

        self.randomization_dimension = randomization_dimension
        if self.search_space:
            # Make randomization dimension of search_space last for standardization of treatment
            self.search_space.append(self.search_space.pop(self.search_space.index(self.randomization_dimension)))
            self.randomization_dimension = len(self.search_space)

        super().__init__(
            default_variable=default_variable,
            save_samples=save_samples,
            save_values=save_values,
            max_iterations=max_iterations,
            search_space=search_space,
            objective_function=objective_function,
            aggregation_function=aggregation_function,
            search_function=search_function,
            search_termination_function=search_termination_function,
            params=params,
            owner=owner,
            prefs=prefs,
            context=context,
            **kwargs
        )

    def _validate_params(self, request_set, target_set=None, context=None):

        # super()._validate_params(request_set=request_set, target_set=target_set, context=context)

        if OBJECTIVE_FUNCTION in request_set and request_set[OBJECTIVE_FUNCTION] is not None:
            if not is_function_type(request_set[OBJECTIVE_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(OBJECTIVE_FUNCTION), self.__class__.__name__,
                                                       request_set[OBJECTIVE_FUNCTION].__name__))

        if AGGREGATION_FUNCTION in request_set and request_set[AGGREGATION_FUNCTION] is not None:
            if not is_function_type(request_set[AGGREGATION_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(AGGREGATION_FUNCTION), self.__class__.__name__,
                                                       request_set[AGGREGATION_FUNCTION].__name__))

        if SEARCH_FUNCTION in request_set and request_set[SEARCH_FUNCTION] is not None:
            if not is_function_type(request_set[SEARCH_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(SEARCH_FUNCTION), self.__class__.__name__,
                                                       request_set[SEARCH_FUNCTION].__name__))

        if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
            search_space = request_set[SEARCH_SPACE]
            if not all(isinstance(s, (SampleIterator, type(None), list, tuple, np.ndarray)) for s in search_space):
                raise OptimizationFunctionError("All entries in list specified for {} arg of {} must be a {}".
                                                format(repr(SEARCH_SPACE),
                                                       self.__class__.__name__,
                                                       "SampleIterator, list, tuple, or ndarray"))

        if SEARCH_TERMINATION_FUNCTION in request_set and request_set[SEARCH_TERMINATION_FUNCTION] is not None:
            if not is_function_type(request_set[SEARCH_TERMINATION_FUNCTION]):
                raise OptimizationFunctionError("Specification of {} arg for {} ({}) must be a function or method".
                                                format(repr(SEARCH_TERMINATION_FUNCTION), self.__class__.__name__,
                                                       request_set[SEARCH_TERMINATION_FUNCTION].__name__))

            try:
                b = request_set[SEARCH_TERMINATION_FUNCTION]()
                if not isinstance(b, bool):
                    raise OptimizationFunctionError("Function ({}) specified for {} arg of {} must return a boolean value".
                                                    format(request_set[SEARCH_TERMINATION_FUNCTION].__name__,
                                                           repr(SEARCH_TERMINATION_FUNCTION),
                                                           self.__class__.__name__))
            except TypeError as e:
                # we cannot validate arbitrary functions here if they
                # require arguments
                if 'required positional arguments' not in str(e):
                    raise

    @handle_external_context(fallback_most_recent=True)
    def reset(
        self,
        default_variable=None,
        objective_function=None,
        aggregation_function=None,
        search_function=None,
        search_termination_function=None,
        search_space=None,
        randomization_dimension=None,
        context=None
    ):
        """Reset parameters of the OptimizationFunction

        Parameters to be reset should be specified in a parameter specification dictionary, in which the key
        for each entry is the name of one of the following parameters, and its value is the value to be assigned to
        the parameter.  The following parameters can be reset:

            * `default_variable <OptimizationFunction.default_variable>`
            * `objective_function <OptimizationFunction.objective_function>`
            * `search_function <OptimizationFunction.search_function>`
            * `search_termination_function <OptimizationFunction.search_termination_function>`
        """
        self._validate_params(
            request_set={
                'default_variable': default_variable,
                'objective_function': objective_function,
                'aggregation_function': aggregation_function,
                RANDOMIZATION_DIMENSION: randomization_dimension,
                'search_function': search_function,
                'search_termination_function': search_termination_function,
                'search_space': search_space,
            }
        )

        if default_variable is not None:
            self._update_default_variable(default_variable, context)
        if objective_function is not None:
            self.parameters.objective_function._set(objective_function, context)
            if OBJECTIVE_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(OBJECTIVE_FUNCTION)]
        if aggregation_function is not None:
            self.parameters.aggregation_function._set(aggregation_function, context)
            if AGGREGATION_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(AGGREGATION_FUNCTION)]
        if search_function is not None:
            self.parameters.search_function._set(search_function, context)
            if SEARCH_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(SEARCH_FUNCTION)]
        if search_termination_function is not None:
            self.parameters.search_termination_function._set(search_termination_function, context)
            if SEARCH_TERMINATION_FUNCTION in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(SEARCH_TERMINATION_FUNCTION)]
        if search_space is not None:
            self.parameters.search_space._set(search_space, context)
            if SEARCH_SPACE in self._unspecified_args:
                del self._unspecified_args[self._unspecified_args.index(SEARCH_SPACE)]
        if randomization_dimension is not None:
            self.parameters.randomization_dimension._set(randomization_dimension, context)
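
    # Illustrative usage sketch (hypothetical names; see the note in the class
    # docstring): a subclass can be constructed with defaults and then
    # re-parameterized via reset() once its owner Component exists, e.g.:
    #
    #     fct = GridSearch(save_values=True)     # defaults; warns when used as-is
    #     comp = SomeComponent(function=fct)     # owner constructed afterwards
    #     fct.reset(objective_function=comp.evaluate,
    #               search_space=[SampleIterator([0.1, 0.5, 1.0])])
    #
    # `SomeComponent` and `comp.evaluate` are placeholders for whatever
    # Component owns the function and the method it supplies for evaluation.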

    def _function(self,
                  variable=None,
                  context=None,
                  params=None,
                  **kwargs):
        """Find the sample that yields the optimal value of `objective_function
        <OptimizationFunction.objective_function>`.

        See `optimization process <OptimizationFunction_Procedure>` for details.

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : array, array, list, list
            first array contains the sample that yields the optimal value of the `optimization process
            <OptimizationFunction_Procedure>`, and second array contains the value of `objective_function
            <OptimizationFunction.objective_function>` for that sample.  If `save_samples
            <OptimizationFunction.save_samples>` is `True`, first list contains all the values sampled in the order
            they were evaluated; otherwise it is empty.  If `save_values <OptimizationFunction.save_values>` is `True`,
            second list contains the values returned by `objective_function <OptimizationFunction.objective_function>`
            for all the samples in the order they were evaluated; otherwise it is empty.
        """

        raise NotImplementedError("OptimizationFunction._function is not implemented and "
                                  "should be overridden by subclasses.")

    def _evaluate(self, variable=None, context=None, params=None, fit_evaluate=False):
        """
        Evaluate all the samples in a `search_space <OptimizationFunction.search_space>` with the agent_rep. The
        evaluation is done either serially (_sequential_evaluate) or in parallel (_grid_evaluate). This method should
        be invoked by subclasses in their `_function` method to evaluate the samples before searching for the optimal
        value.

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : array, array, list, list
            first array contains the sample that yields the optimal value of the `optimization process
            <OptimizationFunction_Procedure>`, and second array contains the value of `objective_function
            <OptimizationFunction.objective_function>` for that sample.  If `save_samples
            <OptimizationFunction.save_samples>` is `True`, first list contains all the values sampled in the order
            they were evaluated; otherwise it is empty.  If `save_values <OptimizationFunction.save_values>` is `True`,
            second list contains the values returned by `objective_function <OptimizationFunction.objective_function>`
            for all the samples in the order they were evaluated; otherwise it is empty.

        """

        if self._unspecified_args and self.initialization_status == ContextFlags.INITIALIZED:
            warnings.warn("The following arg(s) were not specified for {}: {} -- using default(s)".
                          format(self.name, ', '.join(self._unspecified_args)))
            assert all([not getattr(self.parameters, x)._user_specified for x in self._unspecified_args])
            self._unspecified_args = []

        # EVALUATE ALL SAMPLES IN SEARCH SPACE
        # Evaluate all estimates of all samples in search_space

        # Run compiled mode if requested by parameter and everything is initialized
        if self.owner and self.owner.parameters.comp_execution_mode._get(context) != 'Python' and \
          ContextFlags.PROCESSING in context.flags:
            all_samples = [s for s in itertools.product(*self.search_space)]
            all_values, num_evals = self._grid_evaluate(self.owner, context, fit_evaluate)
            assert len(all_values) == num_evals
            assert len(all_samples) == num_evals

            if fit_evaluate:
                all_values = np.ctypeslib.as_array(all_values)

                def _get_builtin_dtype(dtype):
                    if dtype.isbuiltin:
                        return dtype

                    if dtype.subdtype is not None:
                        return dtype.base

                    subdtypes = (v[0] for v in dtype.fields.values())
                    first_builtin = _get_builtin_dtype(next(subdtypes))
                    assert all(_get_builtin_dtype(sdt) is first_builtin for sdt in subdtypes)
                    return first_builtin

                dtype = _get_builtin_dtype(all_values.dtype)
                # Ignore the shape of the output structure
                all_values = all_values.view(dtype=dtype).reshape((*all_values.shape[0:2], -1))

                # Re-arrange dimensions to match Python
                all_values = np.transpose(all_values, (1, 2, 0))

            last_sample = last_value = None
        # Otherwise, default sequential sampling
        else:
            # Get initial sample in case it is needed by _search_space_evaluate (e.g., for gradient initialization)
            initial_sample = self._check_args(variable=variable, context=context, params=params)
            try:
                initial_value = self.owner.objective_mechanism.parameters.value._get(context)
            except AttributeError:
                initial_value = 0

            last_sample, last_value, all_samples, all_values = self._sequential_evaluate(initial_sample,
                                                                                         initial_value,
                                                                                         context)

        # If an aggregation_function is specified and there is a randomization dimension specified
        # in the control signals, use the aggregation function to aggregate over the samples generated
        # for different randomized values of the control signal
        if self.aggregation_function and \
                self.parameters.randomization_dimension._get(context) and \
                self.parameters.num_estimates._get(context) is not None:

            # FIXME: This is easy to support in hybrid mode. We just need to convert ctype results
            #        returned from _grid_evaluate to numpy
            assert not self.owner or self.owner.parameters.comp_execution_mode._get(context) == 'Python', \
                   "Aggregation function not supported in compiled mode!"

            # Reshape all the values we encountered so that those that correspond to the same parameter values
            # can be aggregated. After this we should have an array that is of shape
            # (number of parameter combinations (excluding randomization), num_estimates, number of output values)
            num_estimates = int(self.parameters.num_estimates._get(context))
            num_param_combs = all_values.shape[1] // num_estimates
            num_outputs = all_values.shape[0]
            all_values = np.reshape(all_values.transpose(), (num_param_combs, num_estimates, num_outputs))

            # Since we are aggregating over the randomized value of the control allocation, we also need to drop the
            # randomized dimension from the samples. That is, we don't want to return num_estimates samples for each
            # control allocation. The line below just grabs the first one (seed == 1) for each control allocation.
            all_samples = all_samples[:, all_samples[self.randomization_dimension, :] == all_samples[self.randomization_dimension, 0]]

            # If num_estimates is not None, then one of the control signals is modulating the random seed. We will
            # aggregate over this dimension.
            aggregated_values = np.atleast_2d(self.aggregation_function(all_values))

            # Transpose the aggregated values matrix so it is (num_outputs, num_param_combs); this matches all_samples
            returned_values = np.transpose(aggregated_values)

        else:
            returned_values = all_values

        # Return list of unique samples and aggregated values over them
        return last_sample, last_value, all_samples, returned_values
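
    # Worked example of the aggregation step above (illustrative numbers):
    # with 2 parameter combinations x 3 random seeds, sequential evaluation
    # yields all_values of shape (num_outputs, 6). Reshaping gives
    # (2, 3, num_outputs); the default aggregation_function (np.mean over
    # axis 1) collapses the 3 estimates, giving (2, num_outputs); the final
    # transpose restores (num_outputs, 2), one aggregated column per
    # parameter combination, matching all_samples.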

    def _sequential_evaluate(self, initial_sample, initial_value, context):
        """Sequentially evaluate every sample in search_space.
        Return arrays with all samples evaluated, and an array with all values of those samples.
        """

        # Initialize variables used in while loop
        iteration = 0
        current_sample = initial_sample
        current_value = initial_value
        all_samples = []
        all_values = []

        # Set up progress bar
        _show_progress = False
        if hasattr(self, OWNER) and self.owner and self.owner.prefs.reportOutputPref is SIMULATION_PROGRESS:
            _show_progress = True
            _progress_bar_char = '.'
            _progress_bar_rate_str = ""
            _search_space_size = len(self.search_space)
            _progress_bar_rate = int(10**(np.log10(_search_space_size) - 2))
            if _progress_bar_rate > 1:
                _progress_bar_rate_str = str(_progress_bar_rate) + " "
            print("\n{} executing optimization process (one {} for each {}of {} samples): ".
                  format(self.owner.name, repr(_progress_bar_char), _progress_bar_rate_str, _search_space_size))
            _progress_bar_count = 0

        # Iterate over samples until search_termination_function returns True
        evaluated_samples = []
        estimated_values = []
        while not call_with_pruned_args(self.search_termination_function,
                                        current_sample,
                                        current_value, iteration,
                                        context=context):
            if _show_progress:
                increment_progress_bar = (_progress_bar_rate < 1) or not (_progress_bar_count % _progress_bar_rate)
                if increment_progress_bar:
                    print(_progress_bar_char, end='', flush=True)
                _progress_bar_count += 1

            # Get next sample
            current_sample = call_with_pruned_args(self.search_function, current_sample, iteration, context=context)
            # Get value of sample
            current_value = call_with_pruned_args(self.objective_function, current_sample, context=context)

            # If the value returned by the objective function is a tuple, then we are data fitting and the
            # evaluate_agent_rep function is returning the (net_outcome, results) tuple. We want the results
            # in this case.
            if type(current_value) is tuple:
                current_value = np.squeeze(np.array(current_value[1]))

            # Convert the sample and value to numpy arrays even if they are scalars
            current_sample = np.atleast_1d(current_sample)
            current_value = np.atleast_1d(current_value)

            evaluated_samples.append(current_sample)
            estimated_values.append(current_value)

            # self._report_value(current_value)
            iteration += 1
            max_iterations = self.parameters.max_iterations._get(context)
            if max_iterations and iteration > max_iterations:
                warnings.warn(f"{self.name} of {self.owner.name} exceeded max iterations {max_iterations}.")
                break

            # Change randomization for next sample if specified (relies on randomization being the last dimension)
            if self.owner and self.owner.parameters.same_seed_for_all_allocations is False:
                self.search_space[self.parameters.randomization_dimension._get(context)].start += 1
                self.search_space[self.parameters.randomization_dimension._get(context)].stop += 1

        if self.parameters.save_samples._get(context):
            self.parameters.saved_samples._set(all_samples, context)
        if self.parameters.save_values._get(context):
            self.parameters.saved_values._set(all_values, context)

        # Convert evaluated_samples and estimated_values to numpy arrays, stacked along the last dimension
        estimated_values = np.stack(estimated_values, axis=-1)
        evaluated_samples = np.stack(evaluated_samples, axis=-1)

        # FIX: 11/3/21: ??MODIFY TO RETURN SAME AS _grid_evaluate
        return current_sample, current_value, evaluated_samples, estimated_values
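
    # Note on return shapes (illustrative): np.stack(..., axis=-1) makes the
    # last axis index the evaluations, so after N iterations with scalar
    # values, estimated_values has shape (1, N) and evaluated_samples has
    # shape (sample_dimensionality, N); column i holds the i-th evaluation.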

    def _grid_evaluate(self, ocm, context, get_results:bool):
        """Helper method for evaluation of a grid of samples from search space via LLVM backends."""
        # If execution mode is not Python, the search space has to be static
        def _is_static(it:SampleIterator):
            if isinstance(it.start, Number) and isinstance(it.stop, Number):
                return True

            if isinstance(it.generator, list):
                return True

            return False

        assert all(_is_static(sample_iterator) for sample_iterator in self.search_space)

        assert ocm is ocm.agent_rep.controller

        # Compiled evaluate expects the same variable as composition
        state_features = ocm.parameters.state_feature_values._get(context)
        inputs, num_inputs_sets = ocm.agent_rep._parse_run_inputs(state_features, context)

        num_evals = np.prod([d.num for d in self.search_space])

        # Map allocations to values
        comp_exec = pnlvm.execution.CompExecution(ocm.agent_rep, [context.execution_id])
        execution_mode = ocm.parameters.comp_execution_mode._get(context)
        if execution_mode == "PTX":
            outcomes = comp_exec.cuda_evaluate(inputs, num_inputs_sets, num_evals, get_results)
        elif execution_mode == "LLVM":
            outcomes = comp_exec.thread_evaluate(inputs, num_inputs_sets, num_evals, get_results)
        else:
            assert False, f"Unknown execution mode for {ocm.name}: {execution_mode}."

        return outcomes, num_evals
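
    # Clarifying note (illustrative): a search space is "static" here when its
    # dimensions can be enumerated up front, e.g. numeric start/stop bounds or
    # an explicit list such as SampleIterator([0.1, 0.5, 1.0]); an iterator
    # driven by an arbitrary generator cannot be compiled, since the total
    # number of evaluations (np.prod of the dimension sizes) must be known.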

    def reset_grid(self):
        """Reset iterators in `search_space <GridSearch.search_space>`"""
        for s in self.search_space:
            s.reset()
        self.grid = itertools.product(*[s for s in self.search_space])
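
    # Illustrative note: itertools.product enumerates the grid in row-major
    # order, e.g. dimensions [1, 2] and [3, 4] yield the samples
    # (1, 3), (1, 4), (2, 3), (2, 4); _traverse_grid below just draws these
    # one at a time via next(self.grid).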

    def _traverse_grid(self, variable, sample_num, context=None):
        """Get next sample from grid.
        This is assigned as the `search_function <OptimizationFunction.search_function>` of the `OptimizationFunction`.
        """
        if self.is_initializing:
            return [signal.start for signal in self.search_space]
        try:
            sample = next(self.grid)
        except StopIteration:
            raise OptimizationFunctionError("Expired grid in {} run from {} "
                                            "(execution_count: {}; num_iterations: {})".
                format(self.__class__.__name__, self.owner.name,
                       self.owner.parameters.execution_count.get(), self.num_iterations))
        return sample

    def _grid_complete(self, variable, value, iteration, context=None):
        """Return True when search of grid is complete.
        This is assigned as the `search_termination_function <OptimizationFunction.search_termination_function>`
        of the `OptimizationFunction`.
        """
        try:
            return iteration == self.num_iterations
        except AttributeError:
            return True

    def _report_value(self, new_value):
        """Report value returned by `objective_function <OptimizationFunction.objective_function>` for sample."""
        pass

    @property
    def num_estimates(self):
        if self.randomization_dimension is None:
            return 1
        else:
            return self.search_space[self.randomization_dimension].num

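
# Illustrative sketch (hypothetical helper, not part of the library): the
# default optimization procedure documented in OptimizationFunction, reduced
# to plain Python. A search_function proposes samples, an objective_function
# scores them, and a search_termination_function stops the loop; the last
# sample and value are returned along with the lists of all samples/values.
def _example_optimization_procedure(search_function, objective_function,
                                    search_termination_function, initial_sample):
    samples, values = [], []
    sample = initial_sample
    value = objective_function(sample)
    iteration = 0
    while not search_termination_function(sample, value, iteration):
        sample = search_function(sample, iteration)   # get next sample
        value = objective_function(sample)            # evaluate it
        samples.append(sample)
        values.append(value)
        iteration += 1
    return sample, value, samples, values

# e.g., a 5-point grid search of x ** 2 terminating once the grid is spent:
#     grid = iter([-2, -1, 0, 1, 2])
#     _example_optimization_procedure(lambda s, i: next(grid),
#                                     lambda s: s ** 2,
#                                     lambda s, v, i: i == 5, 0)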

ASCENT = 'ascent'
DESCENT = 'descent'


class GradientOptimization(OptimizationFunction):
1✔
880
    """
881
    GradientOptimization(            \
882
        default_variable=None,       \
883
        objective_function=None,     \
884
        gradient_function=None,      \
885
        direction=ASCENT,            \
886
        search_space=None,           \
887
        step_size=1.0,               \
888
        annealing_function=None,     \
889
        convergence_criterion=VALUE, \
890
        convergence_threshold=.001,  \
891
        max_iterations=1000,         \
892
        save_samples=False,          \
893
        save_values=False,           \
894
        params=None,                 \
895
        owner=None,                  \
896
        prefs=None                   \
897
        )
898

899
    Sample variable by following gradient with respect to the value of `objective_function
900
    <GradientOptimization.objective_function>` it generates, and return the sample that generates either the
901
    highest (**direction=*ASCENT*) or lowest (**direction=*DESCENT*) value.
902

903
    .. _GradientOptimization_Procedure:
904

905
    **Optimization Procedure**
906

907
    When `function <GradientOptimization.function>` is executed, it iterates over the folowing steps:
908

909
        - `compute gradient <GradientOptimization_Gradient_Calculation>` using the `gradient_function
910
          <GradientOptimization.gradient_function>`;
911
        ..
912
        - adjust `variable <GradientOptimization.variable>` based on the gradient, in the specified
913
          `direction <GradientOptimization.direction>` and by an amount specified by `step_size
914
          <GradientOptimization.step_size>` and possibly `annealing_function
915
          <GradientOptimization.annealing_function>`;
916
        ..
917
        - compute value of `objective_function <GradientOptimization.objective_function>` using the adjusted value of
918
          `variable <GradientOptimization.variable>`;
919
        ..
920
        - adjust `step_size <GradientOptimization.udpate_rate>` using `annealing_function
921
          <GradientOptimization.annealing_function>`, if specified, for use in the next iteration;
922
        ..
923
        - evaluate `convergence_criterion <GradientOptimization.convergence_criterion>` and test whether it is below
924
          the `convergence_threshold <GradientOptimization.convergence_threshold>`.
925

926
    The current iteration is contained in `iteration <GradientOptimization.iteration>`. Iteration continues until
927
    `convergence_criterion <GradientOptimization.convergence_criterion>` falls below `convergence_threshold
928
    <GradientOptimization.convergence_threshold>` or the number of iterations exceeds `max_iterations
929
    <GradientOptimization.max_iterations>`.  The `function <GradientOptimization.function>` returns the last sample
930
    evaluated by `objective_function <GradientOptimization.objective_function>` (presumed to be the optimal one),
931
    the value of the function, as well as lists that may contain all of the samples evaluated and their values,
932
    depending on whether `save_samples <OptimizationFunction.save_samples>` and/or `save_vales
933
    <OptimizationFunction.save_values>` are `True`, respectively.
934

935
    .. _GradientOptimization_Gradient_Calculation:
936

937
    **Gradient Calculation**
938

939
    The gradient is evaluated by `gradient_function <GradientOptimization.gradient_function>`,
940
    which should be the derivative of the `objective_function <GradientOptimization.objective_function>`
941
    with respect to `variable <GradientOptimization.variable>` at its current value:
942
    :math:`\\frac{d(objective\\_function(variable))}{d(variable)}`.  If the **gradient_function* argument of the
943
    constructor is not specified, then an attempt is made to use `Autograd's <https://github.com/HIPS/autograd>`_ `grad
944
    <autograd.grad>` method to generate `gradient_function <GradientOptimization.gradient_function>`.  If that fails,
945
    an error occurs.  The **search_space** argument can be used to specify lower and/or upper bounds for each dimension
946
    of the sample; if the gradient causes a value of the sample to exceed a bound along a dimenson, the value of the
947
    bound is used for that dimension, unless/until the gradient shifts and causes it to return back within the bound.
948

949
    Arguments
950
    ---------
951

952
    default_variable : list or ndarray : default None
953
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
954
        `objective_function <GradientOptimization.objective_function>`.
955

956
    objective_function : function or method
957
        specifies function used to evaluate `variable <GradientOptimization.variable>`
958
        in each iteration of the `optimization process  <GradientOptimization_Procedure>`;
959
        it must be specified and it must return a scalar value.
960

961
    gradient_function : function
962
        specifies function used to compute the gradient in each iteration of the `optimization process
963
        <GradientOptimization_Procedure>`;  if it is not specified, an attempt is made to compute it using
964
        `autograd.grad <https://github.com/HIPS/autograd>`_.
965

966
    direction : ASCENT or DESCENT : default ASCENT
967
        specifies the direction of gradient optimization: if *ASCENT*, movement is attempted in the positive direction
968
        (i.e., "up" the gradient);  if *DESCENT*, movement is attempted in the negative direction (i.e. "down"
969
        the gradient).
970

971
    step_size : int or float : default 1.0
972
        specifies the rate at which the `variable <GradientOptimization.variable>` is updated in each
973
        iteration of the `optimization process <GradientOptimization_Procedure>`;  if `annealing_function
974
        <GradientOptimization.annealing_function>` is specified, **step_size** specifies the initial value of
975
        `step_size <GradientOptimization.step_size>`.
976

977
    search_space : list or array : default None
978
        specifies bounds of the samples used to evaluate `objective_function <GradientOptimization.objective_function>`
979
        along each dimension of `variable <GradientOptimization.variable>`;  each item must be a list or tuple,
980
        or a `SampleIterator` that resolves to one.  If the item has two elements, they are used as the lower and
981
        upper bounds respectively, and the lower must be less than the upper;  None can be used in either place,
982
        in which case that bound is ignored.  If an item has more than two elements, the min is used as the lower
983
        bound and the max is used as the upper bound; none of the elements can be None.
984

985
    annealing_function : function or method : default None
986
        specifies function used to adapt `step_size <GradientOptimization.step_size>` in each
987
        iteration of the `optimization process <GradientOptimization_Procedure>`;  must accept two parameters —
988
        `step_size <GradientOptimization.step_size>` and `iteration <GradientOptimization_Procedure>`, in that
989
        order — and return a scalar value that is used for the next iteration of optimization.
990

991
    convergence_criterion : *VARIABLE* or *VALUE* : default *VALUE*
992
        specifies the parameter used to terminate the `optimization process <GradientOptimization_Procedure>`.
993
        *VARIABLE*: process terminates when the most recent sample differs from the previous one by less than
994
        `convergence_threshold <GradientOptimization.convergence_threshold>`;  *VALUE*: process terminates when the
995
        last value returned by `objective_function <GradientOptimization.objective_function>` differs from the
996
        previous one by less than `convergence_threshold <GradientOptimization.convergence_threshold>`.
997

998
    convergence_threshold : int or float : default 0.001
999
        specifies the change in value of `convergence_criterion` below which the optimization process is terminated.
1000

1001
    max_iterations : int : default 1000
1002
        specifies the maximum number of times the `optimization process <GradientOptimization_Procedure>` is allowed to
1003
        iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.
1004

1005
    save_samples : bool
1006
        specifies whether or not to save and return all of the samples used to evaluate `objective_function
1007
        <GradientOptimization.objective_function>` in the `optimization process <GradientOptimization_Procedure>`.
1008

1009
    save_values : bool
1010
        specifies whether or not to save and return the values of `objective_function
1011
        <GradientOptimization.objective_function>` for all samples evaluated in the `optimization
1012
        process <GradientOptimization_Procedure>`.
1013

1014
    Attributes
1015
    ----------
1016

1017
    variable : ndarray
1018
        sample used as the starting point for the `optimization process <GradientOptimization_Procedure>` (i.e., one
1019
        used to evaluate `objective_function <GradientOptimization.objective_function>` in the first iteration).
1020

1021
    objective_function : function or method
1022
        function used to evaluate `variable <GradientOptimization.variable>`
1023
        in each iteration of the `optimization process <GradientOptimization_Procedure>`;
1024
        it must be specified and it must return a scalar value.
1025

1026
    gradient_function : function
1027
        function used to compute the gradient in each iteration of the `optimization process
1028
        <GradientOptimization_Procedure>` (see `Gradient Calculation <GradientOptimization_Gradient_Calculation>` for
1029
        details).
1030

1031
    direction : ASCENT or DESCENT
1032
        direction of gradient optimization:  if *ASCENT*, movement is attempted in the positive direction
1033
        (i.e., "up" the gradient);  if *DESCENT*, movement is attempted in the negative direction (i.e. "down"
1034
        the gradient).
1035

1036
    step_size : int or float
1037
        determines the rate at which the `variable <GradientOptimization.variable>` is updated in each
1038
        iteration of the `optimization process <GradientOptimization_Procedure>`;  if `annealing_function
1039
        <GradientOptimization.annealing_function>` is specified, `step_size <GradientOptimization.step_size>`
1040
        determines the initial value.
1041

1042
    search_space : list or array
1043
        contains tuples specifying bounds within which each dimension of `variable <GradientOptimization.variable>` is
1044
        sampled, and used to evaluate `objective_function <GradientOptimization.objective_function>` in iterations of the
1045
        `optimization process <GradientOptimization_Procedure>`.
1046

1047
    bounds : tuple
1048
        contains two 2d arrays; the 1st contains the lower bounds for each dimension of the sample (`variable
1049
        <GradientOptimization.variable>`), and the 2nd the upper bound of each.
1050

1051
    annealing_function : function or method
1052
        function used to adapt `step_size <GradientOptimization.step_size>` in each iteration of the `optimization
1053
        process <GradientOptimization_Procedure>`;  if `None`, no call is made and the same `step_size
1054
        <GradientOptimization.step_size>` is used in each iteration.
1055

1056
    iteration : int
1057
        the current iteration of the `optimization process <GradientOptimization_Procedure>`.
1058

1059
    convergence_criterion : VARIABLE or VALUE
1060
        determines the parameter used to terminate the `optimization process <GradientOptimization_Procedure>`.
1061
        *VARIABLE*: process terminates when the most recent sample differs from the previous one by less than
1062
        `convergence_threshold <GradientOptimization.convergence_threshold>`;  *VALUE*: process terminates when the
1063
        last value returned by `objective_function <GradientOptimization.objective_function>` differs from the
1064
        previous one by less than `convergence_threshold <GradientOptimization.convergence_threshold>`.
1065

1066
    convergence_threshold : int or float
1067
        determines the change in value of `convergence_criterion` below which the `optimization process
1068
        <GradientOptimization_Procedure>` is terminated.
1069

1070
    max_iterations : int
1071
        determines the maximum number of times the `optimization process <GradientOptimization_Procedure>` is allowed to
1072
        iterate; if exceeded, a warning is issued and the function returns the last sample evaluated.
1073

1074
    save_samples : bool
1075
        determines whether or not to save and return all of the samples used to evaluate `objective_function
1076
        <GradientOptimization.objective_function>` in the `optimization process <GradientOptimization_Procedure>`.
1077

1078
    save_values : bool
1079
        determines whether or not to save and return the values of `objective_function
1080
        <GradientOptimization.objective_function>` for all samples evaluated in the `optimization
1081
        process <GradientOptimization_Procedure>`.
1082
    """
1083

1084
    componentName = GRADIENT_OPTIMIZATION_FUNCTION
1✔
1085
    bounds = None
1✔
1086

1087
    class Parameters(OptimizationFunction.Parameters):
1✔
1088
        """
1089
            Attributes
1090
            ----------
1091

1092
                variable
1093
                    see `variable <GradientOptimization.variable>`
1094

1095
                    :default value: [[0], [0]]
1096
                    :type: ``list``
1097
                    :read only: True
1098

1099
                annealing_function
1100
                    see `annealing_function <GradientOptimization.annealing_function>`
1101

1102
                    :default value: None
1103
                    :type:
1104

1105
                convergence_criterion
1106
                    see `convergence_criterion <GradientOptimization.convergence_criterion>`
1107

1108
                    :default value: `VALUE`
1109
                    :type: ``str``
1110

1111
                convergence_threshold
1112
                    see `convergence_threshold <GradientOptimization.convergence_threshold>`
1113

1114
                    :default value: 0.001
1115
                    :type: ``float``
1116

1117
                direction
1118
                    see `direction <GradientOptimization.direction>`
1119

1120
                    :default value: `ASCENT`
1121
                    :type: ``str``
1122

1123
                gradient_function
1124
                    see `gradient_function <GradientOptimization.gradient_function>`
1125

1126
                    :default value: None
1127
                    :type:
1128

1129
                max_iterations
1130
                    see `max_iterations <GradientOptimization.max_iterations>`
1131

1132
                    :default value: 1000
1133
                    :type: ``int``
1134

1135
                previous_value
1136
                    see `previous_value <GradientOptimization.previous_value>`
1137

1138
                    :default value: [[0], [0]]
1139
                    :type: ``list``
1140
                    :read only: True
1141

1142
                previous_variable
1143
                    see `previous_variable <GradientOptimization.previous_variable>`
1144

1145
                    :default value: [[0], [0]]
1146
                    :type: ``list``
1147
                    :read only: True
1148

1149
                step_size
1150
                    see `step_size <GradientOptimization.step_size>`
1151

1152
                    :default value: 1.0
1153
                    :type: ``float``
1154
        """
1155
        variable = Parameter([[0], [0]], read_only=True, pnl_internal=True, constructor_argument='default_variable')
1✔
1156

1157
        # these should be removed and usage switched to .get_previous()
1158
        previous_variable = Parameter([[0], [0]], read_only=True, pnl_internal=True, constructor_argument='default_variable')
1✔
1159
        previous_value = Parameter([[0], [0]], read_only=True, initializer='initializer')
1✔
1160

1161
        gradient_function = Parameter(None, stateful=False, loggable=False)
1✔
1162
        step_size = Parameter(1.0, modulable=True)
1✔
1163
        annealing_function = Parameter(None, stateful=False, loggable=False)
1✔
1164
        convergence_threshold = Parameter(.001, modulable=True)
1✔
1165
        max_iterations = Parameter(1000, modulable=True)
1✔
1166
        search_space = Parameter([SampleIterator([0, 0])], stateful=False, loggable=False)
1✔
1167

1168
        direction = ASCENT
1✔
1169
        convergence_criterion = Parameter(VALUE, pnl_internal=True)
1✔
1170

1171
        def _parse_direction(self, direction):
1✔
1172
            if direction == ASCENT:
1✔
1173
                return 1
1✔
1174
            else:
1175
                return -1
1✔
1176

1177
    @check_user_specified
1✔
1178
    @tc.typecheck
1✔
1179
    def __init__(self,
1✔
1180
                 default_variable=None,
1181
                 objective_function:tc.optional(is_function_type)=None,
1182
                 gradient_function:tc.optional(is_function_type)=None,
1183
                 direction:tc.optional(tc.enum(ASCENT, DESCENT))=None,
1184
                 search_space=None,
1185
                 step_size:tc.optional(tc.any(int, float))=None,
1186
                 annealing_function:tc.optional(is_function_type)=None,
1187
                 convergence_criterion:tc.optional(tc.enum(VARIABLE, VALUE))=None,
1188
                 convergence_threshold:tc.optional(tc.any(int, float))=None,
1189
                 max_iterations:tc.optional(int)=None,
1190
                 save_samples:tc.optional(bool)=None,
1191
                 save_values:tc.optional(bool)=None,
1192
                 params=None,
1193
                 owner=None,
1194
                 prefs=None):
1195

1196
        search_function = self._follow_gradient
1✔
1197
        search_termination_function = self._convergence_condition
1✔
1198

1199
        super().__init__(
1✔
1200
            default_variable=default_variable,
1201
            objective_function=objective_function,
1202
            search_function=search_function,
1203
            search_space=search_space,
1204
            search_termination_function=search_termination_function,
1205
            max_iterations=max_iterations,
1206
            save_samples=save_samples,
1207
            save_values=save_values,
1208
            step_size=step_size,
1209
            convergence_criterion=convergence_criterion,
1210
            convergence_threshold=convergence_threshold,
1211
            gradient_function=gradient_function,
1212
            annealing_function=annealing_function,
1213
            params=params,
1214
            owner=owner,
1215
            prefs=prefs,
1216
        )
1217

1218
    def _validate_params(self, request_set, target_set=None, context=None):
1✔
1219

1220
        super()._validate_params(request_set=request_set, target_set=target_set, context=context)
1✔
1221

1222
        if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
1✔
1223
            search_space = request_set[SEARCH_SPACE]
1✔
1224
            if all(s is None for s in search_space):
1✔
1225
                return
1✔
1226
            # If search space is a single 2-item list or tuple with numbers (i.e., bounds),
1227
            #     wrap in list for handling below
1228
            if len(search_space)==2 and all(isinstance(i, Number) for i in search_space):
1!
1229
                search_space = [search_space]
×
1230
            for s in search_space:
1✔
1231
                if isinstance(s, SampleIterator):
1!
1232
                    s = s()
1✔
1233
                if len(s) != 2:
1!
1234
                    owner_str = ''
×
1235
                    if self.owner:
×
1236
                        owner_str = f' of {self.owner.name}'
×
1237
                    raise OptimizationFunctionError(f"All items in {repr(SEARCH_SPACE)} arg for {self.name}{owner_str} "
1238
                                                    f"must be or resolve to a 2-item list or tuple; this doesn't: {s}.")
1239

1240
    @handle_external_context(fallback_most_recent=True)
1✔
1241
    def reset(self, default_variable=None, objective_function=None, context=None, **kwargs):
1✔
1242
        super().reset(
1✔
1243
            objective_function=objective_function,
1244
            context=context,
1245
            **kwargs
1246
        )
1247

1248
        # Differentiate objective_function using autograd.grad()
1249
        if objective_function is not None and not self.gradient_function:
1!
1250
            try:
1✔
1251
                from autograd import grad
1✔
1252
                self.parameters.gradient_function._set(grad(self.objective_function), context)
1✔
1253
            except Exception:
×
1254
                raise OptimizationFunctionError("Unable to use autograd with {} specified for {} Function: {}.".
1255
                                                format(repr(OBJECTIVE_FUNCTION), self.__class__.__name__,
1256
                                                       objective_function.__name__))
1257
        search_space = self.search_space
1✔
1258
        bounds = None
1✔
1259

1260
        if self.owner:
1!
1261
            owner_str = f' of {self.owner.name}'
1✔
1262
        else:
1263
            owner_str = ''
×
1264

1265
        # Get bounds from search_space if it has any non-None entries
1266
        if any(i is not None for i in self.search_space):
1!
1267
            # Get min and max of each dimension of search space
1268
            #    and assign to corresponding elements of lower and upper items of bounds
1269
            lower = []
×
1270
            upper = []
×
1271
            bounds = (lower, upper)
×
1272
            for i in search_space:
×
1273
                if i is None:
×
1274
                    lower.append(None)
×
1275
                    upper.append(None)
×
1276
                else:
1277
                    if isinstance(i, SampleIterator):
×
1278
                        i = i()
×
1279
                    # Spec is bound (tuple or list with two values: lower and upper)
1280
                    if len(i)==2:
×
1281
                        lower.append(i[0])
×
1282
                        upper.append(i[1])
×
1283
                    else:
1284
                        lower.append(min(i))
×
1285
                        upper.append(max(i))
×
1286

1287
        # Validate bounds and reformat into arrays for lower and upper bounds, for use in _follow_gradient
1288
        #     (each should be same length as sample), and replace any None's with +/- inf
1289
        if bounds:
1!
1290
            if bounds[0] is None and bounds[1] is None:
×
1291
                bounds = None
×
1292
            else:
1293
                sample_len = len(default_variable)
×
1294
                lower = np.atleast_1d(bounds[0])
×
1295
                if len(lower)==1:
×
1296
                    # Single value specified for lower bound, so distribute over array with length = sample_len
1297
                    lower = np.full(sample_len, lower).reshape(sample_len,1)
×
1298
                elif len(lower)!=sample_len:
×
1299
                    raise OptimizationFunctionError(f"Array used for lower value of {repr(BOUNDS)} arg ({lower}) in "
1300
                                                    f"{self.name}{owner_str} must have the same number of elements "
1301
                                                    f"({sample_len}) as the sample over which optimization is being "
1302
                                                    f"performed.")
1303
                # Array specified for lower bound, so replace any None's with -inf
1304
                lower = np.array([[-float('inf')] if n[0] is None else n for n in lower.reshape(sample_len,1)])
×
1305

1306
                upper = np.atleast_1d(bounds[1])
×
1307
                if len(upper)==1:
×
1308
                    # Single value specified for upper bound, so distribute over array with length = sample_len
1309
                    upper = np.full(sample_len, upper).reshape(sample_len,1)
×
1310
                elif len(upper)!=sample_len:
×
1311
                    raise OptimizationFunctionError(f"Array used for upper value of {repr(BOUNDS)} arg ({upper}) in "
1312
                                                    f"{self.name}{owner_str} must have the same number of elements "
1313
                                                    f"({sample_len}) as the sample over which optimization is being "
1314
                                                    f"performed.")
1315
                # Array specified for upper bound, so replace any None's with +inf
1316
                upper = np.array([[float('inf')] if n[0] is None else n for n in upper.reshape(sample_len,1)])
×
1317

1318
                if not all(lower <= upper):
×
1319
                    raise OptimizationFunctionError(f"Specification of {repr(BOUNDS)} arg ({bounds}) for {self.name}"
1320
                                                    f"{owner_str} resulted in lower > corresponding upper for one or "
1321
                                                    f"more elements (lower: {lower.tolist()}; uuper: {upper.tolist()}).")
1322

1323
                bounds = (lower,upper)
×
1324

1325
        self.bounds = bounds
1✔
1326

1327
    def _function(self,
1✔
1328
                 variable=None,
1329
                 context=None,
1330
                 params=None,
1331
                 **kwargs):
1332
        """Return the sample that yields the optimal value of `objective_function
1333
        <GradientOptimization.objective_function>`, and possibly all samples evaluated and their corresponding values.
1334

1335
        Optimal value is defined by `direction <GradientOptimization.direction>`:
1336
        - if *ASCENT*, returns greatest value
1337
        - if *DESCENT*, returns least value
1338

1339
        Returns
1340
        -------
1341

1342
        optimal sample, optimal value, saved_samples, saved_values : ndarray, ndarray, list, list
1343
            first array contains sample that yields the highest or lowest value of `objective_function
1344
            <GradientOptimization.objective_function>`, depending on `direction <GradientOptimization.direction>`,
1345
            and the second array contains the value of the function for that sample.
1346
            If `save_samples <GradientOptimization.save_samples>` is `True`, first list contains all the values
1347
            sampled in the order they were evaluated; otherwise it is empty.  If `save_values
1348
            <GradientOptimization.save_values>` is `True`, second list contains the values returned by
1349
            `objective_function <GradientOptimization.objective_function>` for all the samples in the order they were
1350
            evaluated; otherwise it is empty.
1351
        """
1352

1353
        optimal_sample, optimal_value, all_samples, all_values = super()._evaluate(variable=variable,
1✔
1354
                                                                                  context=context,
1355
                                                                                  params=params,
1356
                                                                                  )
1357
        return_all_samples = return_all_values = []
1✔
1358
        if self.parameters.save_samples._get(context):
1!
1359
            return_all_samples = all_samples
×
1360
        if self.parameters.save_values._get(context):
1!
1361
            return_all_values = all_values
×
1362
        # return last_variable
1363
        return optimal_sample, optimal_value, return_all_samples, return_all_values
1✔
1364

1365
    def _follow_gradient(self, sample, sample_num, context=None):
1✔
1366

1367
        if self.gradient_function is None:
1✔
1368
            return sample
1✔
1369

1370
        # Index from 1 rather than 0
1371
        # Update step_size
1372
        step_size = self.parameters.step_size._get(context)
1✔
1373
        if sample_num == 0:
1✔
1374
            # Start from initial value (specified by user in step_size arg)
1375
            step_size = self.parameters.step_size.default_value
1✔
1376
            self.parameters.step_size._set(step_size, context)
1✔
1377
        if self.annealing_function:
1!
1378
            step_size = call_with_pruned_args(self.annealing_function, step_size, sample_num, context=context)
×
1379
            self.parameters.step_size._set(step_size, context)
×
1380

1381
        # Compute gradients with respect to current sample
1382
        _gradients = call_with_pruned_args(self.gradient_function, sample, context=context)
1✔
1383

1384
        # Get new sample based on new gradients
1385
        new_sample = sample + self.parameters.direction._get(context) * step_size * np.array(_gradients)
1✔
1386

1387
        # Constrain new sample to be within bounds
1388
        if self.bounds:
1!
1389
            new_sample = np.array(np.maximum(self.bounds[0],
×
1390
                                             np.minimum(self.bounds[1], new_sample))).reshape(sample.shape)
1391

1392
        return new_sample
1✔
1393

1394
    def _convergence_condition(self, variable, value, iteration, context=None):
1✔
1395
        previous_variable = self.parameters.previous_variable._get(context)
1✔
1396
        previous_value = self.parameters.previous_value._get(context)
1✔
1397

1398
        if iteration == 0:
1✔
1399
            # self._convergence_metric = self.convergence_threshold + EPSILON
1400
            self.parameters.previous_variable._set(variable, context)
1✔
1401
            self.parameters.previous_value._set(value, context)
1✔
1402
            return False
1✔
1403

1404
        # Evaluate for convergence
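        # VALUE: |f(x_t) - f(x_{t-1})|;  VARIABLE: max_i |x_t[i] - x_{t-1}[i]|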
1405
        if self.convergence_criterion == VALUE:
1!
1406
            convergence_metric = np.abs(value - previous_value)
1✔
1407
        else:
1408
            convergence_metric = np.max(np.abs(np.array(variable) -
×
1409
                                               np.array(previous_variable)))
1410

1411
        self.parameters.previous_variable._set(variable, context)
1✔
1412
        self.parameters.previous_value._set(value, context)
1✔
1413

1414
        return convergence_metric <= self.parameters.convergence_threshold._get(context)
1✔
1415

1416

1417
MAXIMIZE = 'maximize'
1✔
1418
MINIMIZE = 'minimize'
1✔
1419

1420

1421
class GridSearch(OptimizationFunction):
1✔
1422
    """
1423
    GridSearch(                      \
1424
        default_variable=None,       \
1425
        objective_function=None,     \
1426
        direction=MAXIMIZE,          \
1427
        max_iterations=1000,         \
1428
        save_samples=False,          \
1429
        save_values=False,           \
1430
        params=None,                 \
1431
        owner=None,                  \
1432
        prefs=None                   \
1433
        )
1434

1435
    Search over all samples generated by `search_space <GridSearch.search_space>` for the one that optimizes the
1436
    value of `objective_function <GridSearch.objective_function>`.
1437

1438
    .. _GridSearch_Procedure:
1439

1440
    **Grid Search Procedure**
1441

1442
    When `function <GridSearch.function>` is executed, it iterates over the following steps:
1443

1444
        - get next sample from `search_space <GridSearch.search_space>`;
1445
        ..
1446
        - compute value of `objective_function <GridSearch.objective_function>` for that sample.
1447

1448
    The current iteration is contained in `iteration <GridSearch.iteration>` and the total number of samples in the
1449
    `search_space <GridSearch.search_space>` is contained in `num_iterations <GridSearch.num_iterations>`.
1450
    Iteration continues until all values in `search_space <GridSearch.search_space>` have been evaluated (i.e.,
1451
    `num_iterations <GridSearch.num_iterations>` is reached), or `max_iterations <GridSearch.max_iterations>` is
1452
    exceeded.  The function returns the sample that yielded either the highest (if `direction <GridSearch.direction>`
1453
    is *MAXIMIZE*) or lowest (if `direction <GridSearch.direction>` is *MINIMIZE*) value of the `objective_function
1454
    <GridSearch.objective_function>`, along with the value for that sample, as well as lists containing all of the
1455
    samples evaluated and their values if either `save_samples <GridSearch.save_samples>` or `save_values
1456
    <GridSearch.save_values>` is `True`, respectively.
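
    A minimal sketch of the search itself (illustrative standalone code, not this class's actual or compiled
    implementation; ``objective`` is a hypothetical stand-in for `objective_function
    <GridSearch.objective_function>`)::

        import itertools
        import numpy as np

        def objective(sample):
            return -np.sum(np.asarray(sample) ** 2)     # with MAXIMIZE, optimum is (0, 0)

        search_space = [np.linspace(-1, 1, 5), np.linspace(-1, 1, 5)]
        grid = itertools.product(*search_space)         # Cartesian product of the dimensions
        optimal_sample, optimal_value = max(
            ((s, objective(s)) for s in grid), key=lambda pair: pair[1])
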
1457

1458
    Arguments
1459
    ---------
1460

1461
    default_variable : list or ndarray : default None
1462
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
1463
        `objective_function <GridSearch.objective_function>`.
1464

1465
    objective_function : function or method
1466
        specifies function used to evaluate sample in each iteration of the `optimization process <GridSearch_Procedure>`;
1467
        it must be specified and must return a scalar value.
1468

1469
    search_space : list or array of SampleIterators
1470
        specifies `SampleIterators <SampleIterator>` used to generate samples evaluated by `objective_function
1471
        <GridSearch.objective_function>`;  all of the iterators must be finite (i.e., must have a `num <SampleIterator>`
1472
        attribute;  see `SampleSpec` for additional details).
1473

1474
    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
1475
        specifies the direction of optimization:  if *MAXIMIZE*, the highest value of `objective_function
1476
        <GridSearch.objective_function>` is sought;  if *MINIMIZE*, the lowest value is sought.
1477

1478
    max_iterations : int : default 1000
1479
        specifies the maximum number of times the `optimization process <GridSearch_Procedure>` is allowed to iterate;
1480
        if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.
1481

1482
    save_samples : bool
1483
        specifies whether or not to return all of the samples used to evaluate `objective_function
1484
        <GridSearch.objective_function>` in the `optimization process <GridSearch_Procedure>`
1485
        (i.e., a copy of the samples generated from the `search_space <GridSearch.search_space>`).
1486

1487
    save_values : bool
1488
        specifies whether or not to save and return the values of `objective_function <GridSearch.objective_function>`
1489
        for all samples evaluated in the `optimization process <GridSearch_Procedure>`.
1490

1491
    Attributes
1492
    ----------
1493

1494
    variable : ndarray
1495
        first sample evaluated by `objective_function <GridSearch.objective_function>` (i.e., one used to evaluate it
1496
        in the first iteration of the `optimization process <GridSearch_Procedure>`).
1497

1498
    objective_function : function or method
1499
        function used to evaluate sample in each iteration of the `optimization process <GridSearch_Procedure>`.
1500

1501
    search_space : list or array of SampleIterators
1502
        contains `SampleIterators <SampleIterator>` for generating samples evaluated by `objective_function
1503
        <GridSearch.objective_function>` in iterations of the `optimization process <GridSearch_Procedure>`;
1504

1505
    grid : iterator
1506
        generates samples from the Cartesian product of the `SampleIterators <SampleIterator>` in `search_space
        <GridSearch.search_space>`.
1507

1508
    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
1509
        determines the direction of optimization:  if *MAXIMIZE*, the greatest value of `objective_function
1510
        <GridSearch.objective_function>` is sought;  if *MINIMIZE*, the least value is sought.
1511

1512
    iteration : int
1513
        the current iteration of the `optimization process <GridSearch_Procedure>`.
1514

1515
    num_iterations : int
1516
        number of iterations required to complete the entire grid search;  equal to the product of all the `num
1517
        <SampleIterator.num>` attributes of the `SampleIterators <SampleIterator>` in the `search_space
1518
        <GridSearch.search_space>`.
1519

1520
    max_iterations : int
1521
        determines the maximum number of times the `optimization process <GridSearch_Procedure>` is allowed to iterate;
1522
        if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.
1523

1524
    save_samples : bool
1525
        determines whether or not to save and return all samples generated from `search_space <GridSearch.search_space>`
1526
        and evaluated by the  `objective_function <GridSearch.objective_function>` in the `optimization process
1527
        <GridSearch_Procedure>`.
1528

1529
    save_values : bool
1530
        determines whether or not to save and return the value of `objective_function
1531
        <GridSearch.objective_function>` for all samples evaluated in the `optimization process <GridSearch_Procedure>`.
1532
    """
1533

1534
    componentName = GRID_SEARCH_FUNCTION
1✔
1535

1536
    class Parameters(OptimizationFunction.Parameters):
1✔
1537
        """
1538
            Attributes
1539
            ----------
1540

1541
                direction
1542
                    see `direction <GridSearch.direction>`
1543

1544
                    :default value: `MAXIMIZE`
1545
                    :type: ``str``
1546

1547
                grid
1548
                    see `grid <GridSearch.grid>`
1549

1550
                    :default value: None
1551
                    :type:
1552

1553
                random_state
1554
                    see `random_state <GridSearch.random_state>`
1555

1556
                    :default value: None
1557
                    :type: ``numpy.random.RandomState``
1558

1559
                save_samples
1560
                    see `save_samples <GridSearch.save_samples>`
1561

1562
                    :default value: True
1563
                    :type: ``bool``
1564

1565
                save_values
1566
                    see `save_values <GridSearch.save_values>`
1567

1568
                    :default value: True
1569
                    :type: ``bool``
1570
        """
1571
        grid = Parameter(None)
1✔
1572
        save_samples = Parameter(False, pnl_internal=True)
1✔
1573
        save_values = Parameter(False, pnl_internal=True)
1✔
1574
        random_state = Parameter(None, loggable=False, getter=_random_state_getter, dependencies='seed')
1✔
1575
        seed = Parameter(DEFAULT_SEED, modulable=True, fallback_default=True, setter=_seed_setter)
1✔
1576
        select_randomly_from_optimal_values = Parameter(False)
1✔
1577

1578
        direction = MAXIMIZE
1✔
1579

1580
    # TODO: should save_values be in the constructor if it's ignored?
1581
    # is False or True the correct value?
1582
    @check_user_specified
1✔
1583
    @tc.typecheck
1✔
1584
    def __init__(self,
1✔
1585
                 default_variable=None,
1586
                 objective_function:tc.optional(is_function_type)=None,
1587
                 search_space=None,
1588
                 direction:tc.optional(tc.enum(MAXIMIZE, MINIMIZE))=None,
1589
                 save_samples:tc.optional(bool)=None,
1590
                 save_values:tc.optional(bool)=None,
1591
                 # tolerance=0.,
1592
                 select_randomly_from_optimal_values=None,
1593
                 seed=None,
1594
                 params=None,
1595
                 owner=None,
1596
                 prefs=None,
1597
                 **kwargs):
1598

1599
        search_function = self._traverse_grid
1✔
1600
        search_termination_function = self._grid_complete
1✔
1601
        self._return_values = save_values
1✔
1602
        self._return_samples = save_samples
1✔
1603
        try:
1✔
1604
            search_space = [x if isinstance(x, SampleIterator) else SampleIterator(x) for x in search_space]
1✔
1605
        except TypeError:
1✔
1606
            pass
1✔
1607

1608
        self.num_iterations = 1 if search_space is None else np.prod([i.num for i in search_space])
1✔
1609
        # self.tolerance = tolerance
1610

1611
        super().__init__(
1✔
1612
            default_variable=default_variable,
1613
            objective_function=objective_function,
1614
            search_function=search_function,
1615
            search_termination_function=search_termination_function,
1616
            search_space=search_space,
1617
            select_randomly_from_optimal_values=select_randomly_from_optimal_values,
1618
            save_samples=save_samples,
1619
            save_values=save_values,
1620
            seed=seed,
1621
            direction=direction,
1622
            params=params,
1623
            owner=owner,
1624
            prefs=prefs,
1625
        )
1626

1627
    def _validate_params(self, request_set, target_set=None, context=None):
1✔
1628

1629
        super()._validate_params(request_set=request_set, target_set=target_set, context=context)
1✔
1630
        if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None:
1✔
1631
            search_space = request_set[SEARCH_SPACE]
1✔
1632

1633
            # Check that all iterators are finite (i.e., with num!=None)
1634
            if not all(s.num is not None for s in search_space if (s is not None and s.num)):
1✔
1635
                raise OptimizationFunctionError("All {}s in {} arg of {} must be finite (i.e., SampleIteror.num!=None)".
1636
                                                format(SampleIterator.__name__,
1637
                                                       repr(SEARCH_SPACE),
1638
                                                       self.__class__.__name__))
1639

1640
            # # Check that all finite iterators (i.e., with num!=None) are of the same length:
1641
            # finite_iterators = [s.num for s in search_space if s.num is not None]
1642
            # if not all(l==finite_iterators[0] for l in finite_iterators):
1643
            #     raise OptimizationFunctionError("All finite {}s in {} arg of {} must have the same number of steps".
1644
            #                                     format(SampleIterator.__name__,
1645
            #                                            repr(SEARCH_SPACE),
1646
            #                                            self.__class__.__name__,
1647
            #                                            ))
1648

1649
    @handle_external_context(fallback_most_recent=True)
1✔
1650
    def reset(self, search_space, context=None, **kwargs):
1✔
1651
        """Assign size of `search_space <GridSearch.search_space>"""
1652
        super(GridSearch, self).reset(search_space=search_space, context=context, **kwargs)
1✔
1653
        sample_iterators = search_space
1✔
1654
        owner_str = ''
1✔
1655
        if self.owner:
1!
1656
            owner_str = f' of {self.owner.name}'
1✔
1657
        for i in sample_iterators:
1✔
1658
            if i is None:
1✔
1659
                raise OptimizationFunctionError(f"Invalid {repr(SEARCH_SPACE)} arg for {self.name}{owner_str}; "
1660
                                                f"every dimension must be assigned a {SampleIterator.__name__}.")
1661
            if i.num is None:
1✔
1662
                raise OptimizationFunctionError(f"Invalid {repr(SEARCH_SPACE)} arg for {self.name}{owner_str}; each "
1663
                                                f"{SampleIterator.__name__} must have a value for its 'num' attribute.")
1664

1665
        self.num_iterations = np.prod([i.num for i in sample_iterators])
1✔
1666

1667
    def _get_optimized_controller(self):
1✔
1668
        # self.objective_function may be a bound method of
1669
        # OptimizationControlMechanism
1670
        return getattr(self.objective_function, '__self__', None)
1✔
1671

1672
    def _gen_llvm_function(self, *, ctx:pnlvm.LLVMBuilderContext, tags:frozenset):
1✔
1673
        if "select_min" in tags:
1✔
1674
            return self._gen_llvm_select_min_function(ctx=ctx, tags=tags)
1✔
1675
        ocm = self._get_optimized_controller()
1✔
1676
        if ocm is not None:
1✔
1677
            # self.objective_function may be a bound method of
1678
            # OptimizationControlMechanism
1679
            extra_args = [ctx.get_param_struct_type(ocm.agent_rep).as_pointer(),
1✔
1680
                          ctx.get_state_struct_type(ocm.agent_rep).as_pointer(),
1681
                          ctx.get_data_struct_type(ocm.agent_rep).as_pointer()]
1682
        else:
1683
            extra_args = []
1✔
1684

1685
        f = super()._gen_llvm_function(ctx=ctx, extra_args=extra_args, tags=tags)
1✔
1686
        if len(extra_args) > 0:
1✔
1687
            for a in f.args[-len(extra_args):]:
1✔
1688
                a.attributes.add('nonnull')
1✔
1689

1690
        return f
1✔
1691

1692
    def _get_input_struct_type(self, ctx):
1✔
1693
        if self.owner is not None:
1✔
1694
            variable = [port.defaults.value for port in self.owner.input_ports]
1✔
1695
            # Python list does not care about ndarrays of different lengths
1696
            # we do care, so convert to tuple to create struct
1697
            if all(type(x) == np.ndarray for x in variable) and not all(len(x) == len(variable[0]) for x in variable):
1✔
1698
                variable = tuple(variable)
1✔
1699

1700
            warnings.warn("Shape mismatch: {} variable expected: {} vs. got: {}".format(self, variable, self.defaults.variable))
1✔
1701

1702
        else:
1703
            variable = self.defaults.variable
1✔
1704

1705
        return ctx.convert_python_struct_to_llvm_ir(variable)
1✔
1706

1707
    def _get_output_struct_type(self, ctx):
1✔
1708
        val = self.defaults.value
1✔
1709
        # compiled version should never return 'all values'
1710
        if len(val[0]) != len(self.search_space):
1✔
1711
            val = list(val)
1✔
1712
            val[0] = [0.0] * len(self.search_space)
1✔
1713
        return ctx.convert_python_struct_to_llvm_ir((val[0], val[1]))
1✔
1714

1715
    def _gen_llvm_select_min_function(self, *, ctx:pnlvm.LLVMBuilderContext, tags:frozenset):
1✔
1716
        assert "select_min" in tags
1✔
1717
        ocm = self._get_optimized_controller()
1✔
1718
        if ocm is not None:
1✔
1719
            assert ocm.function is self
1✔
1720
            sample_t = ocm._get_evaluate_alloc_struct_type(ctx)
1✔
1721
            value_t = ocm._get_evaluate_output_struct_type(ctx, tags=tags)
1✔
1722
        else:
1723
            obj_func = ctx.import_llvm_function(self.objective_function)
1✔
1724
            sample_t = obj_func.args[2].type.pointee
1✔
1725
            value_t = obj_func.args[3].type.pointee
1✔
1726

1727
        args = [ctx.get_param_struct_type(self).as_pointer(),
1✔
1728
                ctx.get_state_struct_type(self).as_pointer(),
1729
                sample_t.as_pointer(),
1730
                sample_t.as_pointer(),
1731
                value_t.as_pointer(),
1732
                value_t.as_pointer(),
1733
                ctx.float_ty.as_pointer(),
1734
                ctx.int32_ty,
1735
                ctx.int32_ty]
1736
        builder = ctx.create_llvm_function(args, self, tags=tags)
1✔
1737

1738
        params, state_features, min_sample_ptr, samples_ptr, min_value_ptr, values_ptr, opt_count_ptr, start, stop = builder.function.args
1✔
1739
        for p in builder.function.args[:-2]:
1✔
1740
            p.attributes.add('noalias')
1✔
1741

1742
        # The creation helper function sets all pointers to non-null
1743
        # remove the attribute for 'samples_ptr'.
1744
        samples_ptr.attributes.remove('nonnull')
1✔
1745

1746
        random_state = pnlvm.helpers.get_state_ptr(builder, self, state_features,
1✔
1747
                                                   self.parameters.random_state.name)
1748
        select_random_ptr = pnlvm.helpers.get_param_ptr(builder, self, params,
1✔
1749
                                                        self.parameters.select_randomly_from_optimal_values.name)
1750

1751
        select_random_val = builder.load(select_random_ptr)
1✔
1752
        select_random = builder.fcmp_ordered("!=", select_random_val,
1✔
1753
                                             select_random_val.type(0))
1754

1755
        rand_out_ptr = builder.alloca(ctx.float_ty)
1✔
1756

1757
        # KDM 8/22/19: nonstateful direction here - OK?
1758
        direction = "<" if self.direction == MINIMIZE else ">"
1✔
1759
        replace_ptr = builder.alloca(ctx.bool_ty)
1✔
1760

1761
        min_idx_ptr = builder.alloca(stop.type)
1✔
1762
        builder.store(stop.type(-1), min_idx_ptr)
1✔
1763

1764
        # Check the value against current min
1765
        with pnlvm.helpers.for_loop(builder, start, stop, stop.type(1), "compare_loop") as (b, idx):
1✔
1766
            value_ptr = b.gep(values_ptr, [idx])
1✔
1767
            value = b.load(value_ptr)
1✔
1768
            min_value = b.load(min_value_ptr)
1✔
1769

1770
            replace = b.fcmp_unordered(direction, value, min_value)
1✔
1771
            b.store(replace, replace_ptr)
1✔
1772

1773
            # Python does "is_close" check first.
1774
            # This implements reservoir sampling
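            # (each of K values tied for the optimum survives with equal
            #  probability 1/K: the k-th tie encountered replaces the current
            #  pick with probability 1/k)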
1775
            with b.if_then(select_random):
1✔
1776
                close = pnlvm.helpers.is_close(ctx, b, value, min_value)
1✔
1777
                with b.if_else(close) as (tb, eb):
1✔
1778
                    with tb:
1✔
1779
                        opt_count = b.load(opt_count_ptr)
1✔
1780
                        opt_count = b.fadd(opt_count, opt_count.type(1))
1✔
1781
                        b.store(opt_count, opt_count_ptr)
1✔
1782

1783
                        # Roll a die to see if we should replace the current min
1784
                        prob = b.fdiv(opt_count.type(1), opt_count)
1✔
1785
                        rand_f = ctx.get_uniform_dist_function_by_state(random_state)
1✔
1786
                        b.call(rand_f, [random_state, rand_out_ptr])
1✔
1787
                        rand_out = b.load(rand_out_ptr)
1✔
1788
                        replace = b.fcmp_ordered("<", rand_out, prob)
1✔
1789
                        b.store(replace, replace_ptr)
1✔
1790
                    with eb:
1✔
1791
                        # Reset the counter if we are replacing with new best value
1792
                        with b.if_then(b.load(replace_ptr)):
1✔
1793
                            b.store(opt_count_ptr.type.pointee(1), opt_count_ptr)
1✔
1794

1795
            with b.if_then(b.load(replace_ptr)):
1✔
1796
                b.store(idx, min_idx_ptr)
1✔
1797
                b.store(b.load(value_ptr), min_value_ptr)
1✔
1798

1799
        min_idx = builder.load(min_idx_ptr)
1✔
1800
        found_min = builder.icmp_signed("!=", min_idx, min_idx.type(-1))
1✔
1801

1802
        with builder.if_then(found_min):
1✔
1803
            gen_samples = builder.icmp_signed("==", samples_ptr, samples_ptr.type(None))
1✔
1804
            with builder.if_else(gen_samples) as (b_true, b_false):
1✔
1805
                with b_true:
1✔
1806
                    search_space = pnlvm.helpers.get_param_ptr(builder, self, params,
1✔
1807
                                                               self.parameters.search_space.name)
1808
                    pnlvm.helpers.create_sample(b, min_sample_ptr, search_space, min_idx)
1✔
1809
                with b_false:
1✔
1810
                    sample_ptr = builder.gep(samples_ptr, [min_idx])
1✔
1811
                    builder.store(b.load(sample_ptr), min_sample_ptr)
1✔
1812

1813
        builder.ret_void()
1✔
1814
        return builder.function
1✔
1815

1816
    def _gen_llvm_function_body(self, ctx, builder, params, state_features, arg_in, arg_out, *, tags:frozenset):
1✔
1817
        controller = self._get_optimized_controller()
1✔
1818
        if controller is not None:
1✔
1819
            assert controller.function is self
1✔
1820
            obj_func = ctx.import_llvm_function(controller, tags=tags.union({"evaluate", "evaluate_type_objective"}))
1✔
1821
            comp_args = builder.function.args[-3:]
1✔
1822
            obj_param_ptr = comp_args[0]
1✔
1823
            obj_state_ptr = comp_args[1]
1✔
1824

1825
            # Construct input
1826
            comp_input = builder.alloca(obj_func.args[4].type.pointee, name="sim_input")
1✔
1827

1828
            input_initialized = [False] * len(comp_input.type.pointee)
1✔
1829
            for src_idx, ip in enumerate(controller.input_ports):
1✔
1830
                if ip.shadow_inputs is None:
1✔
1831
                    continue
1✔
1832

1833
                # shadow inputs point to an input port of a node.
1834
                # If that node takes direct input, it will have an associated
1835
                # (input_port, output_port) in the input_CIM.
1836
                # Take the former as an index to composition input variable.
1837
                cim_in_port = controller.agent_rep.input_CIM_ports[ip.shadow_inputs][0]
1✔
1838
                dst_idx = controller.agent_rep.input_CIM.input_ports.index(cim_in_port)
1✔
1839

1840
                # Check that all inputs are unique
1841
                assert not input_initialized[dst_idx], "Double initialization of input {}".format(dst_idx)
1✔
1842
                input_initialized[dst_idx] = True
1✔
1843

1844
                src = builder.gep(arg_in, [ctx.int32_ty(0), ctx.int32_ty(src_idx)])
1✔
1845
                # Destination is a struct of 2d arrays
1846
                dst = builder.gep(comp_input, [ctx.int32_ty(0),
1✔
1847
                                               ctx.int32_ty(dst_idx),
1848
                                               ctx.int32_ty(0)])
1849
                builder.store(builder.load(src), dst)
1✔
1850

1851
            # Assert that we have populated all inputs
1852
            assert all(input_initialized), \
1✔
1853
              "Not all inputs to the simulated composition are initialized: {}".format(input_initialized)
1854

1855
            num_inputs = builder.alloca(obj_func.args[6].type.pointee, name="num_sim_inputs")
1✔
1856
            builder.store(num_inputs.type.pointee(1), num_inputs)
1✔
1857

1858
            # Extra args: input, data, number of inputs
1859
            extra_args = [comp_input, comp_args[2], num_inputs]
1✔
1860
        else:
1861
            obj_func = ctx.import_llvm_function(self.objective_function)
1✔
1862
            obj_state_ptr = pnlvm.helpers.get_state_ptr(builder, self, state_features,
1✔
1863
                                                        "objective_function")
1864
            obj_param_ptr = pnlvm.helpers.get_param_ptr(builder, self, params,
1✔
1865
                                                        "objective_function")
1866
            extra_args = []
1✔
1867

1868
        sample_t = obj_func.args[2].type.pointee
1✔
1869
        value_t = obj_func.args[3].type.pointee
1✔
1870
        min_sample_ptr = builder.alloca(sample_t)
1✔
1871
        min_value_ptr = builder.alloca(value_t)
1✔
1872
        sample_ptr = builder.alloca(sample_t)
1✔
1873
        value_ptr = builder.alloca(value_t)
1✔
1874

1875
        search_space_ptr = pnlvm.helpers.get_param_ptr(builder, self, params,
1✔
1876
                                                       self.parameters.search_space.name)
1877

1878
        opt_count_ptr = builder.alloca(ctx.float_ty)
1✔
1879
        builder.store(opt_count_ptr.type.pointee(0), opt_count_ptr)
1✔
1880

1881
        # Use NaN here. fcmp_unordered below returns true if one of the
1882
        # operands is a NaN. This makes sure we always set min_*
1883
        # in the first iteration
1884
        builder.store(min_value_ptr.type.pointee(float("NaN")), min_value_ptr)
1✔
1885

1886
        b = builder
1✔
1887
        with contextlib.ExitStack() as stack:
1✔
1888
            for i in range(len(search_space_ptr.type.pointee)):
1✔
1889
                dimension = b.gep(search_space_ptr, [ctx.int32_ty(0), ctx.int32_ty(i)])
1✔
1890
                arg_elem = b.gep(sample_ptr, [ctx.int32_ty(0), ctx.int32_ty(i)])
1✔
1891
                if isinstance(dimension.type.pointee,  pnlvm.ir.ArrayType):
1✔
1892
                    b, idx = stack.enter_context(pnlvm.helpers.array_ptr_loop(b, dimension, "loop_" + str(i)))
1✔
1893
                    alloc_elem = b.gep(dimension, [ctx.int32_ty(0), idx])
1✔
1894
                    b.store(b.load(alloc_elem), arg_elem)
1✔
1895
                elif isinstance(dimension.type.pointee, pnlvm.ir.LiteralStructType):
1✔
1896
                    assert len(dimension.type.pointee) == 3
1✔
1897
                    start_ptr = b.gep(dimension, [ctx.int32_ty(0), ctx.int32_ty(0)])
1✔
1898
                    step_ptr = b.gep(dimension, [ctx.int32_ty(0), ctx.int32_ty(1)])
1✔
1899
                    num_ptr = b.gep(dimension, [ctx.int32_ty(0), ctx.int32_ty(2)])
1✔
1900
                    start = b.load(start_ptr)
1✔
1901
                    step = b.load(step_ptr)
1✔
1902
                    num = b.load(num_ptr)
1✔
1903
                    b, idx = stack.enter_context(pnlvm.helpers.for_loop_zero_inc(b, num, "loop_" + str(i)))
1✔
1904
                    val = b.uitofp(idx, start.type)
1✔
1905
                    val = b.fmul(val, step)
1✔
1906
                    val = b.fadd(val, start)
1✔
1907
                    b.store(val, arg_elem)
1✔
1908
                else:
1909
                    assert False, "Unknown dimension type: {}".format(dimension.type)
1910

1911
            # We are in the innermost loop now, with sample_ptr set up for execution
1912
            b.call(obj_func, [obj_param_ptr, obj_state_ptr, sample_ptr,
1✔
1913
                              value_ptr] + extra_args)
1914

1915
            # Check if smaller than current best.
1916
            # the argument pointers are already offset, so use range <0,1)
1917
            min_tags = tags.union({"select_min", "evaluate_type_objective"})
1✔
1918
            select_min_f = ctx.import_llvm_function(self, tags=min_tags)
1✔
1919
            b.call(select_min_f, [params, state_features, min_sample_ptr, sample_ptr,
1✔
1920
                                  min_value_ptr, value_ptr, opt_count_ptr,
1921
                                  ctx.int32_ty(0), ctx.int32_ty(1)])
1922

1923
            builder = b
1✔
1924

1925
        # Produce output
1926
        out_sample_ptr = builder.gep(arg_out, [ctx.int32_ty(0), ctx.int32_ty(0)])
1✔
1927
        out_value_ptr = builder.gep(arg_out, [ctx.int32_ty(0), ctx.int32_ty(1)])
1✔
1928
        builder.store(builder.load(min_sample_ptr), out_sample_ptr)
1✔
1929
        builder.store(builder.load(min_value_ptr), out_value_ptr)
1✔
1930
        return builder
1✔
1931

1932
    def _function(self,
1✔
1933
                 variable=None,
1934
                 context=None,
1935
                 params=None,
1936
                 **kwargs):
1937
        """Return the sample that yields the optimal value of `objective_function <GridSearch.objective_function>`,
1938
        and possibly all samples evaluated and their corresponding values.
1939

1940
        Optimal value is defined by `direction <GridSearch.direction>`:
1941
        - if *MAXIMIZE*, returns greatest value
1942
        - if *MINIMIZE*, returns least value
1943

1944
        Returns
1945
        -------
1946

1947
        optimal sample, optimal value, saved_samples, saved_values : ndarray, ndarray, list, list
1948
            first array contains sample that yields the highest or lowest value of `objective_function
1949
            <GridSearch.objective_function>`, depending on `direction <GridSearch.direction>`, and the
1950
            second array contains the value of the function for that sample. If `save_samples
1951
            <GridSearch.save_samples>` is `True`, first list contains all the values sampled in the order they were
1952
            evaluated; otherwise it is empty.  If `save_values <GridSearch.save_values>` is `True`, second list
1953
            contains the values returned by `objective_function <GridSearch.objective_function>` for all the samples
1954
            in the order they were evaluated; otherwise it is empty.
1955
        """
1956

1957
        self.reset_grid()
1✔
1958
        return_all_samples = return_all_values = []
1✔
1959

1960
        direction = self.parameters.direction._get(context)
1✔
1961
        if MPI_IMPLEMENTATION:
1!
1962

1963
            from mpi4py import MPI
×
1964

1965
            Comm = MPI.COMM_WORLD
×
1966
            rank = Comm.Get_rank()
×
1967
            size = Comm.Get_size()
×
1968

1969
            self.search_space = np.atleast_2d(self.search_space)
×
1970

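            # Ceil-divide the search space across MPI ranks so every sample is
            # assigned to exactly one rank; the last rank may get a short chunk.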
1971
            chunk_size = (len(self.search_space) + (size - 1)) // size
×
1972
            start = chunk_size * rank
×
1973
            stop = chunk_size * (rank + 1)
×
1974
            if start > len(self.search_space):
×
1975
                start = len(self.search_space)
×
1976
            if stop > len(self.search_space):
×
1977
                stop = len(self.search_space)
×
1978

1979
            # FIX:  INITIALIZE TO FULL LENGTH AND ASSIGN DEFAULT VALUES (MORE EFFICIENT):
            samples = np.array([[]])
            sample_optimal = np.empty_like(self.search_space[0])
            values = np.array([])
            # Initialize so that the first evaluated value always replaces it, whichever the direction
            value_optimal = float('-inf') if direction == MAXIMIZE else float('inf')
            sample_value_max_tuple = (sample_optimal, value_optimal)

            # Set up progress bar
            _show_progress = False
            if hasattr(self, OWNER) and self.owner and self.owner.prefs.reportOutputPref is SIMULATION_PROGRESS:
                _show_progress = True
                _progress_bar_char = '.'
                _progress_bar_rate_str = ""
                _search_space_size = len(self.search_space)
                _progress_bar_rate = int(10**(np.log10(_search_space_size) - 2))
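                # This prints one mark per ~1% of the search space; e.g., for 10,000 samples,
                # int(10**(np.log10(10000) - 2)) == 100, so a '.' is printed every 100 samples.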
                if _progress_bar_rate > 1:
                    _progress_bar_rate_str = str(_progress_bar_rate) + " "
                print("\n{} executing optimization process (one {} for each {}of {} samples): ".
                      format(self.owner.name, repr(_progress_bar_char), _progress_bar_rate_str, _search_space_size))
                _progress_bar_count = 0

            for sample in self.search_space[start:stop, :]:

                if _show_progress:
                    increment_progress_bar = (_progress_bar_rate < 1) or not (_progress_bar_count % _progress_bar_rate)
                    if increment_progress_bar:
                        print(_progress_bar_char, end='', flush=True)
                    _progress_bar_count += 1

                # Evaluate objective_function for current sample
                value = self.objective_function(sample, context=context)

                # Evaluate for optimal value
                if direction == MAXIMIZE:
                    value_optimal = max(value, value_optimal)
                elif direction == MINIMIZE:
                    value_optimal = min(value, value_optimal)
                else:
                    assert False, "PROGRAM ERROR: bad value for {} arg of {}: {}".\
                        format(repr(DIRECTION), self.name, direction)

                # FIX: PUT ERROR HERE IF value AND/OR value_max ARE EMPTY (E.G., WHEN EXECUTION_ID IS WRONG)
                # If value is optimal, store corresponding sample
                if value == value_optimal:
                    # Keep track of port values and allocation policy associated with EVC max
                    sample_optimal = sample
                    sample_value_max_tuple = (sample_optimal, value_optimal)

                # Save samples and/or values if specified
                if self.save_values:
                    # FIX:  ASSIGN BY INDEX (MORE EFFICIENT)
                    values = np.append(values, np.atleast_1d(value), axis=0)
                if self.save_samples:
                    if len(samples[0]) == 0:
                        samples = np.atleast_2d(sample)
                    else:
                        samples = np.append(samples, np.atleast_2d(sample), axis=0)

            # Aggregate, reduce and assign global results
            # combine each process's optimal result tuple and distribute the set to all processes
            optimal_tuples = Comm.allgather(sample_value_max_tuple)
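            # Note: allgather delivers every rank's tuple to every rank, so each process can
            # compute the same global optimum locally, without a separate broadcast step.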
            # get the tuple that is optimal across all processes (max for MAXIMIZE, min for MINIMIZE)
            reduce_fct = max if direction == MAXIMIZE else min
            optimal_of_all_tuples = reduce_fct(optimal_tuples, key=lambda opt_tuple: opt_tuple[1])
            # get sample and value associated with the global optimum
            sample_optimal, value_optimal = optimal_of_all_tuples

            if self.parameters.save_samples._get(context):
                return_all_samples = np.concatenate(Comm.allgather(samples), axis=0)
            if self.parameters.save_values._get(context):
                return_all_values = np.concatenate(Comm.allgather(values), axis=0)

        else:
            assert direction == MAXIMIZE or direction == MINIMIZE, \
                "PROGRAM ERROR: bad value for {} arg of {}: {}".\
                    format(repr(DIRECTION), self.name, direction)

            # Evaluate objective_function for each sample
            last_sample, last_value, all_samples, all_values = self._evaluate(
                variable=variable,
                context=context,
                params=params,
            )

            # Compiled version
            ocm = self._get_optimized_controller()
            if ocm is not None and ocm.parameters.comp_execution_mode._get(context) in {"PTX", "LLVM"}:

                # Reduce array of values to min/max
                # select_min params are:
                # params, state, min_sample_ptr, sample_ptr, min_value_ptr, value_ptr, opt_count_ptr, count
                min_tags = frozenset({"select_min", "evaluate_type_objective"})
                bin_func = pnlvm.LLVMBinaryFunction.from_obj(self, tags=min_tags)
                ct_param = bin_func.byref_arg_types[0](*self._get_param_initializer(context))
                ct_state = bin_func.byref_arg_types[1](*self._get_state_initializer(context))
                ct_opt_sample = bin_func.byref_arg_types[2](float("NaN"))
                ct_alloc = None  # NULL for samples
                ct_values = all_values
                ct_opt_value = bin_func.byref_arg_types[4]()
                ct_opt_count = bin_func.byref_arg_types[6](0)
                ct_start = bin_func.c_func.argtypes[7](0)
                ct_stop = bin_func.c_func.argtypes[8](len(ct_values))

                bin_func(ct_param, ct_state, ct_opt_sample, ct_alloc, ct_opt_value,
                         ct_values, ct_opt_count, ct_start, ct_stop)
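
                # ct_start/ct_stop bound the slice of ct_values that the compiled reduction
                # scans; here they span the full array, i.e. [0, len(ct_values)).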
                value_optimal = ct_opt_value.value
                sample_optimal = np.ctypeslib.as_array(ct_opt_sample)
                all_values = np.ctypeslib.as_array(ct_values)

                # These are normally stored in the parent function (OptimizationFunction).
                # Since we didn't call super()._function as the Python path does,
                # save the values here.
                if self.parameters.save_samples._get(context):
                    self.parameters.saved_samples._set(all_samples, context)
                if self.parameters.save_values._get(context):
                    self.parameters.saved_values._set(all_values, context)

            # Python version
            else:

                if all_values.shape[-1] != all_samples.shape[-1]:
                    raise ValueError(f"GridSearch Error: {self}._evaluate returned mismatched sizes for "
                                     f"samples and values. This is likely due to a bug in the implementation "
                                     f"of the {self.__class__} _evaluate method.")

                if all_values.shape[0] > 1:
                    raise ValueError(f"GridSearch Error: {self}._evaluate returned values with more than one element. "
                                     "GridSearch currently does not support optimizing over multiple output values.")

                # Find the optimal value(s)
                optimal_value_count = 1
                value_sample_pairs = zip(all_values.flatten(),
                                         [all_samples[:, i] for i in range(all_samples.shape[1])])
                value_optimal, sample_optimal = next(value_sample_pairs)
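
                # all_samples holds one sample per column, so the zip above pairs each value in
                # all_values with its sample.  The loop below breaks ties uniformly at random in
                # a single pass (reservoir-style): after the k-th tied optimum, each of the k
                # tied samples has been retained with probability 1/k.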
                select_randomly = self.parameters.select_randomly_from_optimal_values._get(context)
                for value, sample in value_sample_pairs:
                    if select_randomly and np.allclose(value, value_optimal):
                        optimal_value_count += 1

                        # swap with probability = 1/optimal_value_count in order to achieve
                        # uniformly random selection from identical outcomes
                        probability = 1 / optimal_value_count
                        random_state = self._get_current_parameter_value("random_state", context)
                        random_value = random_state.rand()

                        if random_value < probability:
                            value_optimal, sample_optimal = value, sample

                    elif (value > value_optimal and direction == MAXIMIZE) or \
                            (value < value_optimal and direction == MINIMIZE):
                        value_optimal, sample_optimal = value, sample
                        optimal_value_count = 1

            if self.parameters.save_samples._get(context):
                self.parameters.saved_samples._set(all_samples, context)
                return_all_samples = all_samples
            if self.parameters.save_values._get(context):
                self.parameters.saved_values._set(all_values, context)
                return_all_values = all_values

        return sample_optimal, value_optimal, return_all_samples, return_all_values


class GaussianProcess(OptimizationFunction):
    """
    GaussianProcess(                 \
        default_variable=None,       \
        objective_function=None,     \
        search_space=None,           \
        direction=MAXIMIZE,          \
        max_iterations=1000,         \
        save_samples=False,          \
        save_values=False,           \
        params=None,                 \
        owner=None,                  \
        prefs=None                   \
        )

    Draw samples with dimensionality and bounds specified by `search_space <GaussianProcess.search_space>` and
    return one that optimizes the value of `objective_function <GaussianProcess.objective_function>`.

    .. _GaussianProcess_Procedure:

    **Gaussian Process Procedure**

    The number of items (`SampleIterators <SampleIterator>`) in `search_space <GaussianProcess.search_space>`
    determines the dimensionality of each sample to evaluate by `objective_function
    <GaussianProcess.objective_function>`, with the `start <SampleIterator.start>` and `stop <SampleIterator.stop>`
    attributes of each `SampleIterator` specifying the bounds for sampling along the corresponding dimension.

    When `function <GaussianProcess.function>` is executed, it iterates over the following steps:

        - draw sample along each dimension of `search_space <GaussianProcess.search_space>`, within bounds
          specified by `start <SampleIterator.start>` and `stop <SampleIterator.stop>` attributes of each
          `SampleIterator` in the `search_space <GaussianProcess.search_space>` list.
        ..
        - compute value of `objective_function <GaussianProcess.objective_function>` for that sample;

    The current iteration is contained in `iteration <GaussianProcess.iteration>`. Iteration continues until [
    FRED: FILL IN THE BLANK], or `max_iterations <GaussianProcess.max_iterations>` is exceeded.  The function
    returns the sample that yielded either the highest (if `direction <GaussianProcess.direction>`
    is *MAXIMIZE*) or lowest (if `direction <GaussianProcess.direction>` is *MINIMIZE*) value of the
    `objective_function <GaussianProcess.objective_function>`, along with the value for that sample, as well as
    lists containing all of the samples evaluated and their values if either `save_samples
    <GaussianProcess.save_samples>` or `save_values <GaussianProcess.save_values>` is `True`, respectively.

    Arguments
    ---------

    default_variable : list or ndarray : default None
        specifies a template for (i.e., an example of the shape of) the samples used to evaluate the
        `objective_function <GaussianProcess.objective_function>`.

    objective_function : function or method
        specifies function used to evaluate sample in each iteration of the `optimization process
        <GaussianProcess_Procedure>`; it must be specified and must return a scalar value.

    search_space : list or array
        specifies bounds of the samples used to evaluate `objective_function <GaussianProcess.objective_function>`
        along each dimension of `variable <GaussianProcess.variable>`;  each item must be a tuple the first element
        of which specifies the lower bound and the second of which specifies the upper bound.

    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
        specifies the direction of optimization:  if *MAXIMIZE*, the highest value of `objective_function
        <GaussianProcess.objective_function>` is sought;  if *MINIMIZE*, the lowest value is sought.

    max_iterations : int : default 1000
        specifies the maximum number of times the `optimization process <GaussianProcess_Procedure>` is allowed to
        iterate; if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.

    save_samples : bool
        specifies whether or not to return all of the samples used to evaluate `objective_function
        <GaussianProcess.objective_function>` in the `optimization process <GaussianProcess_Procedure>`
        (i.e., a copy of the `search_space <GaussianProcess.search_space>`).

    save_values : bool
        specifies whether or not to save and return the values of `objective_function
        <GaussianProcess.objective_function>` for all samples evaluated in the `optimization process
        <GaussianProcess_Procedure>`.

    Attributes
    ----------

    variable : ndarray
        template for sample evaluated by `objective_function <GaussianProcess.objective_function>`.

    objective_function : function or method
        function used to evaluate sample in each iteration of the `optimization process <GaussianProcess_Procedure>`.

    search_space : list or array
        contains tuples specifying bounds within which each dimension of `variable <GaussianProcess.variable>` is
        sampled, and used to evaluate `objective_function <GaussianProcess.objective_function>` in iterations of the
        `optimization process <GaussianProcess_Procedure>`.

    direction : MAXIMIZE or MINIMIZE : default MAXIMIZE
        determines the direction of optimization:  if *MAXIMIZE*, the greatest value of `objective_function
        <GaussianProcess.objective_function>` is sought;  if *MINIMIZE*, the least value is sought.

    iteration : int
        the current iteration of the `optimization process <GaussianProcess_Procedure>`.

    max_iterations : int
        determines the maximum number of times the `optimization process <GaussianProcess_Procedure>` is allowed to
        iterate; if exceeded, a warning is issued and the function returns the optimal sample of those evaluated.

    save_samples : bool
        determines whether or not to save and return all samples evaluated by the `objective_function
        <GaussianProcess.objective_function>` in the `optimization process <GaussianProcess_Procedure>` (if the
        process completes, this should be identical to `search_space <GaussianProcess.search_space>`).

    save_values : bool
        determines whether or not to save and return the value of `objective_function
        <GaussianProcess.objective_function>` for all samples evaluated in the `optimization process
        <GaussianProcess_Procedure>`.
    """

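    # A minimal construction sketch (hypothetical objective and bounds; note that the sampling
    # and termination methods below are placeholder shells):
    #
    #     gp = GaussianProcess(
    #         objective_function=lambda sample, context=None: float(np.sum(np.square(sample))),
    #         search_space=[(-1.0, 1.0), (-1.0, 1.0)],
    #         direction=MINIMIZE,
    #     )
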
    componentName = GAUSSIAN_PROCESS_FUNCTION

    class Parameters(OptimizationFunction.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <GaussianProcess.variable>`

                    :default value: [[0], [0]]
                    :type: ``list``
                    :read only: True

                direction
                    see `direction <GaussianProcess.direction>`

                    :default value: `MAXIMIZE`
                    :type: ``str``

                save_samples
                    see `save_samples <GaussianProcess.save_samples>`

                    :default value: True
                    :type: ``bool``

                save_values
                    see `save_values <GaussianProcess.save_values>`

                    :default value: True
                    :type: ``bool``
        """
        variable = Parameter([[0], [0]], read_only=True, pnl_internal=True, constructor_argument='default_variable')

        save_samples = True
        save_values = True

        direction = MAXIMIZE

    # TODO: should save_values be in the constructor if it's ignored?
    # is False or True the correct value?
    @check_user_specified
    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 objective_function:tc.optional(is_function_type)=None,
                 search_space=None,
                 direction:tc.optional(tc.enum(MAXIMIZE, MINIMIZE))=None,
                 save_values:tc.optional(bool)=None,
                 params=None,
                 owner=None,
                 prefs=None,
                 **kwargs):

        search_function = self._gaussian_process_sample
        search_termination_function = self._gaussian_process_satisfied
        self._return_values = save_values
        self._return_samples = save_values
        self.direction = direction

        super().__init__(
            default_variable=default_variable,
            objective_function=objective_function,
            search_function=search_function,
            search_space=search_space,
            search_termination_function=search_termination_function,
            save_samples=True,
            save_values=save_values,
            params=params,
            owner=owner,
            prefs=prefs,
        )

    def _validate_params(self, request_set, target_set=None, context=None):
        super()._validate_params(request_set=request_set, target_set=target_set, context=context)
        # if SEARCH_SPACE in request_set:
        #     search_space = request_set[SEARCH_SPACE]
        #     # search_space must be specified
        #     if search_space is None:
        #         raise OptimizationFunctionError("The {} arg must be specified for a {}".
        #                                         format(repr(SEARCH_SPACE), self.__class__.__name__))
        #     # must be a list or array
        #     if not isinstance(search_space, (list, np.ndarray)):
        #         raise OptimizationFunctionError("The specification for the {} arg of {} must be a list or array".
        #                                         format(repr(SEARCH_SPACE), self.__class__.__name__))
        #     # must have same number of items as variable
        #     if len(search_space) != len(self.defaults.variable):
        #         raise OptimizationFunctionError("The number of items in {} for {} ([]) must equal that of its {} ({})".
        #                                         format(repr(SEARCH_SPACE), self.__class__.__name__, len(search_space),
        #                                                repr(VARIABLE), len(self.defaults.variable)))
        #     # every item must be a tuple with two elements, both of which are scalars, and first must be <= second
        #     for i in search_space:
        #         if not isinstance(i, tuple) or len(i) != 2:
        #             raise OptimizationFunctionError("Item specified for {} of {} ({}) is not a tuple with two items".
        #                                             format(repr(SEARCH_SPACE), self.__class__.__name__, i))
        #         if not all([np.isscalar(j) for j in i]):
        #             raise OptimizationFunctionError("Both elements of item specified for {} of {} ({}) must be scalars".
        #                                             format(repr(SEARCH_SPACE), self.__class__.__name__, i))
        #         if not i[0] < i[1]:
        #             raise OptimizationFunctionError("First element of item in {} specified for {} ({}) "
        #                                             "must be less than or equal to its second element".
        #                                             format(repr(SEARCH_SPACE), self.__class__.__name__, i))

    def _function(self,
                  variable=None,
                  context=None,
                  params=None,
                  **kwargs):
        """Return the sample that yields the optimal value of `objective_function
        <GaussianProcess.objective_function>`, and possibly all samples evaluated and their corresponding values.

        Optimal value is defined by `direction <GaussianProcess.direction>`:
        - if *MAXIMIZE*, returns greatest value
        - if *MINIMIZE*, returns least value

        Returns
        -------

        optimal sample, optimal value, saved_samples, saved_values : ndarray, ndarray, list, list
            first array contains the sample that yields the highest or lowest value of `objective_function
            <GaussianProcess.objective_function>`, depending on `direction <GaussianProcess.direction>`; the
            second array contains the value of the function for that sample.  If `save_samples
            <GaussianProcess.save_samples>` is `True`, the first list contains all the samples evaluated, in the
            order they were evaluated; otherwise it is empty.  If `save_values <GaussianProcess.save_values>` is
            `True`, the second list contains the values returned by `objective_function
            <GaussianProcess.objective_function>` for all the samples, in the order they were evaluated;
            otherwise it is empty.
        """

        return_all_samples = return_all_values = []

        # Enforce no MPI for now
        MPI_IMPLEMENTATION = False
        if MPI_IMPLEMENTATION:
            # FIX: WORRY ABOUT THIS LATER
            pass

        else:
            last_sample, last_value, all_samples, all_values = super()._function(
                    variable=variable,
                    context=context,
                    params=params,
            )

            # Select the optimal value per direction (max for MAXIMIZE, min for MINIMIZE)
            return_optimal_value = max(all_values) if self.direction == MAXIMIZE else min(all_values)
            return_optimal_sample = all_samples[all_values.index(return_optimal_value)]
            if self._return_samples:
                return_all_samples = all_samples
            if self._return_values:
                return_all_values = all_values

        return return_optimal_sample, return_optimal_value, return_all_samples, return_all_values

    # FRED: THESE ARE THE SHELLS FOR THE METHODS I BELIEVE YOU NEED:
    def _gaussian_process_sample(self, variable, sample_num, context=None):
        """Draw and return sample from search_space."""
        # FRED: YOUR CODE HERE;  THIS IS THE search_function METHOD OF OptimizationControlMechanism (i.e., PARENT)
        # NOTES:
        #   This method is assigned as the search function of GaussianProcess,
        #     and should return a sample that will be evaluated in the call to GaussianProcess' `objective_function`
        #     (in the context of use with an OptimizationControlMechanism, a sample is a control_allocation,
        #     and the objective_function is the evaluate method of the agent_rep).
        #   You have accessible:
        #     variable arg:  the last sample evaluated
        #     sample_num:  number of current iteration in the search/sampling process
        #     self.search_space:  self.parameters.search_space._get(context), which you can assume will be a
        #                         list of tuples, each of which contains the sampling bounds for each dimension;
        #                         so its length = length of a sample
        #     (the extra stuff in getting the search space is to support statefulness in parallelization of sims)
        # return self._opt.ask() # [SAMPLE:  VECTOR SAME SHAPE AS VARIABLE]
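        # A hypothetical sketch of one way to fill this in (assuming scikit-optimize's
        # skopt.Optimizer is stored as self._opt, as the commented-out line above suggests):
        #     if sample_num == 0:
        #         self._opt = skopt.Optimizer(dimensions=self.parameters.search_space._get(context))
        #     return np.asarray(self._opt.ask())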
        return variable

    def _gaussian_process_satisfied(self, variable, value, iteration, context=None):
        """Determine whether search should be terminated;  return `True` if so, `False` if not."""
        # FRED: YOUR CODE HERE;  THIS IS THE search_termination_function METHOD OF OptimizationControlMechanism
        # (i.e., PARENT)
        return iteration == 2  # [BOOLEAN, SPECIFYING WHETHER TO END THE SEARCH/SAMPLING PROCESS]