• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

quaquel / EMAworkbench / 18555741015

16 Oct 2025 08:50AM UTC coverage: 92.203% (+3.5%) from 88.699%
18555741015

Pull #424

github

web-flow
Merge 5aa1577ae into 2182e4997
Pull Request #424: Optimization improvements

572 of 597 new or added lines in 14 files covered. (95.81%)

2 existing lines in 1 file now uncovered.

8266 of 8965 relevant lines covered (92.2%)

0.92 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

57.45
/ema_workbench/em_framework/optimization_convergence.py
1
"""helper stuff for analyzing converngence of optimization results."""
2

3
import abc
1✔
4

5
from platypus import (
1✔
6
    EpsilonIndicator,
7
    GenerationalDistance,
8
    Hypervolume,
9
    InvertedGenerationalDistance,
10
    Spacing,
11
)
12

13
from ..util import get_module_logger
1✔
14
from .optimization import rebuild_platypus_population
1✔
15

16
__all__ = [
1✔
17
    "EpsilonIndicatorMetric",
18
    "EpsilonProgress",
19
    "GenerationalDistanceMetric",
20
    "HypervolumeMetric",
21
    "InvertedGenerationalDistanceMetric",
22
    "OperatorProbabilities",
23
    "SpacingMetric",
24
]
25

26
_logger = get_module_logger(__name__)
1✔
27

28

29
class AbstractConvergenceMetric(abc.ABC):
    """Base class for convergence metrics.

    Parameters
    ----------
    name : str
           label under which the collected results are reported

    """

    def __init__(self, name):
        """Init."""
        super().__init__()
        self.name = name
        # one entry is appended per invocation of __call__
        self.results = []

    @abc.abstractmethod
    def __call__(self, optimizer):
        """Update the metric given the current state of the optimizer."""

    def reset(self):
        """Discard all collected results so the metric can be reused."""
        self.results = []

    def get_results(self):
        """Return the list of collected results."""
        return self.results
47

48

49
class EpsilonProgress(AbstractConvergenceMetric):
    """Epsilon progress convergence metric.

    Tracks the cumulative number of epsilon-archive improvements
    reported by the optimizer's archive over the course of the run.
    """

    def __init__(self):
        """Init."""
        super().__init__("epsilon_progress")

    def __call__(self, optimizer):
        """Append the archive's current improvement count."""
        self.results.append(optimizer.archive.improvements)
58

59

60
class MetricWrapper:
    """Wrapper class for wrapping platypus indicators.

    Parameters
    ----------
    reference_set : DataFrame
    problem : PlatypusProblem instance
    kwargs : dict
             any additional keyword arguments to be passed
             on to the wrapped platypus indicator class

    Notes
    -----
    This class relies on multiple inheritance and careful consideration
    of the MRO to conveniently wrap the convergence metrics provided
    by platypus.

    """

    def __init__(self, reference_set, problem, **kwargs):
        self.problem = problem
        # translate the DataFrame into a platypus population before
        # handing it to the wrapped indicator's __init__ via the MRO
        platypus_reference = rebuild_platypus_population(reference_set, self.problem)
        super().__init__(reference_set=platypus_reference, **kwargs)

    def calculate(self, archive):
        """Calculate the wrapped indicator for the given archive."""
        population = rebuild_platypus_population(archive, self.problem)
        return super().calculate(population)
87

88

89
class HypervolumeMetric(MetricWrapper, Hypervolume):
    """Hypervolume metric.

    Parameters
    ----------
    reference_set : DataFrame
    problem : PlatypusProblem instance

    Notes
    -----
    This is a thin wrapper around Hypervolume as provided
    by platypus to make it easier to use in conjunction with the
    workbench.

    """
102

103

104
class GenerationalDistanceMetric(MetricWrapper, GenerationalDistance):
    """GenerationalDistance metric.

    Parameters
    ----------
    reference_set : DataFrame
    problem : PlatypusProblem instance
    d : int, default=1
        the power in the generational distance function

    Notes
    -----
    This is a thin wrapper around GenerationalDistance as provided
    by platypus to make it easier to use in conjunction with the
    workbench.

    See https://link.springer.com/content/pdf/10.1007/978-3-319-15892-1_8.pdf
    for more information.

    """
123

124

125
class InvertedGenerationalDistanceMetric(MetricWrapper, InvertedGenerationalDistance):
    """InvertedGenerationalDistance metric.

    Parameters
    ----------
    reference_set : DataFrame
    problem : PlatypusProblem instance
    d : int, default=1
        the power in the inverted generational distance function

    Notes
    -----
    This is a thin wrapper around InvertedGenerationalDistance as provided
    by platypus to make it easier to use in conjunction with the
    workbench.

    See https://link.springer.com/content/pdf/10.1007/978-3-319-15892-1_8.pdf
    for more information.

    """
144

145

146
class EpsilonIndicatorMetric(MetricWrapper, EpsilonIndicator):
    """EpsilonIndicator metric.

    Parameters
    ----------
    reference_set : DataFrame
    problem : PlatypusProblem instance

    Notes
    -----
    This is a thin wrapper around EpsilonIndicator as provided
    by platypus to make it easier to use in conjunction with the
    workbench.

    """
160

161

162
class SpacingMetric(MetricWrapper, Spacing):
    """Spacing metric.

    Parameters
    ----------
    problem : PlatypusProblem instance

    Notes
    -----
    This is a thin wrapper around Spacing as provided
    by platypus to make it easier to use in conjunction with the
    workbench.

    """

    def __init__(self, problem):
        """Init."""
        # NOTE(review): deliberately does NOT call super().__init__ —
        # Spacing needs no reference set, so MetricWrapper.__init__
        # (which requires one) is bypassed. This also means
        # Spacing.__init__ never runs; presumably it has no required
        # setup — confirm against the platypus implementation.
        self.problem = problem
178

179

180
# class HyperVolume(AbstractConvergenceMetric):
181
#     """Hypervolume convergence metric class.
182
#
183
#     This metric is derived from a hyper-volume measure, which describes the
184
#     multi-dimensional volume of space contained within the pareto front. When
185
#     computed with minimum and maximums, it describes the ratio of dominated
186
#     outcomes to all possible outcomes in the extent of the space.  Getting this
187
#     number to be high or low is not necessarily important, as not all outcomes
188
#     within the min-max range will be feasible.  But, having the hypervolume remain
189
#     fairly stable over multiple generations of the evolutionary algorithm provides
190
#     an indicator of convergence.
191
#
192
#     Parameters
193
#     ----------
194
#     minimum : numpy array
195
#     maximum : numpy array
196
#
197
#
198
#     This class is deprecated and will be removed in version 3.0 of the EMAworkbench.
199
#     Use ArchiveLogger instead and calculate hypervolume in post using HypervolumeMetric
200
#     as also shown in the directed search tutorial.
201
#
202
#     """
203
#
204
#     def __init__(self, minimum, maximum):
205
#         super().__init__("hypervolume")
206
#         warnings.warn(
207
#             "HyperVolume is deprecated and will be removed in version 3.0 of the EMAworkbench."
208
#             "Use ArchiveLogger and HypervolumeMetric instead",
209
#             DeprecationWarning,
210
#             stacklevel=2,
211
#         )
212
#         self.hypervolume_func = Hypervolume(minimum=minimum, maximum=maximum)
213
#
214
#     def __call__(self, optimizer):
215
#         self.results.append(self.hypervolume_func.calculate(optimizer.archive))
216
#
217
#     @classmethod
218
#     def from_outcomes(cls, outcomes):
219
#         ranges = [o.expected_range for o in outcomes if o.kind != o.INFO]
220
#         minimum, maximum = np.asarray(list(zip(*ranges)))
221
#         return cls(minimum, maximum)
222

223

224
class OperatorProbabilities(AbstractConvergenceMetric):
    """Operator probability convergence tracker for use with auto-adaptive operator selection.

    Parameters
    ----------
    name : str
    index : int
            position of the operator of interest within the variator's
            probability vector

    Notes
    -----
    State of the art MOEAs like Borg (and GenerationalBorg provided by the
    workbench) use auto-adaptive operator selection. The algorithm has multiple
    different evolutionary operators. Over the run, it tracks how well each
    operator is doing in producing fitter offspring. The probability of the
    algorithm using a given evolutionary operator is proportional to how well
    this operator has been doing in producing fitter offspring in recent
    generations. This class can be used to track these probabilities over the
    run of the algorithm.

    """

    def __init__(self, name, index):
        """Init."""
        super().__init__(name)
        self.index = index

    def __call__(self, optimizer):
        """Record the current selection probability of the tracked operator."""
        # not every algorithm exposes adaptive operator probabilities;
        # best-effort: silently skip those that do not
        try:
            probabilities = optimizer.algorithm.variator.probabilities
            self.results.append(probabilities[self.index])
        except AttributeError:
            pass
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc