
thouska / spotpy / 15283517039

27 May 2025 07:01PM UTC coverage: 67.799% (+0.006%) from 67.793%

Pull Request #334: Typo adjustments, black formatting, drop Python 3.9, support Python 3.13
Merge 459e2938c into 47dd43fa1

24 of 99 new or added lines in 15 files covered (24.24%).
14 existing lines in 8 files now uncovered.
3752 of 5534 relevant lines covered (67.8%).
2.03 hits per line

Source File: /src/spotpy/algorithms/abc.py (85.09% covered)
# -*- coding: utf-8 -*-
"""
Copyright (c) 2018 by Tobias Houska
This file is part of Statistical Parameter Optimization Tool for Python (SPOTPY).
:author: Patrick Lauer
"""

import random

import numpy as np

from . import _algorithm


class abc(_algorithm):
    """
    This class holds the Artificial Bee Colony (ABC) algorithm, based on Karaboga (2007).
    D. Karaboga, AN IDEA BASED ON HONEY BEE SWARM FOR NUMERICAL OPTIMIZATION, TECHNICAL REPORT-TR06, Erciyes University, Engineering Faculty, Computer Engineering Department, 2005.
    D. Karaboga, B. Basturk, A powerful and efficient algorithm for numerical function optimization: Artificial Bee Colony (ABC) algorithm, Journal of Global Optimization, Volume 39, Issue 3, pp. 459-471, November 2007, ISSN 0925-5001, doi: 10.1007/s10898-007-9149-x
    """

    def __init__(self, *args, **kwargs):
        """
        Input
        ----------
        spot_setup: class
            model: function
                Should be callable with a parameter combination of the parameter-function
                and return a list of simulation results (as long as the evaluation list)
            parameter: function
                When called, it should return a random parameter combination, which can
                be e.g. uniform or Gaussian
            objectivefunction: function
                Should return the objective function value for a given list of model simulations and
                observations.
            evaluation: function
                Should return the true values as returned by the model.

        dbname: str
            * Name of the database where parameters, objective function values and simulation results will be saved.

        dbformat: str
            * ram: fast, suited for short sampling times. No file will be created and results are saved in an array.
            * csv: A csv file will be created, which you can import afterwards.

        parallel: str
            * seq: Sequential sampling (default): Normal iterations on one core of your cpu.
            * mpi: Message Passing Interface: Parallel computing on cluster pcs (recommended for unix os).

        save_sim: boolean
            * True:  Simulation results will be saved
            * False: Simulation results will not be saved
        """
        kwargs["optimization_direction"] = "maximize"
        kwargs["algorithm_name"] = "Artificial Bee Colony (ABC) algorithm"
        super(abc, self).__init__(*args, **kwargs)

    def sample(
        self, repetitions, eb=48, a=(1 / 10), peps=0.0001, ownlimit=False, limit=24
    ):
        """
        Parameters
        ----------
        repetitions: int
            maximum number of function evaluations allowed during optimization
        eb: int
            number of employed bees (half of population size)
        a: float
            mutation factor
        peps: float
            convergence criterion
        ownlimit: boolean
            determines whether a user-defined scout limit is used
        limit: int
            sets the limit
        """
        self.set_repetiton(repetitions)
        print(f"Starting the ABC algorithm with {repetitions} repetitions...")
        # Initialize ABC parameters:
        randompar = self.parameter()["random"]
        self.nopt = randompar.size
        random.seed()
        if ownlimit:
            self.limit = limit
        else:
            self.limit = eb
        lb, ub = self.parameter()["minbound"], self.parameter()["maxbound"]
        # Initialization
        work = []
        icall = 0
        gnrng = 1e100
        # Calculate the objective function
        param_generator = ((rep, self.parameter()["random"]) for rep in range(eb))
        for rep, randompar, simulations in self.repeat(param_generator):
            # Calculate fitness
            like = self.postprocessing(
                rep, randompar, simulations, chains=1, negativlike=True
            )
            c = 0
            p = 0
            # Each food source holds: [best like, best params, initial like,
            # candidate params, trial counter, selection probability]
            work.append([like, randompar, like, randompar, c, p])
            icall += 1
            if self.status.stop:
                print("Stopping sampling")
                break

        while icall < repetitions and gnrng > peps:
            psum = 0
            # Employed bee phase
            # Generate new input parameters
            for i, val in enumerate(work):
                k = i
                while k == i:
                    k = random.randint(0, (eb - 1))
                j = random.randint(0, (self.nopt - 1))
                work[i][3][j] = work[i][1][j] + random.uniform(-a, a) * (
                    work[i][1][j] - work[k][1][j]
                )
                if work[i][3][j] < lb[j]:
                    work[i][3][j] = lb[j]
                if work[i][3][j] > ub[j]:
                    work[i][3][j] = ub[j]

            # Calculate the objective function
            param_generator = ((rep, work[rep][3]) for rep in range(eb))
            for rep, randompar, simulations in self.repeat(param_generator):
                # Calculate fitness
                clike = self.postprocessing(
                    icall + eb, randompar, simulations, chains=2, negativlike=True
                )
                if clike > work[rep][0]:
                    work[rep][1] = work[rep][3]
                    work[rep][0] = clike
                    work[rep][4] = 0
                else:
                    work[rep][4] = work[rep][4] + 1
                icall += 1
                if self.status.stop:
                    print("Stopping sampling")
                    break
            # Probability distribution for roulette wheel selection
            bn = []
            for i, val in enumerate(work):
                psum = psum + (1 / work[i][0])
            for i, val in enumerate(work):
                work[i][5] = (1 / work[i][0]) / psum
                bn.append(work[i][5])
            bounds = np.cumsum(bn)
            # Onlooker bee phase
            # Roulette wheel selection
            for i, val in enumerate(work):
                pn = random.uniform(0, 1)
                k = i
                while k == i:
                    k = random.randint(0, eb - 1)
                for t, vol in enumerate(bounds):
                    if bounds[t] - pn >= 0:
                        z = t
                        break
                j = random.randint(0, (self.nopt - 1))
                # Generate new input parameters
                try:
                    work[i][3][j] = work[z][1][j] + random.uniform(-a, a) * (
                        work[z][1][j] - work[k][1][j]
                    )
                except UnboundLocalError:
                    z = 0
                    work[i][3][j] = work[z][1][j] + random.uniform(-a, a) * (
                        work[z][1][j] - work[k][1][j]
                    )
                if work[i][3][j] < lb[j]:
                    work[i][3][j] = lb[j]
                if work[i][3][j] > ub[j]:
                    work[i][3][j] = ub[j]
            # Calculate the objective function
            param_generator = ((rep, work[rep][3]) for rep in range(eb))
            for rep, randompar, simulations in self.repeat(param_generator):
                # Calculate fitness
                clike = self.postprocessing(
                    icall + eb, randompar, simulations, chains=3, negativlike=True
                )
                if clike > work[rep][0]:
                    work[rep][1] = work[rep][3]
                    work[rep][0] = clike
                    work[rep][4] = 0
                else:
                    work[rep][4] = work[rep][4] + 1
                icall += 1
                if self.status.stop:
                    print("Stopping sampling")
                    break
            # Scout bee phase
            for i, val in enumerate(work):
                if work[i][4] >= self.limit:
                    work[i][1] = self.parameter()["random"]
                    work[i][4] = 0
                    t, work[i][0], simulations = self.simulate((icall, work[i][1]))
                    clike = self.postprocessing(
                        icall + eb, randompar, simulations, chains=4, negativlike=True
                    )
                    work[i][0] = clike
                    icall += 1
                    if self.status.stop:
                        print("Stopping sampling")
                        break
            gnrng = -self.status.objectivefunction_max
            if icall >= repetitions:
                print("*** OPTIMIZATION SEARCH TERMINATED BECAUSE THE LIMIT")
                print("ON THE MAXIMUM NUMBER OF TRIALS ")
                print(repetitions)
                print("HAS BEEN EXCEEDED.")

            if gnrng < peps:
                print(
                    "THE POPULATION HAS CONVERGED TO A PRESPECIFIED SMALL PARAMETER SPACE AT RUN"
                )
                print(icall)
        self.final_call()
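
The class docstring above describes the spot_setup interface the sampler expects. For orientation, here is a minimal usage sketch. It is not part of abc.py; it assumes spotpy is installed, that parameters can be declared as spotpy.parameter.Uniform class attributes, and that spotpy.objectivefunctions.rmse and sampler.getdata() are available as in the spotpy tutorials. Names such as "abc_demo" are illustrative only.

    import spotpy


    class spot_setup:
        # Two uniformly distributed parameters to optimize (illustrative bounds)
        x = spotpy.parameter.Uniform(low=-2, high=2)
        y = spotpy.parameter.Uniform(low=-2, high=2)

        def simulation(self, vector):
            x, y = vector
            # Rosenbrock-style response standing in for a real model
            return [(1 - x) ** 2 + 100 * (y - x**2) ** 2]

        def evaluation(self):
            # "True" value: the global minimum of the function above
            return [0.0]

        def objectivefunction(self, simulation, evaluation):
            # abc maximizes, so return a negative error measure
            return -spotpy.objectivefunctions.rmse(evaluation, simulation)


    # eb employed bees; results are kept in RAM, no file is written
    sampler = spotpy.algorithms.abc(
        spot_setup(), dbname="abc_demo", dbformat="ram", save_sim=False
    )
    sampler.sample(repetitions=1000, eb=48, peps=0.0001)
    results = sampler.getdata()

The negated RMSE matches the sampler's optimization_direction of "maximize": larger objective function values correspond to better parameter sets.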