KarlNaumann / MacroStat · build 17696704914
13 Sep 2025 12:44PM UTC · coverage: 95.711% (-1.0%) from 96.722%
push · github · web-flow
Testing adjustments (#50)

* fix: restrict parameters passed to core to those included in the defaults

* fix: adjust for potentially mismatched dimensions in the course of the timeseries

* fix: tests for batchprocessing

* fix: adjust sampler tests for split in transform and save

* fix: correct the step range to avoid skipping a timestep

* fix: drop from_json in core.Model as the parameters are now model-dependent

* fix: adapt tests for the model core to the parameter and variable adaptations (gather_timeseries and that parameters rejects parameters not in the default_parameters)

* fix: NK3E adjusted for revised variable treatment
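
The first of these fixes restricts the parameters passed to the model core to those present in the defaults, and the related test change notes that unknown parameters are now rejected. MacroStat's actual implementation is not shown on this page, so the sketch below is only an illustration of that kind of guard; the default dictionary, helper name, and error message are invented for the example:

    # Hypothetical defaults; the real defaults live in macrostat.core.parameters.
    default_parameters = {"device": "cpu", "timesteps": 100}

    def restrict_to_defaults(user_parameters: dict) -> dict:
        """Reject any parameter key that is not present in the defaults."""
        unknown = set(user_parameters) - set(default_parameters)
        if unknown:
            raise KeyError(f"Parameters not in the defaults: {sorted(unknown)}")
        # Start from the defaults and overlay only recognised user values.
        merged = dict(default_parameters)
        merged.update(user_parameters)
        return merged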

247 of 249 branches covered (99.2%)

Branch coverage included in aggregate %.

14 of 14 new or added lines in 3 files covered. (100.0%)

23 existing lines in 4 files now uncovered.

1650 of 1733 relevant lines covered (95.21%)

0.95 hits per line
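
Per the note above, branch coverage is folded into the aggregate percentage, which is consistent with the reported figures:

    lines:      1650 / 1733 ≈ 95.21%
    branches:    247 /  249 ≈ 99.2%
    aggregate:  (1650 + 247) / (1733 + 249) = 1897 / 1982 ≈ 95.711%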

Source File
/src/macrostat/core/model.py (81.71% covered)
Lines not exercised in this build are marked "# uncovered"; all other executable lines were hit.
# -*- coding: utf-8 -*-
"""
Generic model class as a wrapper to specific implementations
"""

__author__ = ["Karl Naumann-Woleske"]
__credits__ = ["Karl Naumann-Woleske"]
__license__ = "MIT"
__version__ = "0.1.0"
__maintainer__ = ["Karl Naumann-Woleske"]

import logging
import os
import pickle

import torch

from macrostat.core.behavior import Behavior
from macrostat.core.parameters import Parameters
from macrostat.core.scenarios import Scenarios
from macrostat.core.variables import Variables

logger = logging.getLogger(__name__)


class Model:
    """A general class to represent a macroeconomic model.

    This class provides a wrapper for users to write their underlying model
    behavior while maintaining a uniformly accessible interface.

    Attributes
    ----------
    parameters : macrostat.core.parameters.Parameters
        The parameters of the model.
    scenarios : macrostat.core.scenarios.Scenarios
        The scenarios of the model.
    variables : macrostat.core.variables.Variables
        The variables of the model.
    behavior : macrostat.core.behavior.Behavior
        The behavior class of the model.
    name : str
        The name of the model.

    Example
    -------
    A general workflow for a model might look like:

    >>> model = Model()
    >>> output = model.simulate()
    >>> model.save()

    """

    def __init__(
        self,
        parameters: Parameters | dict | None = None,
        hyperparameters: dict | None = None,
        scenarios: Scenarios | dict = None,
        variables: Variables | dict = None,
        behavior: Behavior = Behavior,
        name: str = "model",
        log_level: int = logging.INFO,
        log_file: str = "macrostat_model.log",
    ):
        """Initialization of the model class.

        Parameters
        ----------
        parameters: macrostat.core.parameters.Parameters | dict
            The parameters of the model.
        hyperparameters: dict (optional)
            The hyperparameters of the model.
        scenarios: macrostat.core.scenarios.Scenarios | dict (optional)
            The scenarios of the model.
        variables: macrostat.core.variables.Variables | dict (optional)
            The variables of the model.
        behavior: macrostat.core.behavior.Behavior (optional)
            The behavior of the model.
        name: str (optional)
            The name of the model.
        log_level: int (optional)
            The log level, defaults to logging.INFO but can be set to logging.DEBUG
            for more verbose output.
        log_file: str (optional)
            The log file, defaults to "macrostat_model.log" in the current working
            directory.
        """
        # Essential attributes
        if isinstance(parameters, dict):
            self.parameters = Parameters(  # uncovered
                parameters=parameters, hyperparameters=hyperparameters
            )
        elif isinstance(parameters, Parameters):
            self.parameters = parameters
            if hyperparameters is not None:
                self.parameters.hyper.update(hyperparameters)  # uncovered
        else:
            logger.warning("No parameters provided, using default parameters")
            self.parameters = Parameters()

        if isinstance(scenarios, Scenarios):
            self.scenarios = scenarios
        else:
            logger.warning("No scenarios provided, using default scenarios")
            self.scenarios = Scenarios(parameters=self.parameters)

        if isinstance(variables, Variables):
            self.variables = variables
        else:
            logger.warning("No variables provided, using default variables")
            self.variables = Variables(parameters=self.parameters)

        if behavior is not None and issubclass(behavior, Behavior):
            self.behavior = behavior
        else:
            logger.warning("No behavior provided, using default behavior")
            self.behavior = Behavior

        self.name = name

        logging.basicConfig(level=log_level, filename=log_file)

    @classmethod
    def load(cls, path: os.PathLike):
        """Class method to load a model instance from a pickled file.

        Parameters
        ----------
        path: os.PathLike
            path to the targeted file containing the model.

        Notes
        -----
        .. note:: This implementation is dependent on your pickling version

        """
        with open(path, "rb") as f:  # uncovered
            model = pickle.load(f)  # uncovered
        return model  # uncovered

    def save(self, path: os.PathLike):
        """Save the model object as a pickled file

        Parameters
        ----------
        path: os.PathLike
            path where the model will be stored. If it is None then
            the model's name will be used and the file stored in the
            working directory.

        Notes
        -----
        .. note:: This implementation is dependent on your pickling version
        """
        with open(path, "wb") as f:  # uncovered
            pickle.dump(self, f)  # uncovered

    def simulate(self, scenario: int | str = 0, *args, **kwargs):
        """Simulate the model.

        Parameters
        ----------
        scenario: int (optional)
            The scenario to use for the model run, defaults to 0, which
            represents the default scenario (no shocks).
        """
        if isinstance(scenario, str):
            scenario = self.scenarios.get_scenario_index(scenario)

        logging.debug(f"Starting simulation. Scenario: {scenario}")
        behavior = self.behavior(
            self.parameters,
            self.scenarios,
            self.variables,
            scenario=scenario,
            *args,
            **kwargs,
        )
        behavior = behavior.to(self.parameters["device"])
        with torch.no_grad():
            return behavior.forward(*args, **kwargs)
    def get_model_training_instance(self, scenario: int | str = 0, *args, **kwargs):
1✔
186
        """Simulate the model.
187

188
        Parameters
189
        ----------
190
        scenario: int (optional)
191
            The scenario to use for the model run, defaults to 0, which
192
            represents the default scenario (no shocks).
193
        """
194
        if isinstance(scenario, str):
×
195
            scenario = self.scenarios.get_scenario_index(scenario)
×
196

197
        logging.debug(f"Starting simulation. Scenario: {scenario}")
×
198
        behavior = self.behavior(
×
199
            self.parameters,
200
            self.scenarios,
201
            self.variables,
202
            scenario=scenario,
203
            *args,
204
            **kwargs,
205
        )
206
        behavior = behavior.to(self.parameters["device"])
×
207
        behavior.train()
×
208
        return behavior
×
209


    def compute_theoretical_steady_state(
        self, scenario: int | str = 0, *args, **kwargs
    ):
        """Compute the theoretical steady state of the model.

        This process generally follows the structure of the forward() function,
        but instead of simulating the model, the steady state is computed at
        each timestep. Therefore, (1) the model is initialized, and (2) for
        each timestep the parameter and scenario information is passed to the
        compute_theoretical_steady_state_per_step() function that computes the
        steady state at that timestep.

        Parameters
        ----------
        scenario: int (optional)
            The scenario to use for the model run, defaults to 0, which
            represents the default scenario (no shocks).
        """
        if isinstance(scenario, str):
            scenario = self.scenarios.get_scenario_index(scenario)

        logging.info(f"Computing theoretical steady state. Scenario: {scenario}")
        behavior = self.behavior(
            self.parameters,
            self.scenarios,
            self.variables,
            scenario=scenario,
            *args,
            **kwargs,
        )
        with torch.no_grad():
            return behavior.compute_theoretical_steady_state(*args, **kwargs)

    def to_json(self, file_path: os.PathLike, *args, **kwargs):
        """Convert the model to a JSON file split into parameters, scenarios,
        and variables.

        Parameters
        ----------
        file_path: os.PathLike
            The path to the file to save the model to.
        """
        self.parameters.to_json(f"{file_path}_params.json")
        self.scenarios.to_json(f"{file_path}_scenarios.json")
        self.variables.to_json(f"{file_path}_variables.json")
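
Taken together, these methods cover the typical round trip for a MacroStat model: build it, run a scenario, and persist both the configuration and the instance. A minimal sketch, assuming the default components suffice (a real model would normally pass a concrete Behavior subclass; the names "example" and "example.pkl" are placeholders):

    from macrostat.core.model import Model
    from macrostat.core.parameters import Parameters

    # Construct a model from default components (warnings are logged for anything omitted).
    model = Model(parameters=Parameters(), name="example")

    # Run the default (no-shock) scenario; simulate() wraps the behavior's
    # forward pass in torch.no_grad().
    output = model.simulate(scenario=0)

    # Export the configuration (writes example_params.json, example_scenarios.json
    # and example_variables.json) and pickle the full instance for later reuse.
    model.to_json("example")
    model.save("example.pkl")
    restored = Model.load("example.pkl")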