• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

blue-marble / gridpath / 17810926279

17 Sep 2025 09:17PM UTC coverage: 88.943% (-0.02%) from 88.959%
17810926279

Pull #1289

github

web-flow
Merge 48fa61368 into bdb0a2ac5
Pull Request #1289: Maintenance dependency upgrades

10 of 16 new or added lines in 7 files covered. (62.5%)

141 existing lines in 44 files now uncovered.

27349 of 30749 relevant lines covered (88.94%)

0.89 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

92.06
/data_toolkit/system/create_sync_load_input_csvs.py
1
# Copyright 2016-2025 Blue Marble Analytics LLC.
2
#
3
# Licensed under the Apache License, Version 2.0 (the "License");
4
# you may not use this file except in compliance with the License.
5
# You may obtain a copy of the License at
6
#
7
#     http://www.apache.org/licenses/LICENSE-2.0
8
#
9
# Unless required by applicable law or agreed to in writing, software
10
# distributed under the License is distributed on an "AS IS" BASIS,
11
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
# See the License for the specific language governing permissions and
13
# limitations under the License.
14

15
"""
16
Sync Loads
17
**********
18

19
Create GridPath sync load profile inputs.
20

21
=====
22
Usage
23
=====
24

25
>>> gridpath_run_data_toolkit --single_step create_sync_load_input_csvs --settings_csv PATH/TO/SETTINGS/CSV
26

27
===================
28
Input prerequisites
29
===================
30

31
This module assumes the following raw input database tables have been populated:
32
    * raw_data_system_load
33
    * user_defined_load_zone_units
34

35
=========
36
Settings
37
=========
38
    * database
39
    * output_directory
40
    * load_scenario_id
41
    * load_scenario_name
42
    * overwrite
43

44
"""
45

46
import sys
1✔
47
from argparse import ArgumentParser
1✔
48
import os.path
1✔
49
import pandas as pd
1✔
50

51
from data_toolkit.system.common_methods import (
1✔
52
    create_load_scenario_csv,
53
    create_load_components_scenario_csv,
54
)
55
from db.common_functions import connect_to_database
1✔
56

57
# Defaults used when the corresponding CLI arguments are omitted.
# (The packaged test examples use scenario ID 6 instead of 1.)
LOAD_SCENARIO_ID_DEFAULT = 1  # it's 6 in the test examples
LOAD_SCENARIO_NAME_DEFAULT = "ra_toolkit"
LOAD_COMPONENTS_SCENARIO_ID_DEFAULT = 1  # it's 6 in the test examples
LOAD_COMPONENTS_SCENARIO_NAME_DEFAULT = "ra_toolkit"
LOAD_LEVELS_SCENARIO_ID_DEFAULT = 1  # it's 6 in the test examples
LOAD_LEVELS_SCENARIO_NAME_DEFAULT = "ra_toolkit"
# Stage ID written into every row of the load-levels CSV.
STAGE_ID_DEFAULT = 1
# Name of the single aggregated load component.
LOAD_COMPONENT_NAME_DEFAULT = "all"


def parse_arguments(args):
    """
    :param args: the script arguments specified by the user
    :return: the parsed known argument values (<class 'argparse.Namespace'>
    Python object)

    Parse the known arguments.
    """
    parser = ArgumentParser(add_help=True)

    parser.add_argument("-db", "--database")

    parser.add_argument(
        "-out_dir",
        "--output_directory",
        help="""This will be the location of the load_scenario_id file. The 
        load components and load levels files are assumed to be in the 
        'load_components' and 'load_levels' subdirectories respectively.""",
    )
    # Numeric scenario/stage IDs are parsed with type=int so the attribute
    # type is the same whether the flag is supplied or defaulted.
    parser.add_argument(
        "-id",
        "--load_scenario_id",
        type=int,
        default=LOAD_SCENARIO_ID_DEFAULT,
        help=f"Defaults to {LOAD_SCENARIO_ID_DEFAULT}.",
    )
    parser.add_argument(
        "-name",
        "--load_scenario_name",
        default=LOAD_SCENARIO_NAME_DEFAULT,
        help=f"Defaults to '{LOAD_SCENARIO_NAME_DEFAULT}'.",
    )
    parser.add_argument(
        "-lc_id",
        "--load_components_scenario_id",
        type=int,
        default=LOAD_COMPONENTS_SCENARIO_ID_DEFAULT,
        help=f"Defaults to {LOAD_COMPONENTS_SCENARIO_ID_DEFAULT}.",
    )
    parser.add_argument(
        "-lc_name",
        "--load_components_scenario_name",
        default=LOAD_COMPONENTS_SCENARIO_NAME_DEFAULT,
        help=f"Defaults to '{LOAD_COMPONENTS_SCENARIO_NAME_DEFAULT}'.",
    )
    parser.add_argument(
        "-ll_id",
        "--load_levels_scenario_id",
        type=int,
        default=LOAD_LEVELS_SCENARIO_ID_DEFAULT,
        help=f"Defaults to {LOAD_LEVELS_SCENARIO_ID_DEFAULT}.",
    )
    parser.add_argument(
        "-ll_name",
        "--load_levels_scenario_name",
        default=LOAD_LEVELS_SCENARIO_NAME_DEFAULT,
        help=f"Defaults to '{LOAD_LEVELS_SCENARIO_NAME_DEFAULT}'.",
    )

    parser.add_argument(
        "-stage",
        "--stage_id",
        type=int,
        default=STAGE_ID_DEFAULT,
        # Fixed: help string previously had an unmatched quote
        help=f"Defaults to {STAGE_ID_DEFAULT}.",
    )

    parser.add_argument(
        "-comp",
        "--load_component",
        default=LOAD_COMPONENT_NAME_DEFAULT,
        # Fixed: help string previously had an unmatched quote
        help=f"Defaults to '{LOAD_COMPONENT_NAME_DEFAULT}'.",
    )

    parser.add_argument(
        "-l_o",
        "--load_scenario_overwrite",
        default=False,
        action="store_true",
        help="Overwrite existing CSV files.",
    )
    parser.add_argument(
        "-lc_o",
        "--load_components_overwrite",
        default=False,
        action="store_true",
        help="Overwrite existing CSV files.",
    )
    parser.add_argument(
        "-ll_o",
        "--load_levels_overwrite",
        default=False,
        action="store_true",
        help="Overwrite existing CSV files.",
    )

    parser.add_argument(
        "-skip_l",
        "--skip_load_scenario",
        default=False,
        action="store_true",
        help="Don't create load_scenario file.",
    )
    parser.add_argument(
        "-skip_lc",
        "--skip_load_components",
        default=False,
        action="store_true",
        help="Don't create load components file.",
    )
    parser.add_argument(
        "-skip_ll",
        "--skip_load_levels",
        default=False,
        action="store_true",
        help="Don't create load levels file.",
    )

    parser.add_argument("-q", "--quiet", default=False, action="store_true")

    parsed_arguments = parser.parse_known_args(args=args)[0]

    return parsed_arguments
186

187

188
def create_load_levels_csv(
    conn,
    output_directory,
    load_levels_scenario_id,
    load_levels_scenario_name,
    stage_id,
    load_component_name,
    overwrite,
):
    """
    Aggregate raw unit-level loads into zonal hourly load levels and write
    them to ``<output_directory>/load_levels/<id>_<name>.csv``.

    :param conn: open database connection (passed to pandas.read_sql)
    :param output_directory: base directory; the CSV goes into its
        'load_levels' subdirectory (assumed to already exist)
    :param load_levels_scenario_id: ID used in the output filename
    :param load_levels_scenario_name: name used in the output filename
    :param stage_id: constant stage_id value written on every row
    :param load_component_name: constant load_component value written on
        every row
    :param overwrite: if True, replace the output file; otherwise append

    This module currently assumes timepoint IDs will be 1 through 8760 for
    each year. The query will aggregate loads based on the aggregations and
    weights defined in the user_defined_load_zone_units
    table.
    """

    # Inner SELECT: join each raw load row to its zone/weight via
    # user_defined_load_zone_units, compute the weighted load, and derive an
    # hour-of-year index: strftime('%j') gives the day of year (month/day are
    # zero-padded so the ISO date string parses), then
    # (day_of_year - 1) * 24 + hour_of_day.
    # Outer SELECT: sum weighted loads per load_zone/year/hour into a single
    # load component per zone.
    # NOTE(review): the 1-8760 timepoint range in the docstring holds only if
    # hour_of_day runs 1-24 in raw_data_system_load (0-23 would yield
    # 0-8759) — confirm against the raw data convention.
    # NOTE(review): stage_id and load_component_name are interpolated into
    # the SQL directly; they come from toolkit settings, not untrusted input.
    query = f"""
        SELECT load_zone, year AS weather_iteration, {stage_id} as stage_id, 
        hour_of_year as timepoint, 
        '{load_component_name}' AS load_component, sum(weighted_load_mw) as 
        load_mw
        FROM (
        SELECT year, month, day_of_month, hour_of_day, load_zone_unit, load_zone, unit_weight, load_mw, unit_weight * load_mw as weighted_load_mw,
            (CAST(
                strftime('%j',
                    year || '-' || 
                    CASE
                    WHEN month > 9 THEN month
                    ELSE '0' || month END
                    || '-' || 
                    CASE
                    WHEN day_of_month > 9 THEN day_of_month
                    ELSE '0' || day_of_month END
                    ) AS DECIMAL
                ) - 1) * 24 + hour_of_day AS hour_of_year
        FROM raw_data_system_load
        JOIN user_defined_load_zone_units
        USING (load_zone_unit)
        )
    GROUP BY load_zone, year, hour_of_year
    """

    # Put into a dataframe and add to file
    df = pd.read_sql(query, con=conn)

    filename = os.path.join(
        output_directory,
        "load_levels",
        f"{load_levels_scenario_id}_{load_levels_scenario_name}.csv",
    )
    # Overwrite replaces the file and always writes the header; append mode
    # writes the header only when the file does not exist yet.
    if overwrite:
        mode = "w"
        write_header = True
    else:
        mode = "a"
        write_header = not os.path.exists(filename)

    df.to_csv(
        filename,
        mode=mode,
        header=write_header,
        index=False,
    )
251

252

253
def main(args=None):
    """
    Script entry point: parse arguments, create the output directory tree,
    connect to the database, and write the load scenario, load components,
    and load levels CSVs (each step individually skippable via CLI flags).

    :param args: optional argument list; defaults to sys.argv[1:]
    """
    if args is None:
        args = sys.argv[1:]

    opts = parse_arguments(args=args)

    if not opts.quiet:
        print("Creating sync load profile CSVs...")

    # Base output directory plus the two subdirectories the CSVs go into
    os.makedirs(opts.output_directory, exist_ok=True)
    for subdir in ("load_components", "load_levels"):
        os.makedirs(os.path.join(opts.output_directory, subdir), exist_ok=True)

    conn = connect_to_database(db_path=opts.database)

    if not opts.skip_load_scenario:
        create_load_scenario_csv(
            output_directory=opts.output_directory,
            load_scenario_id=opts.load_scenario_id,
            load_scenario_name=opts.load_scenario_name,
            load_components_scenario_id=opts.load_components_scenario_id,
            load_levels_scenario_id=opts.load_levels_scenario_id,
            overwrite_load_scenario_csv=opts.load_scenario_overwrite,
        )

    if not opts.skip_load_components:
        create_load_components_scenario_csv(
            conn=conn,
            output_directory=opts.output_directory,
            load_component_name=opts.load_component,
            load_components_scenario_id=opts.load_components_scenario_id,
            load_components_scenario_name=opts.load_components_scenario_name,
            overwrite_load_components_csv=opts.load_components_overwrite,
        )

    if not opts.skip_load_levels:
        create_load_levels_csv(
            conn=conn,
            output_directory=opts.output_directory,
            load_levels_scenario_id=opts.load_levels_scenario_id,
            load_levels_scenario_name=opts.load_levels_scenario_name,
            stage_id=opts.stage_id,
            load_component_name=opts.load_component,
            overwrite=opts.load_levels_overwrite,
        )
302

303

304
if __name__ == "__main__":
    # Allow running this module directly as a standalone script.
    main()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc