• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

blue-marble / gridpath / 24158835669

08 Apr 2026 09:08PM UTC coverage: 88.927% (-0.03%) from 88.956%
24158835669

Pull #1351

github

web-flow
Merge a41ad8303 into ad380fcdb
Pull Request #1351: RA Toolkit updates

110 of 132 new or added lines in 12 files covered. (83.33%)

69 existing lines in 9 files now uncovered.

27497 of 30921 relevant lines covered (88.93%)

0.89 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

92.19
/data_toolkit/system/create_sync_load_input_csvs.py
1
# Copyright 2016-2025 Blue Marble Analytics LLC.
2
#
3
# Licensed under the Apache License, Version 2.0 (the "License");
4
# you may not use this file except in compliance with the License.
5
# You may obtain a copy of the License at
6
#
7
#     http://www.apache.org/licenses/LICENSE-2.0
8
#
9
# Unless required by applicable law or agreed to in writing, software
10
# distributed under the License is distributed on an "AS IS" BASIS,
11
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
# See the License for the specific language governing permissions and
13
# limitations under the License.
14

15
"""
16
Sync Loads
17
**********
18

19
Create GridPath sync load profile inputs.
20

21
=====
22
Usage
23
=====
24

25
>>> gridpath_run_data_toolkit --single_step create_sync_load_input_csvs --settings_csv PATH/TO/SETTINGS/CSV
26

27
===================
28
Input prerequisites
29
===================
30

31
This module assumes the following raw input database tables have been populated:
32
    * raw_data_profiles
33
    * user_defined_load_zone_units
34

35
=========
36
Settings
37
=========
38
    * database
39
    * output_directory
40
    * load_scenario_id
41
    * load_scenario_name
42
    * overwrite
43

44
"""
45

46
import sys
1✔
47
from argparse import ArgumentParser
1✔
48
import os.path
1✔
49
import pandas as pd
1✔
50

51
from data_toolkit.system.common_methods import (
1✔
52
    create_load_scenario_csv,
53
    create_load_components_scenario_csv,
54
)
55
from db.common_functions import connect_to_database
1✔
56

57
LOAD_SCENARIO_ID_DEFAULT = 1  # it's 6 in the test examples
LOAD_SCENARIO_NAME_DEFAULT = "ra_toolkit"
LOAD_COMPONENTS_SCENARIO_ID_DEFAULT = 1  # it's 6 in the test examples
LOAD_COMPONENTS_SCENARIO_NAME_DEFAULT = "ra_toolkit"
LOAD_LEVELS_SCENARIO_ID_DEFAULT = 1  # it's 6 in the test examples
LOAD_LEVELS_SCENARIO_NAME_DEFAULT = "ra_toolkit"
STAGE_ID_DEFAULT = 1
LOAD_COMPONENT_NAME_DEFAULT = "all"


def parse_arguments(args):
    """
    :param args: the script arguments specified by the user
    :return: the parsed known argument values (<class 'argparse.Namespace'>
    Python object)

    Parse the known arguments.
    """
    parser = ArgumentParser(add_help=True)

    parser.add_argument("-db", "--database")

    parser.add_argument(
        "-out_dir",
        "--output_directory",
        help="""This will be the location of the load_scenario_id file. The 
        load components and load levels files are assumed to be in the 
        'load_components' and 'load_levels' subdirectories respectively.""",
    )
    parser.add_argument(
        "-id",
        "--load_scenario_id",
        default=LOAD_SCENARIO_ID_DEFAULT,
        help=f"Defaults to {LOAD_SCENARIO_ID_DEFAULT}.",
    )
    parser.add_argument(
        "-name",
        "--load_scenario_name",
        default=LOAD_SCENARIO_NAME_DEFAULT,
        help=f"Defaults to '{LOAD_SCENARIO_NAME_DEFAULT}'.",
    )
    parser.add_argument(
        "-lc_id",
        "--load_components_scenario_id",
        default=LOAD_COMPONENTS_SCENARIO_ID_DEFAULT,
        help=f"Defaults to {LOAD_COMPONENTS_SCENARIO_ID_DEFAULT}.",
    )
    parser.add_argument(
        "-lc_name",
        "--load_components_scenario_name",
        default=LOAD_COMPONENTS_SCENARIO_NAME_DEFAULT,
        help=f"Defaults to '{LOAD_COMPONENTS_SCENARIO_NAME_DEFAULT}'.",
    )
    parser.add_argument(
        "-ll_id",
        "--load_levels_scenario_id",
        default=LOAD_LEVELS_SCENARIO_ID_DEFAULT,
        help=f"Defaults to {LOAD_LEVELS_SCENARIO_ID_DEFAULT}.",
    )
    parser.add_argument(
        "-ll_name",
        "--load_levels_scenario_name",
        default=LOAD_LEVELS_SCENARIO_NAME_DEFAULT,
        help=f"Defaults to '{LOAD_LEVELS_SCENARIO_NAME_DEFAULT}'.",
    )

    parser.add_argument(
        "-stage",
        "--stage_id",
        default=STAGE_ID_DEFAULT,
        # BUGFIX: help string previously had an unbalanced quote
        # ("Defaults to '{...}"); normalized to match the other ID options.
        help=f"Defaults to {STAGE_ID_DEFAULT}.",
    )

    parser.add_argument(
        "-comp",
        "--load_component",
        default=LOAD_COMPONENT_NAME_DEFAULT,
        # BUGFIX: help string previously had an unbalanced quote
        # ("Defaults to '{...}"); closing quote added.
        help=f"Defaults to '{LOAD_COMPONENT_NAME_DEFAULT}'.",
    )

    parser.add_argument(
        "-l_o",
        "--load_scenario_overwrite",
        default=False,
        action="store_true",
        help="Overwrite existing CSV files.",
    )
    parser.add_argument(
        "-lc_o",
        "--load_components_overwrite",
        default=False,
        action="store_true",
        help="Overwrite existing CSV files.",
    )
    parser.add_argument(
        "-ll_o",
        "--load_levels_overwrite",
        default=False,
        action="store_true",
        help="Overwrite existing CSV files.",
    )

    parser.add_argument(
        "-skip_l",
        "--skip_load_scenario",
        default=False,
        action="store_true",
        help="Don't create load_scenario file.",
    )
    parser.add_argument(
        "-skip_lc",
        "--skip_load_components",
        default=False,
        action="store_true",
        help="Don't create load components file.",
    )
    parser.add_argument(
        "-skip_ll",
        "--skip_load_levels",
        default=False,
        action="store_true",
        help="Don't create load levels file.",
    )

    parser.add_argument("-q", "--quiet", default=False, action="store_true")

    # Only the known arguments are kept; unrecognized ones are ignored so
    # this step can be driven by a shared toolkit settings CSV.
    parsed_arguments = parser.parse_known_args(args=args)[0]

    return parsed_arguments
186

187

188
def create_load_levels_csv(
    conn,
    output_directory,
    load_levels_scenario_id,
    load_levels_scenario_name,
    stage_id,
    load_component_name,
    overwrite,
):
    """
    Query unit-level profiles, aggregate them into zonal load levels, and
    write the result to
    ``<output_directory>/load_levels/<id>_<name>.csv``.

    This module currently assumes timepoint IDs will be 1 through 8760 for
    each year. The query will aggregate loads based on the aggregations and
    weights defined in the user_defined_load_zone_units
    table.
    """
    # The inner SELECT computes an hour-of-year index from the calendar
    # columns (via strftime day-of-year) and weights each unit's load; the
    # outer SELECT sums the weighted loads by zone/year/hour.
    query = f"""
        SELECT load_zone, year AS weather_iteration, {stage_id} as stage_id, 
        hour_of_year as timepoint, 
        '{load_component_name}' AS load_component, sum(weighted_load_mw) as 
        load_mw
        FROM (
        SELECT year, month, day_of_month, hour_of_day, unit, load_zone, 
        unit_weight, value, unit_weight * value as weighted_load_mw,
            (CAST(
                strftime('%j',
                    year || '-' || 
                    CASE
                    WHEN month > 9 THEN month
                    ELSE '0' || month END
                    || '-' || 
                    CASE
                    WHEN day_of_month > 9 THEN day_of_month
                    ELSE '0' || day_of_month END
                    ) AS DECIMAL
                ) - 1) * 24 + hour_of_day AS hour_of_year
        FROM raw_data_profiles
        JOIN user_defined_load_zone_units
        USING (unit)
        )
    GROUP BY load_zone, year, hour_of_year
    """

    # Load the aggregated profiles into a dataframe, then write them out.
    load_levels_df = pd.read_sql(query, con=conn)

    csv_path = os.path.join(
        output_directory,
        "load_levels",
        f"{load_levels_scenario_id}_{load_levels_scenario_name}.csv",
    )
    # Overwrite replaces the file (header included); otherwise append,
    # writing a header only if the file doesn't exist yet.
    if overwrite:
        write_mode, include_header = "w", True
    else:
        write_mode, include_header = "a", not os.path.exists(csv_path)

    load_levels_df.to_csv(
        csv_path,
        mode=write_mode,
        header=include_header,
        index=False,
    )
252

253

254
def main(args=None):
    """
    Parse the arguments, create the output directory tree, and write the
    load scenario, load components, and load levels CSVs (each step can be
    skipped individually via the corresponding --skip_* flag).
    """
    if args is None:
        args = sys.argv[1:]

    parsed_args = parse_arguments(args=args)

    if not parsed_args.quiet:
        print("Creating sync load profile CSVs...")

    # Ensure the output directory and its expected subdirectories exist.
    for subdirectory in ("", "load_components", "load_levels"):
        os.makedirs(
            os.path.join(parsed_args.output_directory, subdirectory),
            exist_ok=True,
        )

    conn = connect_to_database(db_path=parsed_args.database)

    if not parsed_args.skip_load_scenario:
        create_load_scenario_csv(
            output_directory=parsed_args.output_directory,
            load_scenario_id=parsed_args.load_scenario_id,
            load_scenario_name=parsed_args.load_scenario_name,
            load_components_scenario_id=parsed_args.load_components_scenario_id,
            load_levels_scenario_id=parsed_args.load_levels_scenario_id,
            overwrite_load_scenario_csv=parsed_args.load_scenario_overwrite,
        )

    if not parsed_args.skip_load_components:
        create_load_components_scenario_csv(
            conn=conn,
            output_directory=parsed_args.output_directory,
            load_component_name=parsed_args.load_component,
            load_components_scenario_id=parsed_args.load_components_scenario_id,
            load_components_scenario_name=parsed_args.load_components_scenario_name,
            overwrite_load_components_csv=parsed_args.load_components_overwrite,
        )

    if not parsed_args.skip_load_levels:
        create_load_levels_csv(
            conn=conn,
            output_directory=parsed_args.output_directory,
            load_levels_scenario_id=parsed_args.load_levels_scenario_id,
            load_levels_scenario_name=parsed_args.load_levels_scenario_name,
            stage_id=parsed_args.stage_id,
            load_component_name=parsed_args.load_component,
            overwrite=parsed_args.load_levels_overwrite,
        )

    conn.close()
305

306

307
# Script entry point: parse args from the command line when run directly.
if __name__ == "__main__":
    main()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc