OGGM / oggm, build 17036427554

18 Aug 2025 09:19AM UTC. Coverage: 84.387% (-0.03%) from 84.412%.

Pull Request #1795: add new observation handling
(github web-flow, merge f58c0836a into 0793d4813)

208 of 217 new or added lines in 7 files covered (95.85%).
28 existing lines in 2 files now uncovered.
12404 of 14699 relevant lines covered (84.39%). 3.78 hits per line.

Source file: /oggm/cli/prepro_levels.py (73.36% of lines covered)

"""Command line arguments to the oggm_prepro command

Type `$ oggm_prepro -h` for help
"""
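
# Typical invocations (illustrative only; the flags are defined in
# parse_args at the bottom of this file):
#
#     $ oggm_prepro --rgi-reg 11 --map-border 80 --elev-bands \
#           --working-dir /path/to/wd --output /path/to/out
#
#     # Resume from an existing pre-processed level instead of from scratch:
#     $ oggm_prepro --rgi-reg 11 --map-border 80 --elev-bands \
#           --start-level 2 --start-base-url <url-of-prepro-gdirs>
#
# The script walks the pre-processing levels in order: L0 (glacier
# directories), L1 (DEM), L2 (flowlines), L3 (climate, mass-balance
# calibration, inversion), L4 (historical runs) and L5 (compressed mini
# directories), stopping at --max-level.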

# External modules
import os
import sys
import shutil
import argparse
import time
import logging
import json
import pandas as pd
import numpy as np
import geopandas as gpd

# Locals
import oggm.cfg as cfg
from oggm import utils, workflow, tasks, GlacierDirectory
from oggm.core import gis
from oggm.exceptions import InvalidParamsError, InvalidDEMError
from oggm.utils import get_prepro_base_url, file_downloader

# Module logger
log = logging.getLogger(__name__)


@utils.entity_task(log)
def _rename_dem_folder(gdir, source=''):
    """Put the DEM files in a subfolder of the gdir.

    Parameters
    ----------
    gdir : GlacierDirectory
    source : str
        the DEM source
    """

    # open tif-file to check if it's worth it
    dem_f = gdir.get_filepath('dem')
    try:
        dem = gis.read_geotiff_dem(gdir)
    except IOError:
        # Error reading file, no problem - still, delete the file if needed
        if os.path.exists(dem_f):
            os.remove(dem_f)
        gdir.log('{},DEM SOURCE,{}'.format(gdir.rgi_id, source),
                 err=InvalidDEMError('File does not exist'))
        return

    # Check the DEM
    isfinite = np.isfinite(dem)
    if np.all(~isfinite) or (np.min(dem) == np.max(dem)):
        # Remove the file and return
        if os.path.exists(dem_f):
            os.remove(dem_f)
        gdir.log('{},DEM SOURCE,{}'.format(gdir.rgi_id, source),
                 err=InvalidDEMError('DEM does not contain more than one '
                                     'valid value.'))
        return

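    # The moved files end up in a per-source subfolder, e.g. (illustrative;
    # the exact basenames come from gdir.get_filepath):
    #     <gdir.dir>/<source>/dem.tif
    #     <gdir.dir>/<source>/dem_source.txt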
    # Create a source dir and move the files
    out = os.path.join(gdir.dir, source)
    utils.mkdir(out)
    for fname in ['dem', 'dem_source']:
        f = gdir.get_filepath(fname)
        os.rename(f, os.path.join(out, os.path.basename(f)))

    # log SUCCESS for this DEM source
    gdir.log('{},DEM SOURCE,{}'.format(gdir.rgi_id, source))


def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
                      output_folder='', working_dir='', dem_source='',
                      is_test=False, test_ids=None, demo=False, test_rgidf=None,
                      test_intersects_file=None, test_topofile=None,
                      disable_mp=False, params_file=None,
                      elev_bands=False, centerlines=False,
                      override_params=None, skip_inversion=False,
                      mb_calibration_strategy='informed_threestep',
                      select_source_from_dir=None, keep_dem_folders=False,
                      add_consensus_thickness=False, add_itslive_velocity=False,
                      add_millan_thickness=False, add_millan_velocity=False,
                      add_hugonnet_dhdt=False, add_bedmachine=False,
                      add_glathida=False,
                      start_level=None, start_base_url=None, max_level=5,
                      logging_level='WORKFLOW',
                      dynamic_spinup=False, ref_mb_err_scaling_factor=0.2,
                      dynamic_spinup_start_year=1979,
                      continue_on_error=True, store_fl_diagnostics=False):
    """Generate the preprocessed OGGM glacier directories for this OGGM version

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the map's border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    dem_source : str
        which DEM source to use: default, SOURCE_NAME, STANDARD or ALL.
        ALL is to generate RGITOPO.
        STANDARD is doing a small RGITOPO using COPDEM + NASADEM.
        default is the current default lookup tables found at
        https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/rgitopo/2025.4/
    working_dir : str
        path to the OGGM working directory
    params_file : str
        path to the OGGM parameter file (to override defaults)
    is_test : bool
        to test on a couple of glaciers only!
    test_ids : list
        if is_test: list of ids to process
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    disable_mp : bool
        disable multiprocessing
    elev_bands : bool
        compute all flowlines based on the Huss & Farinotti 2012 method.
    centerlines : bool
        compute all flowlines based on the OGGM centerline(s) method.
    mb_calibration_strategy : str
        how to calibrate the mass balance. Currently one of:
        - 'informed_threestep' (default)
        - 'melt_temp'
        - 'temp_melt'
        Add the `_regional` suffix to use regional values instead,
        for example `informed_threestep_regional`
    select_source_from_dir : str
        if starting from a level 1 "ALL" or "STANDARD" DEM sources directory,
        select the chosen DEM source here. If you set it to "BY_RES" here,
        COPDEM will be used and its resolution chosen based on the gdir's
        map resolution (COPDEM30 for dx < 60 m, COPDEM90 elsewhere).
    keep_dem_folders : bool
        if `select_source_from_dir` is used, whether to keep the original
        DEM folders or not.
    add_consensus_thickness : bool
        adds (reprojects) the consensus thickness estimates to the glacier
        directories. With elev_bands=True, the data will also be binned.
    add_itslive_velocity : bool
        adds (reprojects) the ITS_LIVE velocity to the glacier
        directories. With elev_bands=True, the data will also be binned.
    add_millan_thickness : bool
        adds (reprojects) the Millan thickness to the glacier
        directories. With elev_bands=True, the data will also be binned.
    add_millan_velocity : bool
        adds (reprojects) the Millan velocity to the glacier
        directories. With elev_bands=True, the data will also be binned.
    add_hugonnet_dhdt : bool
        adds (reprojects) the Hugonnet dhdt maps to the glacier
        directories. With elev_bands=True, the data will also be binned.
    add_bedmachine : bool
        adds (reprojects) the BedMachine ice thickness maps to the glacier
        directories. With elev_bands=True, the data will also be binned.
    add_glathida : bool
        adds (reprojects) the GlaThiDa thickness data to the glacier
        directories. Data points are stored as csv files.
    start_level : int
        the pre-processed level to start from (default is to start from
        scratch). If set, you'll need to indicate start_base_url as well.
    start_base_url : str
        the pre-processed base-url to fetch the data from.
    max_level : int
        the maximum pre-processing level before stopping
    skip_inversion : bool
        do not run the inversion (level 3 files). This is a temporary
        workaround for workflows that won't run that far into level 3.
    logging_level : str
        the logging level to use (DEBUG, INFO, WARNING, WORKFLOW)
    override_params : dict
        a dict of parameters to override.
    dynamic_spinup : str
        include a dynamic spinup matching 'area/dmdtda' OR 'volume/dmdtda' at
        the RGI-date
    ref_mb_err_scaling_factor : float
        scaling factor to reduce individual geodetic mass balance uncertainty
    dynamic_spinup_start_year : int
        if dynamic_spinup is set, define the starting year for the simulation.
        The default is 1979, unless the climate data starts later.
    continue_on_error : bool
        if True the workflow continues if a task raises an error. For
        operational runs it should be set to True (the default).
    store_fl_diagnostics : bool
        if True, also compute and store flowline diagnostics during
        preprocessing. This can increase data usage quite a bit.
    """
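
    # A minimal programmatic call might look like this (illustrative only;
    # the paths are placeholders):
    #
    #     run_prepro_levels(rgi_version='62', rgi_reg='11', border=80,
    #                       output_folder='/path/to/out',
    #                       working_dir='/path/to/wd',
    #                       elev_bands=True, max_level=3)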

    # Input check
    if max_level not in [1, 2, 3, 4, 5]:
        raise InvalidParamsError('max_level should be one of [1, 2, 3, 4, 5]')

    if start_level is not None:
        if start_level not in [0, 1, 2, 3, 4]:
            raise InvalidParamsError('start_level should be one of [0, 1, 2, 3, 4]')
        if start_level > 0 and start_base_url is None:
            raise InvalidParamsError('With start_level, please also indicate '
                                     'start_base_url')
    else:
        start_level = 0

    if dynamic_spinup:
        if dynamic_spinup not in ['area/dmdtda', 'volume/dmdtda']:
            raise InvalidParamsError(f"Dynamic spinup option '{dynamic_spinup}' "
                                     "not supported")

    # Time
    start = time.time()

    def _time_log():
        # Log util
        m, s = divmod(time.time() - start, 60)
        h, m = divmod(m, 60)
        log.workflow('OGGM prepro_levels is done! Time needed: '
                     '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))

    # Local paths
    if override_params is None:
        override_params = {}

    # Use multiprocessing?
    override_params['use_multiprocessing'] = not disable_mp

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    override_params['border'] = border

    # Some arbitrary heuristics on the length of the tidewater extension
    extension = int(utils.clip_min(border / 2, 30))
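    # e.g. border=80 -> extension=40; border=40 -> extension=30 (assuming
    # utils.clip_min clips from below at the given minimum)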
    override_params['calving_line_extension'] = extension

    # Set to True for operational runs
    override_params['continue_on_error'] = continue_on_error

    # Do not use bias file if user wants melt_temp only
    if mb_calibration_strategy in ['melt_temp', 'temp_melt']:
        override_params['use_temp_bias_from_file'] = False

    # For centerlines we have to change the default evolution model and bed
    if centerlines:
        override_params['downstream_line_shape'] = 'parabola'
        override_params['evolution_model'] = 'FluxBased'

    # Other things that make sense
    override_params['store_model_geometry'] = True
    override_params['store_fl_diagnostics'] = store_fl_diagnostics

    utils.mkdir(working_dir)
    override_params['working_dir'] = working_dir

    # Initialize OGGM and set up the run parameters
    cfg.initialize(file=params_file, params=override_params,
                   logging_level=logging_level)

    # Prepare the download of climate file to be shared across processes
    # TODO

    # Log the parameters
    msg = '# OGGM Run parameters:'
    for k, v in cfg.PARAMS.items():
        if type(v) in [pd.DataFrame, dict]:
            continue
        msg += '\n    {}: {}'.format(k, v)
    log.workflow(msg)

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    output_base_dir = os.path.join(output_folder,
                                   'RGI{}'.format(rgi_version),
                                   'b_{:03d}'.format(border))
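    # e.g. <output_folder>/RGI62/b_080 for rgi_version='62' and border=80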

    # Add a package version file
    utils.mkdir(output_base_dir)
    opath = os.path.join(output_base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DATA['demo_glaciers'].index)
    elif test_rgidf is None:

        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        if rgi_version != '70C':
            rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                        version=rgi_version)
            cfg.set_intersects_db(rgif)

        if rgi_version == '62':
            # Some RGI input quality checks - this is based on visual checks
            # of large glaciers in the RGI
            ids_to_ice_cap = [
                'RGI60-05.10315',  # huge Greenland ice cap
                'RGI60-03.01466',  # strange thing next to Devon
                'RGI60-09.00918',  # Academy of Sciences ice cap
                'RGI60-09.00969',
                'RGI60-09.00958',
                'RGI60-09.00957',
            ]
            rgidf.loc[rgidf.RGIId.isin(ids_to_ice_cap), 'Form'] = '1'

            # In AA almost all large ice bodies are actually ice caps
            if rgi_reg == '19':
                rgidf.loc[rgidf.Area > 100, 'Form'] = '1'

            # For Greenland we omit connectivity level 2
            if rgi_reg == '05':
                rgidf = rgidf.loc[rgidf['Connect'] != 2]
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        if test_ids is not None:
            try:
                rgidf = rgidf.loc[rgidf.RGIId.isin(test_ids)]
            except AttributeError:
                # RGI7
                rgidf = rgidf.loc[rgidf.rgi_id.isin(test_ids)]
        else:
            rgidf = rgidf.sample(4)

    if len(rgidf) == 0:
        raise InvalidParamsError('Zero glaciers selected!')

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

345
    # Add a new default source
346
    if not dem_source:
1✔
347
        fs_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/rgitopo/2025.4/'
1✔
348
        if rgi_version == '62':
1✔
349
            fs = utils.file_downloader(fs_url + 'chosen_dem_RGI62_20250616.csv')
×
350
            dfs = pd.read_csv(fs, index_col=0)
×
351
            rgidf['dem_source'] = dfs.loc[rgidf['RGIId'], 'dem_source'].values
×
352
        if rgi_version == '70G':
1✔
353
            fs = utils.file_downloader(fs_url + 'chosen_dem_RGI70G_20250616.csv')
×
354
            dfs = pd.read_csv(fs, index_col=0)
×
355
            rgidf['dem_source'] = dfs.loc[rgidf['rgi_id'], 'dem_source'].values
×
356
        if rgi_version == '70C':
1✔
357
            fs = utils.file_downloader(fs_url + 'chosen_dem_RGI70C_20250616.csv')
×
358
            dfs = pd.read_csv(fs, index_col=0)
×
359
            rgidf['dem_source'] = dfs.loc[rgidf['rgi_id'], 'dem_source'].values
×
360

    # L0 - go
    if start_level == 0:
        gdirs = workflow.init_glacier_directories(rgidf, reset=True, force=True)

        # Glacier stats
        sum_dir = os.path.join(output_base_dir, 'L0', 'summary')
        utils.mkdir(sum_dir)
        opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        # L0 OK - compress all in output directory
        log.workflow('L0 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L0')
        workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 0:
            _time_log()
            return
    else:
        gdirs = workflow.init_glacier_directories(rgidf, reset=True, force=True,
                                                  from_prepro_level=start_level,
                                                  prepro_border=border,
                                                  prepro_rgi_version=rgi_version,
                                                  prepro_base_url=start_base_url
                                                  )

    # L1 - Add dem files
    if start_level == 0:
        if test_topofile:
            cfg.PATHS['dem_file'] = test_topofile

        # Which DEM source?
        if dem_source.upper() in ['ALL', 'STANDARD']:
            # This is the complex one, just do the job and leave

            if dem_source.upper() == 'ALL':
                sources = utils.DEM_SOURCES
            if dem_source.upper() == 'STANDARD':
                sources = ['COPDEM30', 'COPDEM90', 'NASADEM']

            log.workflow('Running prepro on several sources')
            for i, s in enumerate(sources):
                rs = i == 0
                log.workflow('Running prepro on sources: {}'.format(s))
                gdirs = workflow.init_glacier_directories(rgidf, reset=rs,
                                                          force=rs)
                workflow.execute_entity_task(tasks.define_glacier_region, gdirs,
                                             source=s)
                workflow.execute_entity_task(_rename_dem_folder, gdirs, source=s)

            # make a GeoTiff mask of the glacier, choose any source
            workflow.execute_entity_task(gis.rasterio_glacier_mask,
                                         gdirs, source='ALL')

            # Glacier stats
            sum_dir = os.path.join(output_base_dir, 'L1', 'summary')
            utils.mkdir(sum_dir)
            opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
            utils.compile_glacier_statistics(gdirs, path=opath)

            # L1 OK - compress all in output directory
            log.workflow('L1 done. Writing to tar...')
            level_base_dir = os.path.join(output_base_dir, 'L1')
            workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                         base_dir=level_base_dir)
            utils.base_dir_to_tar(level_base_dir)

            _time_log()
            return

        # Force a given source
        source = dem_source.upper() if dem_source else None

        # L1 - go
        workflow.execute_entity_task(tasks.define_glacier_region, gdirs,
                                     source=source)

        # Glacier stats
        sum_dir = os.path.join(output_base_dir, 'L1', 'summary')
        utils.mkdir(sum_dir)
        opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        # L1 OK - compress all in output directory
        log.workflow('L1 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L1')
        workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 1:
            _time_log()
            return

    # L2 - Tasks
    if start_level <= 1:
        # Check which glaciers will be processed as what
        if elev_bands:
            gdirs_band = gdirs
            gdirs_cent = []
        elif centerlines:
            gdirs_band = []
            gdirs_cent = gdirs
        else:
            raise InvalidParamsError('Need to specify either `elev_bands` or '
                                     '`centerlines` type.')

        log.workflow('Start flowline processing with: '
                     'N centerline type: {}, '
                     'N elev bands type: {}.'
                     ''.format(len(gdirs_cent), len(gdirs_band)))

        # If we are coming from a multi-dem setup, let's select it from there
        if select_source_from_dir is not None:
            from oggm.shop.rgitopo import select_dem_from_dir
            workflow.execute_entity_task(select_dem_from_dir, gdirs_band,
                                         dem_source=select_source_from_dir,
                                         keep_dem_folders=keep_dem_folders)
            workflow.execute_entity_task(select_dem_from_dir, gdirs_cent,
                                         dem_source=select_source_from_dir,
                                         keep_dem_folders=keep_dem_folders)

        # HH2015 method
        workflow.execute_entity_task(tasks.simple_glacier_masks, gdirs_band)

        # Centerlines OGGM
        workflow.execute_entity_task(tasks.glacier_masks, gdirs_cent)

        bin_variables = []
        if add_consensus_thickness:
            from oggm.shop.bedtopo import add_consensus_thickness
            workflow.execute_entity_task(add_consensus_thickness, gdirs)
            bin_variables.append('consensus_ice_thickness')
        if add_itslive_velocity:
            from oggm.shop.its_live import itslive_velocity_to_gdir
            workflow.execute_entity_task(itslive_velocity_to_gdir, gdirs)
            bin_variables.append('itslive_v')
        if add_millan_thickness:
            from oggm.shop.millan22 import millan_thickness_to_gdir
            workflow.execute_entity_task(millan_thickness_to_gdir, gdirs)
            bin_variables.append('millan_ice_thickness')
        if add_millan_velocity:
            from oggm.shop.millan22 import millan_velocity_to_gdir
            workflow.execute_entity_task(millan_velocity_to_gdir, gdirs)
            bin_variables.append('millan_v')
        if add_hugonnet_dhdt:
            from oggm.shop.hugonnet_maps import hugonnet_to_gdir
            workflow.execute_entity_task(hugonnet_to_gdir, gdirs)
            bin_variables.append('hugonnet_dhdt')
        if add_bedmachine:
            from oggm.shop.bedmachine import bedmachine_to_gdir
            workflow.execute_entity_task(bedmachine_to_gdir, gdirs)
            bin_variables.append('bedmachine_ice_thickness')
        if add_glathida:
            from oggm.shop.glathida import glathida_to_gdir
            workflow.execute_entity_task(glathida_to_gdir, gdirs)
        if rgi_version == '70C':
            # Some additional data for the 70C glaciers
            workflow.execute_entity_task(tasks.rgi7g_to_complex, gdirs)

        if bin_variables and gdirs_band:
            workflow.execute_entity_task(tasks.elevation_band_flowline,
                                         gdirs_band,
                                         bin_variables=bin_variables)
            workflow.execute_entity_task(tasks.fixed_dx_elevation_band_flowline,
                                         gdirs_band,
                                         bin_variables=bin_variables)
        else:
            # HH2015 method without it
            task_list = [
                tasks.elevation_band_flowline,
                tasks.fixed_dx_elevation_band_flowline,
            ]
            for task in task_list:
                workflow.execute_entity_task(task, gdirs_band)

        # Centerlines OGGM
        task_list = [
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            workflow.execute_entity_task(task, gdirs_cent)

        # Same for all glaciers
        if border >= 20:
            task_list = [
                tasks.compute_downstream_line,
                tasks.compute_downstream_bedshape,
            ]
            for task in task_list:
                workflow.execute_entity_task(task, gdirs)
        else:
            log.workflow("L2: for map border values < 20, won't compute "
                         "downstream lines.")

        # Glacier stats
        sum_dir = os.path.join(output_base_dir, 'L2', 'summary')
        utils.mkdir(sum_dir)
        opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        if add_itslive_velocity:
            from oggm.shop.its_live import compile_itslive_statistics
            opath = os.path.join(sum_dir, 'itslive_statistics_{}.csv'.format(rgi_reg))
            compile_itslive_statistics(gdirs, path=opath)
        if add_millan_thickness or add_millan_velocity:
            from oggm.shop.millan22 import compile_millan_statistics
            opath = os.path.join(sum_dir, 'millan_statistics_{}.csv'.format(rgi_reg))
            compile_millan_statistics(gdirs, path=opath)
        if add_consensus_thickness:
            from oggm.shop.bedtopo import compile_consensus_statistics
            opath = os.path.join(sum_dir, 'consensus_statistics_{}.csv'.format(rgi_reg))
            compile_consensus_statistics(gdirs, path=opath)
        if add_hugonnet_dhdt:
            from oggm.shop.hugonnet_maps import compile_hugonnet_statistics
            opath = os.path.join(sum_dir, 'hugonnet_statistics_{}.csv'.format(rgi_reg))
            compile_hugonnet_statistics(gdirs, path=opath)
        if add_bedmachine:
            from oggm.shop.bedmachine import compile_bedmachine_statistics
            opath = os.path.join(sum_dir, 'bedmachine_statistics_{}.csv'.format(rgi_reg))
            compile_bedmachine_statistics(gdirs, path=opath)
        if add_glathida:
            from oggm.shop.glathida import compile_glathida_statistics
            opath = os.path.join(sum_dir, 'glathida_statistics_{}.csv'.format(rgi_reg))
            compile_glathida_statistics(gdirs, path=opath)

        # And for level 2: shapes
        if len(gdirs_cent) > 0:
            opath = os.path.join(sum_dir, f'centerlines_{rgi_reg}.shp')
            utils.write_centerlines_to_shape(gdirs_cent, to_tar=True,
                                             path=opath)
            opath = os.path.join(sum_dir, f'centerlines_smoothed_{rgi_reg}.shp')
            utils.write_centerlines_to_shape(gdirs_cent, to_tar=True,
                                             ensure_exterior_match=True,
                                             simplify_line_before=0.75,
                                             corner_cutting=3,
                                             path=opath)
            opath = os.path.join(sum_dir, f'flowlines_{rgi_reg}.shp')
            utils.write_centerlines_to_shape(gdirs_cent, to_tar=True,
                                             flowlines_output=True,
                                             path=opath)
            opath = os.path.join(sum_dir, f'geom_widths_{rgi_reg}.shp')
            utils.write_centerlines_to_shape(gdirs_cent, to_tar=True,
                                             geometrical_widths_output=True,
                                             path=opath)
            opath = os.path.join(sum_dir, f'widths_{rgi_reg}.shp')
            utils.write_centerlines_to_shape(gdirs_cent, to_tar=True,
                                             corrected_widths_output=True,
                                             path=opath)

        # L2 OK - compress all in output directory
        log.workflow('L2 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L2')
        workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 2:
            _time_log()
            return

    # L3 - Tasks
    if start_level <= 2:
        sum_dir = os.path.join(output_base_dir, 'L3', 'summary')
        utils.mkdir(sum_dir)

        # Climate
        workflow.execute_entity_task(tasks.process_climate_data, gdirs)

        # Small optim to avoid concurrency
        utils.get_geodetic_mb_dataframe()
        utils.get_temp_bias_dataframe()

        use_regional_avg = False
        if '_regional' in mb_calibration_strategy:
            use_regional_avg = True
            mb_calibration_strategy = mb_calibration_strategy.replace('_regional', '')
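
        # The strategy name encodes the calibration order of the mass-balance
        # parameters: 'melt_temp' calibrates melt_f first, then temp_bias;
        # 'temp_melt' does the reverse (see calibrate_param1/2 below).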

        if mb_calibration_strategy == 'informed_threestep':
            workflow.execute_entity_task(tasks.mb_calibration_from_hugonnet_mb,
                                         gdirs,
                                         informed_threestep=True,
                                         use_regional_avg=use_regional_avg)
        elif mb_calibration_strategy == 'melt_temp':
            workflow.execute_entity_task(tasks.mb_calibration_from_hugonnet_mb,
                                         gdirs,
                                         calibrate_param1='melt_f',
                                         calibrate_param2='temp_bias',
                                         use_regional_avg=use_regional_avg)
        elif mb_calibration_strategy == 'temp_melt':
            workflow.execute_entity_task(tasks.mb_calibration_from_hugonnet_mb,
                                         gdirs,
                                         calibrate_param1='temp_bias',
                                         calibrate_param2='melt_f',
                                         use_regional_avg=use_regional_avg)
        else:
            raise InvalidParamsError('mb_calibration_strategy not understood: '
                                     f'{mb_calibration_strategy}')

        if not skip_inversion:
            workflow.execute_entity_task(tasks.apparent_mb_from_any_mb, gdirs)

            # Inversion: we match the consensus
            do_filter = border >= 20
            workflow.calibrate_inversion_from_consensus(gdirs,
                                                        apply_fs_on_mismatch=True,
                                                        error_on_mismatch=False,
                                                        filter_inversion_output=do_filter)

            # We get ready for modelling
            if border >= 20:
                workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)
            else:
                log.workflow("L3: for map border values < 20, won't initialize "
                             "glaciers for the run.")
        # Glacier stats
        opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)
        opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
        utils.compile_climate_statistics(gdirs, path=opath)
        opath = os.path.join(sum_dir, 'fixed_geometry_mass_balance_{}.csv'.format(rgi_reg))
        utils.compile_fixed_geometry_mass_balance(gdirs, path=opath)

        # L3 OK - compress all in output directory
        log.workflow('L3 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L3')
        workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 3:
            _time_log()
            return
        if border < 20:
            log.workflow("L3: for map border values < 20, won't compute L4 and L5.")
            _time_log()
            return

        # Needed to copy some files for L4 and L5
        sum_dir_L3 = sum_dir

    # L4 - Tasks (add historical runs (old default) and dynamic spinup runs)
    if start_level <= 3:
        sum_dir = os.path.join(output_base_dir, 'L4', 'summary')
        utils.mkdir(sum_dir)

        # Copy L3 files for consistency
        for bn in ['glacier_statistics', 'climate_statistics',
                   'fixed_geometry_mass_balance']:
            if start_level <= 2:
                ipath = os.path.join(sum_dir_L3, bn + '_{}.csv'.format(rgi_reg))
            else:
                ipath = file_downloader(os.path.join(
                    get_prepro_base_url(base_url=start_base_url,
                                        rgi_version=rgi_version, border=border,
                                        prepro_level=start_level), 'summary',
                    bn + '_{}.csv'.format(rgi_reg)))

            opath = os.path.join(sum_dir, bn + '_{}.csv'.format(rgi_reg))
            shutil.copyfile(ipath, opath)

        # Get end date. The first gdir might have blown up, try some others
        i = 0
        while True:
            if i >= len(gdirs):
                raise RuntimeError('Found no valid glaciers!')
            try:
                y0 = gdirs[i].get_climate_info()['baseline_yr_0']
                # One adds 1 because the run ends at the end of the year
                ye = gdirs[i].get_climate_info()['baseline_yr_1'] + 1
                break
            except BaseException:
                i += 1

        # Conduct the historical run before the dynamic melt_f calibration
        # (for comparison to the old default behavior)
        workflow.execute_entity_task(tasks.run_from_climate_data, gdirs,
                                     min_ys=y0, ye=ye,
                                     output_filesuffix='_historical')
        # Now compile the output
        opath = os.path.join(sum_dir, f'historical_run_output_{rgi_reg}.nc')
        utils.compile_run_output(gdirs, path=opath, input_filesuffix='_historical')

        # Conduct the dynamic spinup if wanted
        if dynamic_spinup:
            if y0 > dynamic_spinup_start_year:
                dynamic_spinup_start_year = y0
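
            # e.g. dynamic_spinup='area/dmdtda' -> minimise_for == 'area'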

            minimise_for = dynamic_spinup.split('/')[0]

            melt_f_max = cfg.PARAMS['melt_f_max']
            workflow.execute_entity_task(
                tasks.run_dynamic_melt_f_calibration, gdirs,
                ref_mb_err_scaling_factor=ref_mb_err_scaling_factor,
                ys=dynamic_spinup_start_year, ye=ye,
                melt_f_max=melt_f_max,
                kwargs_run_function={'minimise_for': minimise_for},
                ignore_errors=True,
                kwargs_fallback_function={'minimise_for': minimise_for},
                output_filesuffix='_spinup_historical',)
            # Now compile the output
            opath = os.path.join(sum_dir, f'spinup_historical_run_output_{rgi_reg}.nc')
            utils.compile_run_output(gdirs, path=opath,
                                     input_filesuffix='_spinup_historical')

        # Glacier statistics we recompute here for error analysis
        opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        # Add the extended files
        pf = os.path.join(sum_dir, 'historical_run_output_{}.nc'.format(rgi_reg))
        # We have copied the files above
        mf = os.path.join(sum_dir, 'fixed_geometry_mass_balance_{}.csv'.format(rgi_reg))
        sf = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
        opath = os.path.join(sum_dir, 'historical_run_output_extended_{}.nc'.format(rgi_reg))
        utils.extend_past_climate_run(past_run_file=pf,
                                      fixed_geometry_mb_file=mf,
                                      glacier_statistics_file=sf,
                                      path=opath)

        # L4 OK - compress all in output directory
        log.workflow('L4 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L4')
        workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)

        sum_dir_L4 = sum_dir

        if max_level == 4:
            _time_log()
            return

    # L5 - No tasks: make the dirs small
    sum_dir = os.path.join(output_base_dir, 'L5', 'summary')
    utils.mkdir(sum_dir)

    # Copy L4 files for consistency
    files_to_copy = ['glacier_statistics', 'climate_statistics',
                     'fixed_geometry_mass_balance', 'historical_run_output',
                     'historical_run_output_extended']
    files_suffixes = ['csv', 'csv', 'csv', 'nc', 'nc']
    if dynamic_spinup:
        files_to_copy.append('spinup_historical_run_output')
        files_suffixes.append('nc')
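    # Each (name, suffix) pair yields a summary file such as
    # glacier_statistics_11.csv or historical_run_output_11.nc (for rgi_reg='11')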
    for bn, suffix in zip(files_to_copy, files_suffixes):
        if start_level <= 3:
            ipath = os.path.join(sum_dir_L4, bn + f'_{rgi_reg}.{suffix}')
        else:
            ipath = file_downloader(os.path.join(
                get_prepro_base_url(base_url=start_base_url,
                                    rgi_version=rgi_version, border=border,
                                    prepro_level=start_level), 'summary',
                bn + f'_{rgi_reg}.{suffix}'))
        opath = os.path.join(sum_dir, bn + f'_{rgi_reg}.{suffix}')
        shutil.copyfile(ipath, opath)

    # Copy mini data to new dir
    mini_base_dir = os.path.join(working_dir, 'mini_perglacier',
                                 'RGI{}'.format(rgi_version),
                                 'b_{:03d}'.format(border))
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir, gdirs,
                                              base_dir=mini_base_dir,
                                              setup='run/spinup')

    # L5 OK - compress all in output directory
    log.workflow('L5 done. Writing to tar...')
    level_base_dir = os.path.join(output_base_dir, 'L5')
    workflow.execute_entity_task(utils.gdir_to_tar, mini_gdirs, delete=False,
                                 base_dir=level_base_dir)
    utils.base_dir_to_tar(level_base_dir)

    _time_log()


def parse_args(args):
    """Check input arguments and env variables"""

    # CLI args
    description = ('Generate the preprocessed OGGM glacier directories for '
                   'this OGGM version.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--map-border', type=int,
                        help='the size of the map border. Is required if '
                             '$OGGM_MAP_BORDER is not set.')
    parser.add_argument('--rgi-reg', type=str,
                        help='the RGI region to process. Is required if '
                             '$OGGM_RGI_REG is not set.')
    parser.add_argument('--rgi-version', type=str,
                        help='the RGI version to use. Defaults to the OGGM '
                             'default.')
    parser.add_argument('--start-level', type=int, default=0,
                        help='the pre-processed level to start from (default '
                             'is to start from 0). If set, you will need to '
                             'indicate --start-base-url as well.')
    parser.add_argument('--start-base-url', type=str,
                        help='the pre-processed base-url to fetch the data '
                             'from when starting from level > 0.')
    parser.add_argument('--max-level', type=int, default=5,
                        help='the maximum level you want to run the '
                             'pre-processing for (1, 2, 3, 4 or 5).')
    parser.add_argument('--working-dir', type=str,
                        help='path to the directory where to write the '
                             'output. Defaults to current directory or '
                             '$OGGM_WORKDIR.')
    parser.add_argument('--params-file', type=str,
                        help='path to the OGGM parameter file to use in place '
                             'of the default one.')
    parser.add_argument('--output', type=str,
                        help='path to the directory where to write the '
                             'output. Defaults to current directory or '
                             '$OGGM_OUTDIR.')
    parser.add_argument('--logging-level', type=str, default='WORKFLOW',
                        help='the logging level to use (DEBUG, INFO, WARNING, '
                             'WORKFLOW).')
    parser.add_argument('--elev-bands', nargs='?', const=True, default=False,
                        help='compute the flowlines based on the Huss & '
                             'Farinotti 2012 method.')
    parser.add_argument('--centerlines', nargs='?', const=True, default=False,
                        help='compute the flowlines based on the OGGM '
                             'centerline(s) method.')
    parser.add_argument('--skip-inversion', nargs='?', const=True, default=False,
                        help='do not run the inversion (level 3 files). '
                             'This is a temporary workaround for workflows '
                             "that won't run that far into level 3.")
    parser.add_argument('--mb-calibration-strategy', type=str,
                        default='informed_threestep',
                        help='how to calibrate the mass balance. Currently one '
                             'of informed_threestep (default), melt_temp '
                             'or temp_melt. Add the _regional suffix to '
                             'use regional values instead, for example '
                             'informed_threestep_regional.')
    parser.add_argument('--dem-source', type=str, default='',
                        help='which DEM source to use. Possible options are '
                             'the name of a specific DEM (e.g. RAMP, SRTM...) '
                             'or ALL, in which case all available DEMs will '
                             'be processed and adjoined with a suffix at the '
                             'end of the file name. The ALL option is only '
                             'compatible with level 1 folders, after which '
                             'the processing will stop. The default is to use '
                             'the default OGGM DEM.')
    parser.add_argument('--select-source-from-dir', type=str,
                        default=None,
                        help='if starting from a level 1 "ALL" or "STANDARD" DEM '
                             'sources directory, select the chosen DEM source here. '
                             'If you set it to "BY_RES" here, COPDEM will be used and '
                             'its resolution chosen based on the gdirs map resolution '
                             '(COPDEM30 for dx < 60 m, COPDEM90 elsewhere).')
    parser.add_argument('--keep-dem-folders', nargs='?', const=True, default=False,
                        help='if `select_source_from_dir` is used, whether to keep '
                             'the original DEM folders or not.')
    parser.add_argument('--add-consensus-thickness', nargs='?', const=True, default=False,
                        help='adds (reprojects) the consensus thickness '
                             'estimates to the glacier directories. '
                             'With --elev-bands, the data will also be '
                             'binned.')
    parser.add_argument('--add-itslive-velocity', nargs='?', const=True, default=False,
                        help='adds (reprojects) the ITS_LIVE velocity '
                             'estimates to the glacier directories. '
                             'With --elev-bands, the data will also be '
                             'binned.')
    parser.add_argument('--add-millan-thickness', nargs='?', const=True, default=False,
                        help='adds (reprojects) the Millan thickness '
                             'estimates to the glacier directories. '
                             'With --elev-bands, the data will also be '
                             'binned.')
    parser.add_argument('--add-millan-velocity', nargs='?', const=True, default=False,
                        help='adds (reprojects) the Millan velocity '
                             'estimates to the glacier directories. '
                             'With --elev-bands, the data will also be '
                             'binned.')
    parser.add_argument('--add-hugonnet-dhdt', nargs='?', const=True, default=False,
                        help='adds (reprojects) the Hugonnet dhdt '
                             'maps to the glacier directories. '
                             'With --elev-bands, the data will also be '
                             'binned.')
    parser.add_argument('--add-bedmachine', nargs='?', const=True, default=False,
                        help='adds (reprojects) the BedMachine ice thickness '
                             'maps to the glacier directories. '
                             'With --elev-bands, the data will also be '
                             'binned.')
    parser.add_argument('--add-glathida', nargs='?', const=True, default=False,
                        help='adds (reprojects) the GlaThiDa point thickness '
                             'observations to the glacier directories. '
                             'The data points are stored as csv.')
    parser.add_argument('--demo', nargs='?', const=True, default=False,
                        help='if you want to run the prepro for the '
                             'list of demo glaciers.')
    parser.add_argument('--test', nargs='?', const=True, default=False,
                        help='if you want to do a test on a couple of '
                             'glaciers first.')
    parser.add_argument('--test-ids', nargs='+',
                        help='if --test, specify the RGI ids to run separated '
                             'by a space (default: 4 randomly selected).')
    parser.add_argument('--disable-mp', nargs='?', const=True, default=False,
                        help='if you want to disable multiprocessing.')
    parser.add_argument('--dynamic-spinup', type=str, default='',
                        help="include a dynamic spinup for matching glacier area "
                             "('area/dmdtda') OR volume ('volume/dmdtda') at "
                             "the RGI-date, AND mass-change from Hugonnet "
                             "in the period 2000-2020 (dynamic melt_f "
                             "calibration).")
    parser.add_argument('--ref-mb-err-scaling-factor', type=float, default=0.2,
                        help="scaling factor to account for correlated "
                             "uncertainties of geodetic mass balance "
                             "observations when looking at regional scale. "
                             "Should be smaller or equal to 1.")
    parser.add_argument('--dynamic-spinup-start-year', type=int, default=1979,
                        help="if --dynamic-spinup is set, define the starting "
                             "year for the simulation. The default is 1979, "
                             "unless the climate data starts later.")
    parser.add_argument('--store-fl-diagnostics', nargs='?', const=True, default=False,
                        help="also compute and store flowline diagnostics during "
                             "preprocessing. This can increase data usage quite "
                             "a bit.")
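    # --override-params expects a JSON dict on the command line, e.g.
    # --override-params '{"continue_on_error": false}' (illustrative)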
    parser.add_argument('--override-params', type=json.loads, default=None)

    args = parser.parse_args(args)

    # Check input
    rgi_reg = args.rgi_reg
    if args.demo:
        rgi_reg = 0
    if not rgi_reg and not args.demo:
        rgi_reg = os.environ.get('OGGM_RGI_REG', None)
        if rgi_reg is None:
            raise InvalidParamsError('--rgi-reg is required!')
    rgi_reg = '{:02}'.format(int(rgi_reg))
    ok_regs = ['{:02}'.format(int(r)) for r in range(1, 20)]
    if not args.demo and rgi_reg not in ok_regs:
        raise InvalidParamsError('--rgi-reg should range from 01 to 19!')

    rgi_version = args.rgi_version

    border = args.map_border
    if not border:
        border = os.environ.get('OGGM_MAP_BORDER', None)
        if border is None:
            raise InvalidParamsError('--map-border is required!')

    working_dir = args.working_dir
    if not working_dir:
        working_dir = os.environ.get('OGGM_WORKDIR', '')

    output_folder = args.output
    if not output_folder:
        output_folder = os.environ.get('OGGM_OUTDIR', '')

    border = int(border)
    output_folder = os.path.abspath(output_folder)
    working_dir = os.path.abspath(working_dir)

    dynamic_spinup = False if args.dynamic_spinup == '' else args.dynamic_spinup

    # All good
    return dict(rgi_version=rgi_version, rgi_reg=rgi_reg,
                border=border, output_folder=output_folder,
                working_dir=working_dir, params_file=args.params_file,
                is_test=args.test, test_ids=args.test_ids,
                demo=args.demo, dem_source=args.dem_source,
                start_level=args.start_level, start_base_url=args.start_base_url,
                max_level=args.max_level, disable_mp=args.disable_mp,
                logging_level=args.logging_level,
                elev_bands=args.elev_bands,
                skip_inversion=args.skip_inversion,
                centerlines=args.centerlines,
                select_source_from_dir=args.select_source_from_dir,
                keep_dem_folders=args.keep_dem_folders,
                add_consensus_thickness=args.add_consensus_thickness,
                add_millan_thickness=args.add_millan_thickness,
                add_itslive_velocity=args.add_itslive_velocity,
                add_millan_velocity=args.add_millan_velocity,
                add_hugonnet_dhdt=args.add_hugonnet_dhdt,
                add_bedmachine=args.add_bedmachine,
                add_glathida=args.add_glathida,
                dynamic_spinup=dynamic_spinup,
                ref_mb_err_scaling_factor=args.ref_mb_err_scaling_factor,
                dynamic_spinup_start_year=args.dynamic_spinup_start_year,
                mb_calibration_strategy=args.mb_calibration_strategy,
                store_fl_diagnostics=args.store_fl_diagnostics,
                override_params=args.override_params,
                )

1048
def main():
1✔
1049
    """Script entry point"""
1050

1051
    run_prepro_levels(**parse_args(sys.argv[1:]))
×