
OGGM / oggm, build 5975837719 (push, github, web-flow)
25 Aug 2023 12:24PM UTC. Coverage: 85.547% (+0.05%) from 85.502%
Commit: Tiny edit on index page regarding youtube (#1628)
11441 of 13374 relevant lines covered (85.55%), 3.76 hits per line

Source file: /oggm/shop/millan22.py (75.98% of lines covered)
import logging
import os
import warnings

import numpy as np
import pandas as pd
import xarray as xr
import shapely.geometry as shpg

try:
    import salem
except ImportError:
    pass

try:
    import geopandas as gpd
except ImportError:
    pass

from oggm import utils, cfg
from oggm.exceptions import InvalidWorkflowError

# Module logger
log = logging.getLogger(__name__)

default_base_url = 'https://cluster.klima.uni-bremen.de/~oggm/velocities/millan22/'

_lookup_thickness = None
_lookup_velocity = None


def _get_lookup_thickness():
    global _lookup_thickness
    if _lookup_thickness is None:
        fname = default_base_url + 'millan22_thickness_lookup_shp_20220902.zip'
        _lookup_thickness = gpd.read_file('zip://' + utils.file_downloader(fname))
    return _lookup_thickness


def _get_lookup_velocity():
    global _lookup_velocity
    if _lookup_velocity is None:
        fname = default_base_url + 'millan22_velocity_lookup_shp_20220902.zip'
        _lookup_velocity = gpd.read_file('zip://' + utils.file_downloader(fname))
    return _lookup_velocity


def _filter(ds):
    # Read the data and prevent bad surprises
    data = ds.get_vardata().astype(np.float64)
    # Replace NaNs with 0
    data[~ np.isfinite(data)] = 0
    nmax = np.nanmax(data)
    if nmax == np.inf:
        # Replace inf with 0
        data[data == nmax] = 0

    return data


def _filter_and_reproj(gdir, var, gdf, allow_neg=True):
    """Shared code for the thickness, velocity and error fields.

    Parameters
    ----------
    var : str
        the lookup column to fetch (e.g. 'thickness', 'err', 'v', 'vx', 'vy')
    gdf : geopandas.GeoDataFrame
        the lookup table rows selected for this glacier
    """

    # We may have more than one file
    total_data = 0
    grids_used = []
    files_used = []
    for i, s in gdf.iterrows():
        # Fetch it
        url = default_base_url + s[var]
        input_file = utils.file_downloader(url)

        # Subset to avoid reading huge files
        dsb = salem.GeoTiff(input_file)
        x0, x1, y0, y1 = gdir.grid.extent_in_crs(dsb.grid.proj)
        with warnings.catch_warnings():
            # This can trigger an out of bounds warning
            warnings.filterwarnings("ignore", category=RuntimeWarning,
                                    message='.*out of bounds.*')
            dsb.set_subset(corners=((x0, y0), (x1, y1)),
                           crs=dsb.grid.proj,
                           margin=5)

        data = _filter(dsb)
        if not allow_neg:
            # Replace negative values with 0
            data[data < 0] = 0

        if np.nansum(data) == 0:
            # No need to continue
            continue

        # Reproject now
        with warnings.catch_warnings():
            # This can trigger an out of bounds warning
            warnings.filterwarnings("ignore", category=RuntimeWarning,
                                    message='.*out of bounds.*')
            r_data = gdir.grid.map_gridded_data(data, dsb.grid, interp='linear')
        total_data += r_data.filled(0)
        grids_used.append(dsb)
        files_used.append(s.file_id)

    return total_data, files_used, grids_used


@utils.entity_task(log, writes=['gridded_data'])
def thickness_to_gdir(gdir, add_error=False):
    """Add the Millan 22 thickness data to this glacier directory.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_error : bool
        add the error data or not
    """

    # Find out which file(s) we need
    gdf = _get_lookup_thickness()
    cp = shpg.Point(gdir.cenlon, gdir.cenlat)
    sel = gdf.loc[gdf.contains(cp)]
    if len(sel) == 0:
        raise InvalidWorkflowError(f'There seems to be no Millan file for this '
                                   f'glacier: {gdir.rgi_id}')

    total_thick, files_used, _ = _filter_and_reproj(gdir, 'thickness', sel,
                                                    allow_neg=False)

    # We mask zero ice as nodata
    total_thick = np.where(total_thick == 0, np.NaN, total_thick)

    if add_error:
        total_err, _, _ = _filter_and_reproj(gdir, 'err', sel, allow_neg=False)
        total_err[~ np.isfinite(total_thick)] = np.NaN
        # Error cannot be larger than ice thickness itself
        total_err = utils.clip_max(total_err, total_thick)

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:

        vn = 'millan_ice_thickness'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        ln = 'Ice thickness from Millan et al. 2022'
        v.long_name = ln
        data_str = ' '.join(files_used) if len(files_used) > 1 else files_used[0]
        v.data_source = data_str
        v[:] = total_thick

        if add_error:
            vn = 'millan_ice_thickness_err'
            if vn in nc.variables:
                v = nc.variables[vn]
            else:
                v = nc.createVariable(vn, 'f4', ('y', 'x',), zlib=True)
            v.units = 'm'
            ln = 'Ice thickness error from Millan et al. 2022'
            v.long_name = ln
            v.data_source = data_str
            v[:] = total_err


@utils.entity_task(log, writes=['gridded_data'])
def velocity_to_gdir(gdir, add_error=False):
    """Add the Millan 22 velocity data to this glacier directory.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    add_error : bool
        add the error data or not
    """

    if gdir.rgi_region in ['05']:
        raise NotImplementedError('Millan 22 does not provide velocity '
                                  'products for Greenland - we would need to '
                                  'implement MEASURES in the shop for this.')

    # Find out which file(s) we need
    gdf = _get_lookup_velocity()
    cp = shpg.Point(gdir.cenlon, gdir.cenlat)
    sel = gdf.loc[gdf.contains(cp)]
    if len(sel) == 0:
        raise InvalidWorkflowError(f'There seems to be no Millan file for this '
                                   f'glacier: {gdir.rgi_id}')

    vel, files, grids = _filter_and_reproj(gdir, 'v', sel, allow_neg=False)
    if len(grids) == 0:
        raise RuntimeError('There is no velocity data for this glacier')
    if len(grids) > 1:
        raise RuntimeError("Multiple velocity grids - don't know what to do.")

    sel = sel.loc[sel.file_id == files[0]]
    vx, _, gridsx = _filter_and_reproj(gdir, 'vx', sel)
    vy, _, gridsy = _filter_and_reproj(gdir, 'vy', sel)

    dsx = gridsx[0]
    dsy = gridsy[0]
    grid_vel = dsx.grid
    proj_vel = grid_vel.proj
    grid_gla = gdir.grid

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = _filter(dsx)
    yy1 = _filter(dsy)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Scale back to match velocity
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        new_vel = np.sqrt(vx**2 + vy**2)
        p_ok = np.isfinite(new_vel) & (new_vel > 1)  # avoid div by zero
        scaler = vel[p_ok] / new_vel[p_ok]
        vx[p_ok] = vx[p_ok] * scaler
        vy[p_ok] = vy[p_ok] * scaler

    vx = vx.filled(np.NaN)
    vy = vy.filled(np.NaN)

    if add_error:
        err_vx, _, _ = _filter_and_reproj(gdir, 'err_vx', sel, allow_neg=False)
        err_vy, _, _ = _filter_and_reproj(gdir, 'err_vy', sel, allow_neg=False)
        err_vx[p_ok] = err_vx[p_ok] * scaler
        err_vy[p_ok] = err_vy[p_ok] * scaler

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:

        vn = 'millan_v'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        ln = 'Ice velocity from Millan et al. 2022'
        v.long_name = ln
        v.data_source = files[0]
        v[:] = vel

        vn = 'millan_vx'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        ln = 'Ice velocity in map x direction from Millan et al. 2022'
        v.long_name = ln
        v.data_source = files[0]
        v[:] = vx

        vn = 'millan_vy'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        ln = 'Ice velocity in map y direction from Millan et al. 2022'
        v.long_name = ln
        v.data_source = files[0]
        v[:] = vy

        if add_error:
            vn = 'millan_err_vx'
            if vn in nc.variables:
                v = nc.variables[vn]
            else:
                v = nc.createVariable(vn, 'f4', ('y', 'x',), zlib=True)
            v.units = 'm'
            ln = 'Ice velocity error in map x direction from Millan et al. 2022'
            v.long_name = ln
            v.data_source = files[0]
            v[:] = err_vx

            vn = 'millan_err_vy'
            if vn in nc.variables:
                v = nc.variables[vn]
            else:
                v = nc.createVariable(vn, 'f4', ('y', 'x',), zlib=True)
            v.units = 'm'
            ln = 'Ice velocity error in map y direction from Millan et al. 2022'
            v.long_name = ln
            v.data_source = files[0]
            v[:] = err_vy


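The coordinate gymnastics in velocity_to_gdir can be puzzling at first: rather than rotating the (vx, vy) vectors directly, the code treats them as one-year displacements, transforms both endpoints of each displacement into the glacier projection, differences them there, and finally rescales the result so its magnitude matches the original speed. Below is a minimal, self-contained sketch of that idea using pyproj on a single point (the projections, coordinates and velocity values are made up for illustration; the OGGM code above does the same with salem on full grids):

import numpy as np
import pyproj

# Hypothetical source (UTM) and target (Mercator) projections
proj_src = pyproj.Proj(proj='utm', zone=32, ellps='WGS84')
proj_dst = pyproj.Proj(proj='merc', ellps='WGS84')
tf = pyproj.Transformer.from_proj(proj_src, proj_dst, always_xy=True)

# A point and its one-year displacement in the source projection (made up)
x0, y0 = 500000.0, 5200000.0
vx, vy = 100.0, 50.0  # m/yr

# Transform both endpoints, then difference in the target projection
x0t, y0t = tf.transform(x0, y0)
x1t, y1t = tf.transform(x0 + vx, y0 + vy)
vxt, vyt = x1t - x0t, y1t - y0t

# Rescale so the speed is preserved despite map-scale distortion
scale = np.hypot(vx, vy) / np.hypot(vxt, vyt)
vxt, vyt = vxt * scale, vyt * scale
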
@utils.entity_task(log)
def millan_statistics(gdir):
    """Gather statistics about the Millan data interpolated to this glacier.
    """

    d = dict()

    # Easy stats - this should always be possible
    d['rgi_id'] = gdir.rgi_id
    d['rgi_region'] = gdir.rgi_region
    d['rgi_subregion'] = gdir.rgi_subregion
    d['rgi_area_km2'] = gdir.rgi_area_km2
    d['millan_vol_km3'] = 0
    d['millan_area_km2'] = 0
    d['millan_perc_cov'] = 0

    try:
        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
            thick = ds['millan_ice_thickness'].where(ds['glacier_mask'], np.NaN).load()
            d['millan_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
            d['millan_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
            d['millan_perc_cov'] = float(d['millan_area_km2'] / gdir.rgi_area_km2)

            if 'millan_ice_thickness_err' in ds:
                err = ds['millan_ice_thickness_err'].where(ds['glacier_mask'], np.NaN).load()
                d['millan_vol_err_km3'] = float(err.sum() * gdir.grid.dx ** 2 * 1e-9)
    except (FileNotFoundError, AttributeError, KeyError):
        pass

    try:
        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
            v = ds['millan_v'].where(ds['glacier_mask'], np.NaN).load()
            d['millan_avg_vel'] = np.nanmean(v)
            d['millan_max_vel'] = np.nanmax(v)

            if 'millan_err_vx' in ds:
                err_vx = ds['millan_err_vx'].where(ds['glacier_mask'], np.NaN).load()
                err_vy = ds['millan_err_vy'].where(ds['glacier_mask'], np.NaN).load()
                err = (err_vx**2 + err_vy**2)**0.5
                d['millan_avg_err_vel'] = np.nanmean(err)

    except (FileNotFoundError, AttributeError, KeyError):
        pass

    return d


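As a quick sanity check on the units in millan_statistics: thickness is in metres, so summing it over all cells and multiplying by the cell area dx**2 (in m2) gives a volume in m3; the factors 1e-9 and 1e-6 then convert to km3 and km2. A tiny worked example (all numbers hypothetical):

dx = 100.0        # grid spacing in metres (hypothetical)
n_cells = 400     # cells covered by ice (hypothetical)
mean_thick = 50.0  # mean ice thickness in metres (hypothetical)

vol_km3 = mean_thick * n_cells * dx**2 * 1e-9   # -> 0.2 km3
area_km2 = n_cells * dx**2 * 1e-6               # -> 4.0 km2
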
@utils.global_task(log)
def compile_millan_statistics(gdirs, filesuffix='', path=True):
    """Gather as much statistics as possible about a list of glaciers.

    It can be used for result diagnostics and other analyses. If the data
    necessary for a statistic is not available (e.g. flowline length) it
    will simply be ignored.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    filesuffix : str
        add a suffix to the output file
    path : str, bool
        Set to "True" in order to store the info in the working directory
        Set to a path to store the file to your chosen location
    """
    from oggm.workflow import execute_entity_task

    out_df = execute_entity_task(millan_statistics, gdirs)

    out = pd.DataFrame(out_df).set_index('rgi_id')

    if path:
        if path is True:
            out.to_csv(os.path.join(cfg.PATHS['working_dir'],
                                    ('millan_statistics' +
                                     filesuffix + '.csv')))
        else:
            out.to_csv(path)

    return out
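
For context, the tasks above are OGGM entity tasks and are meant to be run through the workflow machinery, consistent with the execute_entity_task import used in compile_millan_statistics. A minimal usage sketch, assuming an initialised OGGM setup; the working directory path and RGI id are placeholders:

from oggm import cfg, workflow
from oggm.shop import millan22

cfg.initialize()
cfg.PATHS['working_dir'] = '/path/to/wdir'  # placeholder

# Hypothetical pre-processed glacier directories
gdirs = workflow.init_glacier_directories(['RGI60-11.00897'],
                                          from_prepro_level=3,
                                          prepro_border=80)

workflow.execute_entity_task(millan22.thickness_to_gdir, gdirs)
workflow.execute_entity_task(millan22.velocity_to_gdir, gdirs)
df = millan22.compile_millan_statistics(gdirs)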