OGGM / oggm, build 3836586642 (pending completion)
Pull Request #1510: Fix issue with TANDEM DEM and minor RGI7 issue
GitHub: Merge 2a19b5c72 into 8ab12c504

2 of 4 new or added lines in 2 files covered. (50.0%)

249 existing lines in 7 files now uncovered.

12099 of 13681 relevant lines covered (88.44%)

3.61 hits per line

Source File
/oggm/core/flowline.py (94.64% covered)
1
"""Flowline modelling: bed shapes and model numerics.
2

3

4
"""
5
# Builtins
6
import logging
9✔
7
import copy
9✔
8
from collections import OrderedDict
9✔
9
from functools import partial
9✔
10
from time import gmtime, strftime
9✔
11
import os
9✔
12
import shutil
9✔
13
import warnings
9✔
14

15
# External libs
16
import numpy as np
9✔
17
import shapely.geometry as shpg
9✔
18
import xarray as xr
9✔
19
from scipy.linalg import solve_banded
9✔
20

21
# Optional libs
22
try:
9✔
23
    import salem
9✔
24
except ImportError:
25
    pass
26
import pandas as pd
9✔
27

28
# Locals
29
from oggm import __version__
9✔
30
import oggm.cfg as cfg
9✔
31
from oggm import utils
9✔
32
from oggm import entity_task
9✔
33
from oggm.exceptions import InvalidParamsError, InvalidWorkflowError
9✔
34
from oggm.core.massbalance import (MultipleFlowlineMassBalance,
9✔
35
                                   ConstantMassBalance,
36
                                   PastMassBalance,
37
                                   AvgClimateMassBalance,
38
                                   RandomMassBalance)
39
from oggm.core.centerlines import Centerline, line_order
9✔
40
from oggm.core.inversion import find_sia_flux_from_thickness
9✔
41

42
# Constants
43
from oggm.cfg import SEC_IN_DAY, SEC_IN_YEAR
9✔
44
from oggm.cfg import G, GAUSSIAN_KERNEL
9✔
45

46
# Module logger
47
log = logging.getLogger(__name__)
9✔
48

49

50
class Flowline(Centerline):
9✔
51
    """A Centerline with additional properties: input to the FlowlineModel
52
    """
53

54
    def __init__(self, line=None, dx=1, map_dx=None,
9✔
55
                 surface_h=None, bed_h=None, rgi_id=None,
56
                 water_level=None, gdir=None):
57
        """ Initialize a Flowline
58

59
        Parameters
60
        ----------
61
        line : :py:class:`shapely.geometry.LineString`
62
            the geometrical line of a :py:class:`oggm.Centerline`
63
        dx : float
64
            Grid spacing in pixel coordinates
65
        map_dx : float
66
            DEM grid spacing in meters
67
        surface_h: :py:class:`numpy.ndarray`
68
            elevation [m] of the flowline grid points
69
        bed_h: :py:class:`numpy.ndarray`
70
            elevation [m] of the bedrock at the flowline grid points
71
        rgi_id : str
72
            The glacier's RGI identifier
73
        water_level : float
74
            The water level (to compute volume below sea-level)
        gdir : :py:class:`oggm.GlacierDirectory`
            the glacier directory (optional; used to compute the longitudes
            and latitudes of the flowline points)
75
        """
76

77
        # This is to add flexibility for testing
78
        if dx is None:
9✔
UNCOV
79
            dx = 1.
×
80
        if line is None:
9✔
81
            coords = np.arange(len(surface_h)) * dx
4✔
82
            line = shpg.LineString(np.vstack([coords, coords * 0.]).T)
4✔
83

84
        super(Flowline, self).__init__(line, dx, surface_h)
9✔
85

86
        self._thick = utils.clip_min(surface_h - bed_h, 0.)
9✔
87
        self.map_dx = map_dx
9✔
88
        self.dx_meter = map_dx * self.dx
9✔
89
        self.bed_h = bed_h
9✔
90
        self.rgi_id = rgi_id
9✔
91
        self.water_level = water_level
9✔
92
        self._point_lons = None
9✔
93
        self._point_lats = None
9✔
94
        self.map_trafo = None
9✔
95
        if gdir is not None:
9✔
96
            self.map_trafo = partial(gdir.grid.ij_to_crs, crs=salem.wgs84)
6✔
97
        # volume not yet removed from the flowline
98
        self.calving_bucket_m3 = 0
9✔
99

100
    def has_ice(self):
9✔
101
        return np.any(self.thick > 0)
6✔
102

103
    @Centerline.widths.getter
9✔
104
    def widths(self):
9✔
105
        """Compute the widths out of H and shape"""
106
        return self.widths_m / self.map_dx
6✔
107

108
    @property
9✔
109
    def thick(self):
9✔
110
        """Needed for overriding later"""
111
        return self._thick
9✔
112

113
    @thick.setter
9✔
114
    def thick(self, value):
9✔
115
        self._thick = utils.clip_min(value, 0)
9✔
116

117
    @Centerline.surface_h.getter
9✔
118
    def surface_h(self):
9✔
119
        return self._thick + self.bed_h
8✔
120

121
    @surface_h.setter
9✔
122
    def surface_h(self, value):
9✔
123
        self.thick = value - self.bed_h
1✔
124

125
    @property
9✔
126
    def bin_area_m2(self):
9✔
127
        # area of the grid point
128
        # this takes the ice thickness into account
129
        return np.where(self.thick > 0, self.widths_m, 0) * self.dx_meter
8✔
130

131
    @property
9✔
132
    def length_m(self):
9✔
133
        # TODO: take calving bucket into account for fine tuned length?
134
        lt = cfg.PARAMS.get('min_ice_thick_for_length', 0)
8✔
135
        if cfg.PARAMS.get('glacier_length_method') == 'consecutive':
8✔
136
            if (self.thick > lt).all():
2✔
137
                nx = len(self.thick)
1✔
138
            else:
139
                nx = np.where(self.thick <= lt)[0][0]
2✔
140
        else:
141
            nx = len(np.where(self.thick > lt)[0])
8✔
142
        return nx * self.dx_meter
8✔
143

144
    @property
9✔
145
    def terminus_index(self):
9✔
146
        # the index of the last point with ice thickness above
147
        # min_ice_thick_for_length and consistent with length
148
        lt = cfg.PARAMS.get('min_ice_thick_for_length', 0)
1✔
149
        if cfg.PARAMS.get('glacier_length_method') == 'consecutive':
1✔
150
            if (self.thick > lt).all():
1✔
151
                ix = len(self.thick) - 1
1✔
152
            else:
153
                ix = np.where(self.thick <= lt)[0][0] - 1
1✔
154
        else:
155
            try:
1✔
156
                ix = np.where(self.thick > lt)[0][-1]
1✔
157
            except IndexError:
1✔
158
                ix = -1
1✔
159
        return ix
1✔
160
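    # --- Editor's illustration (not part of the original module) --------------
    # The two 'glacier_length_method' options used above differ as soon as the
    # ice cover has gaps. A minimal sketch with hypothetical numbers:
    #
    #     import numpy as np
    #     thick = np.array([10., 8., 0., 6., 0.])  # ice-free gap at index 2
    #     lt = 0  # min_ice_thick_for_length
    #     len(np.where(thick > lt)[0])     # default method -> 3 grid points
    #     np.where(thick <= lt)[0][0]      # 'consecutive'  -> 2 grid points
    #
    # 'consecutive' stops at the first grid point at or below the thickness
    # threshold, while the default counts all grid points above it.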

161
    def _compute_point_lls(self):
9✔
162
        if getattr(self, '_point_lons', None) is None:
2✔
163
            if getattr(self, 'map_trafo', None) is None:
2✔
164
                raise AttributeError('Cannot compute lons and lats on this '
2✔
165
                                     'flowline. It needs to be initialized '
166
                                     'with a gdir kwarg.')
167
            lons, lats = self.map_trafo(*self.line.xy)
1✔
168
            self._point_lons = lons
1✔
169
            self._point_lats = lats
1✔
170

171
    @property
9✔
172
    def point_lons(self):
9✔
173
        self._compute_point_lls()
2✔
174
        return self._point_lons
1✔
175

176
    @property
9✔
177
    def point_lats(self):
9✔
178
        self._compute_point_lls()
1✔
179
        return self._point_lats
1✔
180

181
    @property
9✔
182
    def volume_m3(self):
9✔
183
        return utils.clip_min(np.sum(self.section * self.dx_meter) -
8✔
184
                              getattr(self, 'calving_bucket_m3', 0), 0)
185

186
    @property
9✔
187
    def volume_km3(self):
9✔
188
        return self.volume_m3 * 1e-9
4✔
189

190
    def _vol_below_level(self, water_level=0):
9✔
191

192
        thick = np.copy(self.thick)
7✔
193
        n_thick = np.copy(thick)
7✔
194
        bwl = (self.bed_h < water_level) & (thick > 0)
7✔
195
        n_thick[~bwl] = 0
7✔
196
        self.thick = n_thick
7✔
197
        vol_tot = np.sum(self.section * self.dx_meter)
7✔
198
        n_thick[bwl] = utils.clip_max(self.surface_h[bwl],
7✔
199
                                      water_level) - self.bed_h[bwl]
200
        self.thick = n_thick
7✔
201
        vol_bwl = np.sum(self.section * self.dx_meter)
7✔
202
        self.thick = thick
7✔
203
        fac = vol_bwl / vol_tot if vol_tot > 0 else 0
7✔
204
        return utils.clip_min(vol_bwl -
7✔
205
                              getattr(self, 'calving_bucket_m3', 0) * fac, 0)
206

207
    @property
9✔
208
    def volume_bsl_m3(self):
9✔
209
        return self._vol_below_level(water_level=0)
7✔
210

211
    @property
9✔
212
    def volume_bsl_km3(self):
9✔
213
        return self.volume_bsl_m3 * 1e-9
1✔
214

215
    @property
9✔
216
    def volume_bwl_m3(self):
9✔
217
        return self._vol_below_level(water_level=self.water_level)
7✔
218

219
    @property
9✔
220
    def volume_bwl_km3(self):
9✔
221
        return self.volume_bwl_m3 * 1e-9
1✔
222

223
    @property
9✔
224
    def area_m2(self):
9✔
225
        # TODO: take calving bucket into account
226
        return np.sum(self.bin_area_m2)
8✔
227

228
    @property
9✔
229
    def area_km2(self):
9✔
230
        return self.area_m2 * 1e-6
4✔
231

232
    def _add_attrs_to_dataset(self, ds):
9✔
233
        """Add bed specific parameters."""
234
        # This must be done by child classes
235
        raise NotImplementedError()
236

237
    def to_geometry_dataset(self):
9✔
238
        """Makes an xarray Dataset out of the flowline.
239

240
        Useful only for geometry files (FileModel / restart files),
241
        therefore a bit cryptic regarding dimensions.
242
        """
243

244
        h = self.surface_h
4✔
245
        nx = len(h)
4✔
246
        ds = xr.Dataset()
4✔
247
        ds.coords['x'] = np.arange(nx)
4✔
248
        ds.coords['c'] = [0, 1]
4✔
249
        try:
4✔
250
            ds['linecoords'] = (['x', 'c'], np.asarray(self.line.coords))
4✔
UNCOV
251
        except AttributeError:
×
252
            # squeezed lines
UNCOV
253
            pass
×
254
        ds['surface_h'] = (['x'], h)
4✔
255
        ds['bed_h'] = (['x'], self.bed_h)
4✔
256
        ds.attrs['class'] = type(self).__name__
4✔
257
        ds.attrs['map_dx'] = self.map_dx
4✔
258
        ds.attrs['dx'] = self.dx
4✔
259
        self._add_attrs_to_dataset(ds)
4✔
260
        return ds
4✔
261

262
    def to_diagnostics_dataset(self):
9✔
263
        """Makes an xarray Dataset out of the flowline.
264

265
        Useful for run_until_and_store's flowline diagnostics data.
266
        """
267

268
        h = self.bed_h
1✔
269
        nx = len(h)
1✔
270
        ds = xr.Dataset()
1✔
271
        ds.coords['dis_along_flowline'] = np.arange(nx) * self.map_dx * self.dx
1✔
272
        try:
1✔
273
            # This is a bit bad design, but basically if some task
274
            # computed the lons of the flowlines before, use them
275
            # we don't have access to gdir here so we can't convert the coords
276
            ds['point_lons'] = (['dis_along_flowline'], self.point_lons)
1✔
277
            ds['point_lons'].attrs['description'] = 'Longitude along the flowline'
1✔
278
            ds['point_lons'].attrs['unit'] = 'deg'
1✔
279
            ds['point_lats'] = (['dis_along_flowline'], self.point_lats)
1✔
280
            ds['point_lats'].attrs['description'] = 'Latitude along the flowline'
1✔
281
            ds['point_lats'].attrs['unit'] = 'deg'
1✔
282
        except AttributeError:
1✔
283
            # squeezed lines or we haven't computed lons and lats yet
284
            pass
1✔
285
        ds['bed_h'] = (['dis_along_flowline'], h)
1✔
286
        ds['bed_h'].attrs['description'] = 'Bed elevation along the flowline'
1✔
287
        ds['bed_h'].attrs['unit'] = 'm'
1✔
288
        ds.attrs['class'] = type(self).__name__
1✔
289
        ds.attrs['map_dx'] = self.map_dx
1✔
290
        ds.attrs['dx'] = self.dx
1✔
291
        return ds
1✔
292

293

294
class ParabolicBedFlowline(Flowline):
9✔
295
    """A parabolic shaped Flowline with one degree of freedom
296
    """
297

298
    def __init__(self, line=None, dx=None, map_dx=None,
9✔
299
                 surface_h=None, bed_h=None, bed_shape=None, rgi_id=None,
300
                 water_level=None, gdir=None):
301
        """ Instantiate.
302

303
        Parameters
304
        ----------
305
        line : :py:class:`shapely.geometry.LineString`
306
            the geometrical line of a :py:class:`oggm.Centerline`
307
        """
308
        super(ParabolicBedFlowline, self).__init__(line, dx, map_dx,
3✔
309
                                                   surface_h, bed_h,
310
                                                   rgi_id=rgi_id,
311
                                                   water_level=water_level,
312
                                                   gdir=gdir)
313

314
        assert np.all(np.isfinite(bed_shape))
3✔
315
        self.bed_shape = bed_shape
3✔
316

317
    @property
9✔
318
    def widths_m(self):
9✔
319
        """Compute the widths out of H and shape"""
320
        return np.sqrt(4*self.thick/self.bed_shape)
3✔
321

322
    @property
9✔
323
    def section(self):
9✔
324
        return 2./3. * self.widths_m * self.thick
3✔
325

326
    @section.setter
9✔
327
    def section(self, val):
9✔
328
        self.thick = (0.75 * val * np.sqrt(self.bed_shape))**(2./3.)
3✔
329

330
    @utils.lazy_property
9✔
331
    def shape_str(self):
9✔
332
        """The bed shape in text (for debug and other things)"""
333
        return np.repeat('parabolic', self.nx)
1✔
334

335
    def _add_attrs_to_dataset(self, ds):
9✔
336
        """Add bed specific parameters."""
337
        ds['bed_shape'] = (['x'], self.bed_shape)
1✔
338

339
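An editor's aside on the parabolic bed used above: the widths follow from the ice thickness and the shape factor, the cross-section carries the 2/3 parabola factor, and the `section` setter inverts that relation. A minimal round-trip sketch with hypothetical numbers (plain NumPy, not part of the module):

import numpy as np

bed_shape = 0.003   # hypothetical parabolic shape factor [m-1]
thick = 120.0       # hypothetical ice thickness [m]

width = np.sqrt(4 * thick / bed_shape)           # as in widths_m above
section = 2. / 3. * width * thick                # parabolic cross-section [m2]
thick_back = (0.75 * section * np.sqrt(bed_shape)) ** (2. / 3.)  # setter inversion

assert np.isclose(thick_back, thick)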

340
class RectangularBedFlowline(Flowline):
9✔
341
    """Simple shaped Flowline, glacier width does not change with ice thickness
342

343
    """
344

345
    def __init__(self, line=None, dx=None, map_dx=None,
9✔
346
                 surface_h=None, bed_h=None, widths=None, rgi_id=None,
347
                 water_level=None, gdir=None):
348
        """ Instantiate.
349

350
        Parameters
351
        ----------
352
        line : :py:class:`shapely.geometry.LineString`
353
            the geometrical line of a :py:class:`oggm.Centerline`
354

355
        """
356
        super(RectangularBedFlowline, self).__init__(line, dx, map_dx,
3✔
357
                                                     surface_h, bed_h,
358
                                                     rgi_id=rgi_id,
359
                                                     water_level=water_level,
360
                                                     gdir=gdir)
361

362
        self._widths = widths
3✔
363

364
    @property
9✔
365
    def widths_m(self):
9✔
366
        """Compute the widths out of H and shape"""
367
        return self._widths * self.map_dx
3✔
368

369
    @property
9✔
370
    def section(self):
9✔
371
        return self.widths_m * self.thick
3✔
372

373
    @section.setter
9✔
374
    def section(self, val):
9✔
375
        self.thick = val / self.widths_m
3✔
376

377
    @utils.lazy_property
9✔
378
    def shape_str(self):
9✔
379
        """The bed shape in text (for debug and other things)"""
380
        return np.repeat('rectangular', self.nx)
2✔
381

382
    def _add_attrs_to_dataset(self, ds):
9✔
383
        """Add bed specific parameters."""
384
        ds['widths'] = (['x'], self._widths)
1✔
385

386
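An editor's usage sketch for the rectangular bed above, with hypothetical numbers: an idealized flowline can be built without a map projection (line=None, gdir=None). The sketch only touches properties shown above and avoids anything that needs an initialized cfg.PARAMS (such as length_m).

import numpy as np
from oggm.core.flowline import RectangularBedFlowline

nx, map_dx = 200, 100.                   # 200 grid points, 100 m grid spacing
bed_h = np.linspace(3400., 1400., nx)    # linearly sloping bed [m]
surface_h = bed_h + 50.                  # 50 m of ice everywhere
widths = np.zeros(nx) + 5.               # widths in grid-point units (500 m)

fl = RectangularBedFlowline(dx=1., map_dx=map_dx, surface_h=surface_h,
                            bed_h=bed_h, widths=widths)
print(fl.area_km2, fl.volume_km3)        # -> 10.0 km2, 0.5 km3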

387
class TrapezoidalBedFlowline(Flowline):
9✔
388
    """A Flowline with trapezoidal shape and two degrees of freedom
389
    """
390

391
    def __init__(self, line=None, dx=None, map_dx=None, surface_h=None,
9✔
392
                 bed_h=None, widths=None, lambdas=None, rgi_id=None,
393
                 water_level=None, gdir=None):
394
        """ Instantiate.
395

396
        Parameters
397
        ----------
398
        line : :py:class:`shapely.geometry.LineString`
399
            the geometrical line of a :py:class:`oggm.Centerline`
400
        """
401
        super(TrapezoidalBedFlowline, self).__init__(line, dx, map_dx,
2✔
402
                                                     surface_h, bed_h,
403
                                                     rgi_id=rgi_id,
404
                                                     water_level=water_level,
405
                                                     gdir=gdir)
406

407
        self._w0_m = widths * self.map_dx - lambdas * self.thick
2✔
408

409
        if np.any(self._w0_m <= 0):
2✔
UNCOV
410
            raise ValueError('Trapezoid beds need to have origin widths > 0.')
×
411

412
        self._prec = np.where(lambdas == 0)[0]
2✔
413

414
        self._lambdas = lambdas
2✔
415

416
    @property
9✔
417
    def widths_m(self):
9✔
418
        """Compute the widths out of H and shape"""
419
        return self._w0_m + self._lambdas * self.thick
2✔
420

421
    @property
9✔
422
    def section(self):
9✔
423
        return (self.widths_m + self._w0_m) / 2 * self.thick
2✔
424

425
    @section.setter
9✔
426
    def section(self, val):
9✔
427
        b = 2 * self._w0_m
2✔
428
        a = 2 * self._lambdas
2✔
429
        with np.errstate(divide='ignore', invalid='ignore'):
2✔
430
            thick = (np.sqrt(b**2 + 4 * a * val) - b) / a
2✔
431
        thick[self._prec] = val[self._prec] / self._w0_m[self._prec]
2✔
432
        self.thick = thick
2✔
433

434
    @utils.lazy_property
9✔
435
    def shape_str(self):
9✔
436
        """The bed shape in text (for debug and other things)"""
437
        return np.repeat('trapezoid', self.nx)
1✔
438

439
    def _add_attrs_to_dataset(self, ds):
9✔
440
        """Add bed specific parameters."""
441
        ds['widths'] = (['x'], self.widths)
1✔
442
        ds['lambdas'] = (['x'], self._lambdas)
1✔
443

444
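Likewise for the trapezoidal bed above: the widths grow linearly with thickness (slope lambda), the cross-section is the trapezoid area, and the `section` setter inverts the resulting quadratic (the lambda == 0 case is handled separately via `_prec`). A round-trip sketch with hypothetical numbers:

import numpy as np

w0, lam, thick = 300., 2., 80.        # bottom width [m], lambda [-], thickness [m]

width = w0 + lam * thick              # as in widths_m above
section = (width + w0) / 2 * thick    # trapezoid cross-section [m2]

b, a = 2 * w0, 2 * lam                # as in the section setter above
thick_back = (np.sqrt(b**2 + 4 * a * section) - b) / a

assert np.isclose(thick_back, thick)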

445
class MixedBedFlowline(Flowline):
9✔
446
    """A Flowline which can take a combination of different shapes (default)
447

448
    The default shape is parabolic. At ice divides a rectangular shape is used.
449
    If the parabola gets too flat, a trapezoidal shape is used instead.
450
    """
451

452
    def __init__(self, *, line=None, dx=None, map_dx=None, surface_h=None,
9✔
453
                 bed_h=None, section=None, bed_shape=None,
454
                 is_trapezoid=None, lambdas=None, widths_m=None, rgi_id=None,
455
                 water_level=None, gdir=None):
456
        """ Instantiate.
457

458
        Parameters
459
        ----------
460
        line : :py:class:`shapely.geometry.LineString`
461
            the geometrical line of a :py:class:`oggm.Centerline`
462
        """
463

464
        super(MixedBedFlowline, self).__init__(line=line, dx=dx, map_dx=map_dx,
8✔
465
                                               surface_h=surface_h.copy(),
466
                                               bed_h=bed_h.copy(),
467
                                               rgi_id=rgi_id,
468
                                               water_level=water_level,
469
                                               gdir=gdir)
470

471
        # To speedup calculations if no trapezoid bed is present
472
        self._do_trapeze = np.any(is_trapezoid)
8✔
473

474
        # Parabolic
475
        assert len(bed_shape) == self.nx
8✔
476
        self.bed_shape = bed_shape.copy()
8✔
477
        self._sqrt_bed = np.sqrt(bed_shape)
8✔
478

479
        # Trapeze
480
        assert len(lambdas) == self.nx
8✔
481
        assert len(is_trapezoid) == self.nx
8✔
482
        self._lambdas = lambdas.copy()
8✔
483
        self._ptrap = np.where(is_trapezoid)[0]
8✔
484
        self.is_trapezoid = is_trapezoid
8✔
485
        self.is_rectangular = self.is_trapezoid & (self._lambdas == 0)
8✔
486

487
        # Sanity
488
        self.bed_shape[is_trapezoid] = np.NaN
8✔
489
        self._lambdas[~is_trapezoid] = np.NaN
8✔
490

491
        # Here we have to compute the widths out of section and lambda
492
        thick = surface_h - bed_h
8✔
493
        with np.errstate(divide='ignore', invalid='ignore'):
8✔
494
            self._w0_m = section / thick - lambdas * thick / 2
8✔
495

496
        assert np.all(section >= 0)
8✔
497
        need_w = (section == 0) & is_trapezoid
8✔
498
        if np.any(need_w):
8✔
499
            if widths_m is None:
7✔
UNCOV
500
                raise ValueError('We need a non-zero section for trapezoid '
×
501
                                 'shapes unless you provide widths_m.')
502
            self._w0_m[need_w] = widths_m[need_w]
7✔
503

504
        self._w0_m[~is_trapezoid] = np.NaN
8✔
505

506
        if (np.any(self._w0_m[self._ptrap] <= 0) or
8✔
507
                np.any(~np.isfinite(self._w0_m[self._ptrap]))):
UNCOV
508
            raise ValueError('Trapezoid beds need to have origin widths > 0.')
×
509

510
        assert np.all(self.bed_shape[~is_trapezoid] > 0)
8✔
511

512
        self._prec = np.where(is_trapezoid & (lambdas == 0))[0]
8✔
513

514
        assert np.allclose(section, self.section)
8✔
515

516
    @property
9✔
517
    def widths_m(self):
9✔
518
        """Compute the widths out of H and shape"""
519
        out = np.sqrt(4*self.thick/self.bed_shape)
8✔
520
        if self._do_trapeze:
8✔
521
            out[self._ptrap] = (self._w0_m[self._ptrap] +
8✔
522
                                self._lambdas[self._ptrap] *
523
                                self.thick[self._ptrap])
524
        return out
8✔
525

526
    @property
9✔
527
    def section(self):
9✔
528
        out = 2./3. * self.widths_m * self.thick
8✔
529
        if self._do_trapeze:
8✔
530
            out[self._ptrap] = ((self.widths_m[self._ptrap] +
8✔
531
                                 self._w0_m[self._ptrap]) / 2 *
532
                                self.thick[self._ptrap])
533
        return out
8✔
534

535
    @section.setter
9✔
536
    def section(self, val):
9✔
537
        out = (0.75 * val * self._sqrt_bed)**(2./3.)
8✔
538
        if self._do_trapeze:
8✔
539
            b = 2 * self._w0_m[self._ptrap]
8✔
540
            a = 2 * self._lambdas[self._ptrap]
8✔
541
            with np.errstate(divide='ignore', invalid='ignore'):
8✔
542
                out[self._ptrap] = ((np.sqrt(b ** 2 + 4 * a * val[self._ptrap])
8✔
543
                                     - b) / a)
544
            out[self._prec] = val[self._prec] / self._w0_m[self._prec]
8✔
545
        self.thick = out
8✔
546

547
    @utils.lazy_property
9✔
548
    def shape_str(self):
9✔
549
        """The bed shape in text (for debug and other things)"""
550
        out = np.repeat('rectangular', self.nx)
2✔
551
        out[~ self.is_trapezoid] = 'parabolic'
2✔
552
        out[self.is_trapezoid & ~ self.is_rectangular] = 'trapezoid'
2✔
553
        return out
2✔
554

555
    def _add_attrs_to_dataset(self, ds):
9✔
556
        """Add bed specific parameters."""
557

558
        ds['section'] = (['x'], self.section)
4✔
559
        ds['bed_shape'] = (['x'], self.bed_shape)
4✔
560
        ds['is_trapezoid'] = (['x'], self.is_trapezoid)
4✔
561
        ds['widths_m'] = (['x'], self._w0_m)
4✔
562
        ds['lambdas'] = (['x'], self._lambdas)
4✔
563

564

565
class FlowlineModel(object):
9✔
566
    """Interface to OGGM's flowline models"""
567

568
    def __init__(self, flowlines, mb_model=None, y0=0., glen_a=None,
9✔
569
                 fs=None, inplace=False, smooth_trib_influx=True,
570
                 is_tidewater=False, is_lake_terminating=False,
571
                 mb_elev_feedback='annual', check_for_boundaries=None,
572
                 water_level=None, required_model_steps='monthly'):
573
        """Create a new flowline model from the flowlines and a MB model.
574

575
        Parameters
576
        ----------
577
        flowlines : list
578
            a list of :py:class:`oggm.Flowline` instances, sorted by order
579
        mb_model : :py:class:`oggm.core.massbalance.MassBalanceModel`
580
            the MB model to use
581
        y0 : int
582
            the starting year of the simulation
583
        glen_a : float
584
            glen's parameter A
585
        fs: float
586
            sliding parameter
587
        inplace : bool
588
            whether or not to make a copy of the flowline objects for the run.
589
            Setting to True implies that your objects will be modified at run
590
            time by the model (can help to spare memory)
591
        smooth_trib_influx : bool
592
            whether to smooth the mass influx from the incoming tributary.
593
            The default is to use a Gaussian kernel over a 9-grid-point
594
            window.
595
        is_tidewater: bool, default: False
596
            is this a tidewater glacier?
597
        is_lake_terminating: bool, default: False
598
            is this a lake terminating glacier?
599
        mb_elev_feedback : str, default: 'annual'
600
            'never', 'always', 'annual', or 'monthly': how often the
601
            mass balance should be recomputed from the mass balance model.
602
            'Never' is equivalent to 'annual' but without elevation feedback
603
            at all (the heights are taken from the first call).
604
        check_for_boundaries : bool
605
            whether the model should raise an error when the glacier exceeds
606
            the domain boundaries. The default is to follow
607
            PARAMS['error_when_glacier_reaches_boundaries']
608
        required_model_steps : str
609
            some Flowline models have an adaptive time stepping scheme, which
610
            takes irregular steps towards the goal of a "run_until". The
611
            default ('monthly') makes sure that the model results are
612
            consistent whether the users want data at monthly or annual
613
            timesteps by forcing the model to land on monthly steps even if
614
            only annual updates are required. You may want to change this
615
            for optimisation reasons for models that don't require adaptive
616
            steps (for example the deltaH method).
617
        """
618

619
        self.is_tidewater = is_tidewater
9✔
620
        self.is_lake_terminating = is_lake_terminating
9✔
621
        self.is_marine_terminating = is_tidewater and not is_lake_terminating
9✔
622

623
        if water_level is None:
9✔
624
            self.water_level = 0
8✔
625
            if self.is_lake_terminating:
8✔
626
                if not flowlines[-1].has_ice():
1✔
627
                    raise InvalidParamsError('Set `water_level` for lake '
1✔
628
                                             'terminating glaciers in '
629
                                             'idealized runs')
630
                # Arbitrary water level: a free board below the elevation of
                # the last grid point with ice
631
                min_h = flowlines[-1].surface_h[flowlines[-1].thick > 0][-1]
1✔
632
                self.water_level = (min_h -
1✔
633
                                    cfg.PARAMS['free_board_lake_terminating'])
634
        else:
635
            self.water_level = water_level
5✔
636

637
        # Mass balance
638
        self.mb_elev_feedback = mb_elev_feedback.lower()
9✔
639
        if self.mb_elev_feedback in ['never', 'annual']:
9✔
640
            self.mb_step = 'annual'
9✔
641
        elif self.mb_elev_feedback in ['always', 'monthly']:
3✔
642
            self.mb_step = 'monthly'
3✔
643
        self.mb_model = mb_model
9✔
644

645
        # Defaults
646
        if glen_a is None:
9✔
647
            glen_a = cfg.PARAMS['glen_a']
6✔
648
        if fs is None:
9✔
649
            fs = cfg.PARAMS['fs']
3✔
650
        self.glen_a = glen_a
9✔
651
        self.fs = fs
9✔
652
        self.glen_n = cfg.PARAMS['glen_n']
9✔
653
        self.rho = cfg.PARAMS['ice_density']
9✔
654
        if check_for_boundaries is None:
9✔
655
            check_for_boundaries = cfg.PARAMS[('error_when_glacier_reaches_'
9✔
656
                                               'boundaries')]
657
        self.check_for_boundaries = check_for_boundaries
9✔
658

659
        # we keep glen_a as input, but for optimisation we stick to "fd"
660
        self._fd = 2. / (cfg.PARAMS['glen_n']+2) * self.glen_a
9✔
661

662
        # Calving shenanigans
663
        self.calving_m3_since_y0 = 0.  # total calving since time y0
9✔
664
        self.calving_rate_myr = 0.
9✔
665

666
        # Time
667
        if required_model_steps not in ['annual', 'monthly']:
9✔
UNCOV
668
            raise InvalidParamsError('required_model_steps needs to be of '
×
669
                                     '`annual` or `monthly`.')
670
        self.required_model_steps = required_model_steps
9✔
671
        self.y0 = None
9✔
672
        self.t = None
9✔
673
        self.reset_y0(y0)
9✔
674

675
        self.fls = None
9✔
676
        self._tributary_indices = None
9✔
677
        self.reset_flowlines(flowlines, inplace=inplace,
9✔
678
                             smooth_trib_influx=smooth_trib_influx)
679

680
    @property
9✔
681
    def mb_model(self):
9✔
682
        return self._mb_model
7✔
683

684
    @mb_model.setter
9✔
685
    def mb_model(self, value):
9✔
686
        # We need a setter because the MB func is stored as an attr too
687
        _mb_call = None
9✔
688
        if value:
9✔
689
            if self.mb_elev_feedback in ['always', 'monthly']:
9✔
690
                _mb_call = value.get_monthly_mb
3✔
691
            elif self.mb_elev_feedback in ['annual', 'never']:
9✔
692
                _mb_call = value.get_annual_mb
9✔
693
            else:
UNCOV
694
                raise ValueError('mb_elev_feedback not understood')
×
695
        self._mb_model = value
9✔
696
        self._mb_call = _mb_call
9✔
697
        self._mb_current_date = None
9✔
698
        self._mb_current_out = dict()
9✔
699
        self._mb_current_heights = dict()
9✔
700

701
    def reset_y0(self, y0):
9✔
702
        """Reset the initial model time"""
703
        self.y0 = y0
9✔
704
        self.t = 0
9✔
705

706
    def reset_flowlines(self, flowlines, inplace=False,
9✔
707
                        smooth_trib_influx=True):
708
        """Reset the initial model flowlines"""
709

710
        if not inplace:
9✔
711
            flowlines = copy.deepcopy(flowlines)
6✔
712

713
        try:
9✔
714
            len(flowlines)
9✔
715
        except TypeError:
×
UNCOV
716
            flowlines = [flowlines]
×
717

718
        self.fls = flowlines
9✔
719

720
        # list of tributary junction indices and smoothing kernels
721
        trib_ind = []
9✔
722
        for fl in self.fls:
9✔
723
            # Also important: propagate the water level to each flowline
724
            fl.water_level = self.water_level
9✔
725
            if fl.flows_to is None:
9✔
726
                trib_ind.append((None, None, None, None))
9✔
727
                continue
9✔
728
            idl = self.fls.index(fl.flows_to)
8✔
729
            ide = fl.flows_to_indice
8✔
730
            if not smooth_trib_influx:
8✔
731
                gk = 1
1✔
732
                id0 = ide
1✔
733
                id1 = ide+1
1✔
734
            elif fl.flows_to.nx >= 9:
8✔
735
                gk = GAUSSIAN_KERNEL[9]
8✔
736
                id0 = ide-4
8✔
737
                id1 = ide+5
8✔
738
            elif fl.flows_to.nx >= 7:
×
739
                gk = GAUSSIAN_KERNEL[7]
×
740
                id0 = ide-3
×
741
                id1 = ide+4
×
742
            elif fl.flows_to.nx >= 5:
×
743
                gk = GAUSSIAN_KERNEL[5]
×
744
                id0 = ide-2
×
UNCOV
745
                id1 = ide+3
×
746
            trib_ind.append((idl, id0, id1, gk))
8✔
747

748
        self._tributary_indices = trib_ind
9✔
749
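    # --- Editor's note (not part of the original module) -----------------------
    # Each entry of `_tributary_indices` built above is a tuple
    # (idl, id0, id1, gk): `idl` is the index of the flowline receiving the
    # tributary, `[id0:id1]` the window of grid points around the junction,
    # and `gk` the Gaussian weights used to spread the tributary flux over
    # that window (a scalar 1 when `smooth_trib_influx=False`). Flowlines that
    # do not flow into another one store (None, None, None, None).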

750
    @property
9✔
751
    def yr(self):
9✔
752
        return self.y0 + self.t / SEC_IN_YEAR
8✔
753

754
    @property
9✔
755
    def area_m2(self):
9✔
756
        return np.sum([f.area_m2 for f in self.fls])
8✔
757

758
    @property
9✔
759
    def volume_m3(self):
9✔
760
        return np.sum([f.volume_m3 for f in self.fls])
8✔
761

762
    @property
9✔
763
    def volume_km3(self):
9✔
764
        return self.volume_m3 * 1e-9
5✔
765

766
    @property
9✔
767
    def volume_bsl_m3(self):
9✔
768
        return np.sum([f.volume_bsl_m3 for f in self.fls])
7✔
769

770
    @property
9✔
771
    def volume_bsl_km3(self):
9✔
772
        return self.volume_bsl_m3 * 1e-9
1✔
773

774
    @property
9✔
775
    def volume_bwl_m3(self):
9✔
776
        return np.sum([f.volume_bwl_m3 for f in self.fls])
7✔
777

778
    @property
9✔
779
    def volume_bwl_km3(self):
9✔
780
        return self.volume_bwl_m3 * 1e-9
1✔
781

782
    @property
9✔
783
    def area_km2(self):
9✔
784
        return self.area_m2 * 1e-6
4✔
785

786
    @property
9✔
787
    def length_m(self):
9✔
788
        return self.fls[-1].length_m
8✔
789

790
    def get_mb(self, heights, year=None, fl_id=None, fls=None):
9✔
791
        """Get the mass balance at the requested height and time.
792

793
        Optimized so that no mb model call is necessary at each step.
794
        """
795

796
        # Do we even have to optimise?
797
        if self.mb_elev_feedback == 'always':
8✔
798
            return self._mb_call(heights, year=year, fl_id=fl_id, fls=fls)
2✔
799

800
        # Ok, we cache the result - for this we need the flowline id
801
        if fl_id is None:
8✔
UNCOV
802
            raise ValueError('Need fls_id')
×
803

804
        if self.mb_elev_feedback == 'never':
8✔
805
            # On the very first call we store the heights
806
            if fl_id not in self._mb_current_heights:
1✔
807
                # We need to reset just this tributary
808
                self._mb_current_heights[fl_id] = heights
1✔
809
            # On all calls we then use the stored heights
810
            heights = self._mb_current_heights[fl_id]
1✔
811

812
        date = utils.floatyear_to_date(year)
8✔
813
        if self.mb_elev_feedback in ['annual', 'never']:
8✔
814
            # ignore month changes
815
            date = (date[0], date[0])
8✔
816

817
        if self._mb_current_date == date:
8✔
818
            if fl_id not in self._mb_current_out:
8✔
819
                # We need to reset just this tributary
820
                self._mb_current_out[fl_id] = self._mb_call(heights,
6✔
821
                                                            year=year,
822
                                                            fl_id=fl_id,
823
                                                            fls=fls)
824
        else:
825
            # We need to reset all
826
            self._mb_current_date = date
8✔
827
            self._mb_current_out = dict()
8✔
828
            self._mb_current_out[fl_id] = self._mb_call(heights,
8✔
829
                                                        year=year,
830
                                                        fl_id=fl_id,
831
                                                        fls=fls)
832

833
        return self._mb_current_out[fl_id]
8✔
834
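    # --- Editor's note (not part of the original module) -----------------------
    # The cache above is keyed by flowline id and invalidated whenever the
    # date changes; with 'annual' or 'never' feedback the month is ignored,
    # so the mass balance model is called at most once per year and per
    # flowline.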

835
    def to_geometry_netcdf(self, path):
9✔
836
        """Creates a netcdf group file storing the state of the model."""
837

838
        flows_to_id = []
4✔
839
        for trib in self._tributary_indices:
4✔
840
            flows_to_id.append(trib[0] if trib[0] is not None else -1)
4✔
841

842
        ds = xr.Dataset()
4✔
843
        try:
4✔
844
            ds.attrs['description'] = 'OGGM model output'
4✔
845
            ds.attrs['oggm_version'] = __version__
4✔
846
            ds.attrs['calendar'] = '365-day no leap'
4✔
847
            ds.attrs['creation_date'] = strftime("%Y-%m-%d %H:%M:%S", gmtime())
4✔
848
            ds['flowlines'] = ('flowlines', np.arange(len(flows_to_id)))
4✔
849
            ds['flows_to_id'] = ('flowlines', flows_to_id)
4✔
850
            ds.to_netcdf(path)
4✔
851
            for i, fl in enumerate(self.fls):
4✔
852
                ds = fl.to_geometry_dataset()
4✔
853
                ds.to_netcdf(path, 'a', group='fl_{}'.format(i))
4✔
854
        finally:
855
            ds.close()
4✔
856

857
    def check_domain_end(self):
9✔
858
        """Returns False if the glacier reaches the domains bound."""
UNCOV
859
        return np.isclose(self.fls[-1].thick[-1], 0)
×
860

861
    def step(self, dt):
9✔
862
        """Advance the numerical simulation of one single step.
863

864
        Important: the step dt is a maximum boundary that is *not* guaranteed
865
        to be met if dt is too large for the underlying numerical
866
        implementation. However, ``step(dt)`` should never cross the desired
867
        time step, i.e. if dt is small enough to ensure stability, step
868
        should match it.
869

870
        The caller will know how much has actually been advanced by looking
871
        at the output of ``step()`` or by monitoring ``self.t`` or ``self.yr``.
872

873
        Parameters
874
        ----------
875
        dt : float
876
             the step length in seconds
877

878
        Returns
879
        -------
880
        the actual dt chosen by the numerical implementation. Guaranteed to
881
        be dt or lower.
882
        """
883
        raise NotImplementedError
884

885
    def run_until(self, y1):
9✔
886
        """Runs the model from the current year up to a given year date y1.
887

888
        This function runs the model for the time difference y1-self.y0.
889
        If self.y0 has not been specified at some point, it is 0 and y1 will
890
        be the time span in years to run the model for.
891

892
        Parameters
893
        ----------
894
        y1 : float
895
            The year up to which the model should run
896
        """
897

898
        if self.required_model_steps == 'monthly':
8✔
899
            # We force timesteps to monthly frequencies for consistent results
900
            # among use cases (monthly or yearly output) and also to prevent
901
            # "too large" steps in the adaptive scheme.
902
            ts = utils.monthly_timeseries(self.yr, y1)
8✔
903
            # Add the last date to be sure we end on it - implementations
904
            # of `step()` and of the loop below should not run twice anyways
905
            ts = np.append(ts, y1)
8✔
906
        else:
907
            ts = np.arange(int(self.yr), int(y1+1))
2✔
908

909
        # Loop over the steps we want to meet
910
        for y in ts:
8✔
911
            t = (y - self.y0) * SEC_IN_YEAR
8✔
912
            # because of CFL, step() doesn't ensure that the end date is met
913
            # let's run the steps until we reach our desired date
914
            while self.t < t:
8✔
915
                self.step(t - self.t)
8✔
916

917
            # Check for domain bounds
918
            if self.check_for_boundaries:
8✔
919
                if self.fls[-1].thick[-1] > 10:
8✔
920
                    raise RuntimeError('Glacier exceeds domain boundaries, '
2✔
921
                                       'at year: {}'.format(self.yr))
922

923
            # Check for NaNs
924
            for fl in self.fls:
8✔
925
                if np.any(~np.isfinite(fl.thick)):
8✔
UNCOV
926
                    raise FloatingPointError('NaN in numerical solution, '
×
927
                                             'at year: {}'.format(self.yr))
928

929
    def run_until_and_store(self, y1,
9✔
930
                            diag_path=None,
931
                            fl_diag_path=False,
932
                            geom_path=False,
933
                            store_monthly_step=None,
934
                            stop_criterion=None,
935
                            fixed_geometry_spinup_yr=None,
936
                            dynamic_spinup_min_ice_thick=None,
937
                            make_compatible=False
938
                            ):
939
        """Runs the model and returns intermediate steps in xarray datasets.
940

941
        This function repeatedly calls FlowlineModel.run_until for either
942
        monthly or yearly time steps up till the upper time boundary y1.
943

944
        Parameters
945
        ----------
946
        y1 : int
947
            The year up to which the model should run (needs to be
948
            a full year)
949
        diag_path : str
950
            Path and filename where to store the glacier-wide diagnostics
951
            dataset (length, area, volume, etc.) as controlled by
952
            cfg.PARAMS['store_diagnostic_variables'].
953
            The default (None) is to not store the dataset to disk but return
954
            the dataset to the user after execution.
955
        fl_diag_path : str, None or bool
956
            Path and filename where to store the model diagnostics along the
957
            flowline(s).
958
        geom_path : str, None or bool
959
            Path and filename where to store the model geometry dataset. This
960
            dataset contains all necessary info to retrieve the full glacier
961
            geometry after the run, with a FileModel. This is stored
962
            on an annual basis and can be used to restart a run from a
963
            past simulation's geometry ("restart file").
964
            The default (False) prevents creating this dataset altogether
965
            (for optimisation purposes).
966
            Set this to None to not store the dataset to disk but return
967
            the dataset to the user after execution.
968
        store_monthly_step : bool
969
            If True (False), model diagnostics will be stored monthly (yearly).
970
            If unspecified, we follow the update of the MB model, which
971
            defaults to yearly (see __init__).
972
        stop_criterion : func
973
            a function evaluating the model state (and possibly evolution over
974
            time), and deciding when to stop the simulation. Its signature
975
            should look like:
976

977
            stop, new_state = stop_criterion(model, previous_state)
978

979
            where stop is a bool, and new_state a container (likely: dict)
980
            initialized by the function itself on the first call (previous_state
981
            can and should be None on the first call). See
982
            `zero_glacier_stop_criterion` and the sketch below for examples.
983
        fixed_geometry_spinup_yr : int
984
            if set to an integer, the model will artificially prolong
985
            all outputs of run_until_and_store to encompass all time stamps
986
            starting from the chosen year. The only outputs affected are the
987
            glacier-wide diagnostic files; all other outputs are set
988
            to constants during "spinup"
989
        dynamic_spinup_min_ice_thick : float or None
990
            if set to a float, additional variables are saved which are useful
991
            in combination with the dynamic spinup. In particular, only grid
992
            points with a minimum ice thickness are considered for the total
993
            area or the total volume. This is useful to smooth out yearly
994
            fluctuations when matching to observations. The names of these new
995
            variables include the suffix _min_h (e.g. 'area_m2_min_h')
996
        make_compatible : bool
997
            if set to True, this will add all variables to the resulting dataset
998
            so that it can be combined with any other one. This is necessary if
999
            different spinup methods are used. For example if using the dynamic
1000
            spinup and setting fixed geometry spinup as fallback, the variable
1001
            'is_fixed_geometry_spinup' must be added to the dynamic spinup so
1002
            it is possible to compile both glaciers together.
1003
        Returns
1004
        -------
1005
        geom_ds : xarray.Dataset or None
1006
            stores the entire glacier geometry. It is useful to visualize the
1007
            glacier geometry or to restart a new run from a modelled geometry.
1008
            The glacier state is stored at the beginning of each hydrological
1009
            year (not in between in order to spare disk space).
1010
        diag_ds : xarray.Dataset
1011
            stores a few diagnostic variables such as the volume, area, length
1012
            and ELA of the glacier.
1013
        """
1014
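        # --- Editor's sketch (not part of the original module) -----------------
        # A minimal, hypothetical `stop_criterion` callback following the
        # signature documented above (stop once the glacier has been ice free
        # for five consecutive years):
        #
        #     def stop_when_ice_free(model, state):
        #         if state is None:
        #             state = {'ice_free_since': None}
        #         if model.volume_m3 == 0:
        #             if state['ice_free_since'] is None:
        #                 state['ice_free_since'] = model.yr
        #             stop = (model.yr - state['ice_free_since']) >= 5
        #         else:
        #             state['ice_free_since'] = None
        #             stop = False
        #         return stop, state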

1015
        if int(y1) != y1:
7✔
UNCOV
1016
            raise InvalidParamsError('run_until_and_store only accepts '
×
1017
                                     'integer year dates.')
1018

1019
        if not self.mb_model.hemisphere:
7✔
UNCOV
1020
            raise InvalidParamsError('run_until_and_store needs a '
×
1021
                                     'mass balance model with an unambiguous '
1022
                                     'hemisphere.')
1023

1024
        # This is only needed for consistency to be able to merge two runs
1025
        if dynamic_spinup_min_ice_thick is None:
7✔
1026
            dynamic_spinup_min_ice_thick = cfg.PARAMS['dynamic_spinup_min_ice_thick']
7✔
1027

1028
        # Do we have a spinup?
1029
        do_fixed_spinup = fixed_geometry_spinup_yr is not None
7✔
1030
        y0 = fixed_geometry_spinup_yr if do_fixed_spinup else self.yr
7✔
1031

1032
        # Do we need to create a geometry or flowline diagnostics dataset?
1033
        do_geom = geom_path is None or geom_path
7✔
1034
        do_fl_diag = fl_diag_path is None or fl_diag_path
7✔
1035

1036
        # time
1037
        yearly_time = np.arange(np.floor(y0), np.floor(y1)+1)
7✔
1038

1039
        if store_monthly_step is None:
7✔
1040
            store_monthly_step = self.mb_step == 'monthly'
4✔
1041

1042
        if store_monthly_step:
7✔
1043
            monthly_time = utils.monthly_timeseries(y0, y1)
2✔
1044
        else:
1045
            monthly_time = np.arange(np.floor(y0), np.floor(y1)+1)
7✔
1046

1047
        sm = cfg.PARAMS['hydro_month_' + self.mb_model.hemisphere]
7✔
1048

1049
        yrs, months = utils.floatyear_to_date(monthly_time)
7✔
1050
        cyrs, cmonths = utils.hydrodate_to_calendardate(yrs, months,
7✔
1051
                                                        start_month=sm)
1052

1053
        # init output
1054
        if geom_path:
7✔
1055
            self.to_geometry_netcdf(geom_path)
4✔
1056

1057
        ny = len(yearly_time)
7✔
1058
        if ny == 1:
7✔
1059
            yrs = [yrs]
1✔
1060
            cyrs = [cyrs]
1✔
1061
            months = [months]
1✔
1062
            cmonths = [cmonths]
1✔
1063
        nm = len(monthly_time)
7✔
1064

1065
        if do_geom or do_fl_diag:
7✔
1066
            sects = [(np.zeros((ny, fl.nx)) * np.NaN) for fl in self.fls]
4✔
1067
            widths = [(np.zeros((ny, fl.nx)) * np.NaN) for fl in self.fls]
4✔
1068
            buckets = [np.zeros(ny) for _ in self.fls]
4✔
1069

1070
        # Diagnostics dataset
1071
        diag_ds = xr.Dataset()
7✔
1072

1073
        # Global attributes
1074
        diag_ds.attrs['description'] = 'OGGM model output'
7✔
1075
        diag_ds.attrs['oggm_version'] = __version__
7✔
1076
        diag_ds.attrs['calendar'] = '365-day no leap'
7✔
1077
        diag_ds.attrs['creation_date'] = strftime("%Y-%m-%d %H:%M:%S",
7✔
1078
                                                  gmtime())
1079
        diag_ds.attrs['water_level'] = self.water_level
7✔
1080
        diag_ds.attrs['glen_a'] = self.glen_a
7✔
1081
        diag_ds.attrs['fs'] = self.fs
7✔
1082

1083
        # Add MB model attributes
1084
        diag_ds.attrs['mb_model_class'] = self.mb_model.__class__.__name__
7✔
1085
        for k, v in self.mb_model.__dict__.items():
7✔
1086
            if np.isscalar(v) and not k.startswith('_'):
7✔
1087
                diag_ds.attrs['mb_model_{}'.format(k)] = v
7✔
1088

1089
        # Coordinates
1090
        diag_ds.coords['time'] = ('time', monthly_time)
7✔
1091
        diag_ds.coords['hydro_year'] = ('time', yrs)
7✔
1092
        diag_ds.coords['hydro_month'] = ('time', months)
7✔
1093
        diag_ds.coords['calendar_year'] = ('time', cyrs)
7✔
1094
        diag_ds.coords['calendar_month'] = ('time', cmonths)
7✔
1095

1096
        diag_ds['time'].attrs['description'] = 'Floating hydrological year'
7✔
1097
        diag_ds['hydro_year'].attrs['description'] = 'Hydrological year'
7✔
1098
        diag_ds['hydro_month'].attrs['description'] = 'Hydrological month'
7✔
1099
        diag_ds['calendar_year'].attrs['description'] = 'Calendar year'
7✔
1100
        diag_ds['calendar_month'].attrs['description'] = 'Calendar month'
7✔
1101

1102
        # Variables and attributes
1103
        ovars = cfg.PARAMS['store_diagnostic_variables']
7✔
1104

1105
        if 'volume' in ovars:
7✔
1106
            diag_ds['volume_m3'] = ('time', np.zeros(nm) * np.NaN)
7✔
1107
            diag_ds['volume_m3'].attrs['description'] = 'Total glacier volume'
7✔
1108
            diag_ds['volume_m3'].attrs['unit'] = 'm 3'
7✔
1109
            # created here but only filled with a value if
1110
            # dynamic_spinup_min_ice_thick is not None
1111
            diag_ds['volume_m3_min_h'] = ('time', np.zeros(nm) * np.NaN)
7✔
1112
            diag_ds['volume_m3_min_h'].attrs['description'] = \
7✔
1113
                f'Total glacier volume of gridpoints with a minimum ice ' \
1114
                f'thickness of {dynamic_spinup_min_ice_thick} m'
1115
            diag_ds['volume_m3_min_h'].attrs['unit'] = 'm 3'
7✔
1116

1117
        if 'volume_bsl' in ovars:
7✔
1118
            diag_ds['volume_bsl_m3'] = ('time', np.zeros(nm) * np.NaN)
7✔
1119
            diag_ds['volume_bsl_m3'].attrs['description'] = ('Glacier volume '
7✔
1120
                                                             'below '
1121
                                                             'sea-level')
1122
            diag_ds['volume_bsl_m3'].attrs['unit'] = 'm 3'
7✔
1123

1124
        if 'volume_bwl' in ovars:
7✔
1125
            diag_ds['volume_bwl_m3'] = ('time', np.zeros(nm) * np.NaN)
7✔
1126
            diag_ds['volume_bwl_m3'].attrs['description'] = ('Glacier volume '
7✔
1127
                                                             'below '
1128
                                                             'water-level')
1129
            diag_ds['volume_bwl_m3'].attrs['unit'] = 'm 3'
7✔
1130

1131
        if 'area' in ovars:
7✔
1132
            diag_ds['area_m2'] = ('time', np.zeros(nm) * np.NaN)
7✔
1133
            diag_ds['area_m2'].attrs['description'] = 'Total glacier area'
7✔
1134
            diag_ds['area_m2'].attrs['unit'] = 'm 2'
7✔
1135
            # created here but only filled with a value if
1136
            # dynamic_spinup_min_ice_thick is not None
1137
            diag_ds['area_m2_min_h'] = ('time', np.zeros(nm) * np.NaN)
7✔
1138
            diag_ds['area_m2_min_h'].attrs['description'] = \
7✔
1139
                f'Total glacier area of gridpoints with a minimum ice ' \
1140
                f'thickness of {dynamic_spinup_min_ice_thick} m'
1141
            diag_ds['area_m2_min_h'].attrs['unit'] = 'm 2'
7✔
1142

1143
        if 'length' in ovars:
7✔
1144
            diag_ds['length_m'] = ('time', np.zeros(nm) * np.NaN)
7✔
1145
            diag_ds['length_m'].attrs['description'] = 'Glacier length'
7✔
1146
            diag_ds['length_m'].attrs['unit'] = 'm'
7✔
1147

1148
        if 'calving' in ovars:
7✔
1149
            diag_ds['calving_m3'] = ('time', np.zeros(nm) * np.NaN)
7✔
1150
            diag_ds['calving_m3'].attrs['description'] = ('Total accumulated '
7✔
1151
                                                          'calving flux')
1152
            diag_ds['calving_m3'].attrs['unit'] = 'm 3'
7✔
1153

1154
        if 'calving_rate' in ovars:
7✔
1155
            diag_ds['calving_rate_myr'] = ('time', np.zeros(nm) * np.NaN)
7✔
1156
            diag_ds['calving_rate_myr'].attrs['description'] = 'Calving rate'
7✔
1157
            diag_ds['calving_rate_myr'].attrs['unit'] = 'm yr-1'
7✔
1158

1159
        for gi in range(10):
7✔
1160
            vn = f'terminus_thick_{gi}'
7✔
1161
            if vn in ovars:
7✔
1162
                diag_ds[vn] = ('time', np.zeros(nm) * np.NaN)
1✔
1163
                diag_ds[vn].attrs['description'] = ('Thickness of grid point '
1✔
1164
                                                    f'{gi} from terminus.')
1165
                diag_ds[vn].attrs['unit'] = 'm'
1✔
1166

1167
        if do_fixed_spinup:
7✔
1168
            is_spinup_time = monthly_time < self.yr
2✔
1169
            diag_ds['is_fixed_geometry_spinup'] = ('time', is_spinup_time)
2✔
1170
            desc = 'Part of the series which are spinup'
2✔
1171
            diag_ds['is_fixed_geometry_spinup'].attrs['description'] = desc
2✔
1172
            diag_ds['is_fixed_geometry_spinup'].attrs['unit'] = '-'
2✔
1173
        elif make_compatible:
7✔
1174
            is_spinup_time = np.full(len(monthly_time), False, dtype=bool)
1✔
1175
            diag_ds['is_fixed_geometry_spinup'] = ('time', is_spinup_time)
1✔
1176
            desc = 'Part of the series which are spinup'
1✔
1177
            diag_ds['is_fixed_geometry_spinup'].attrs['description'] = desc
1✔
1178
            diag_ds['is_fixed_geometry_spinup'].attrs['unit'] = '-'
1✔
1179

1180
        fl_diag_dss = None
7✔
1181
        if do_fl_diag:
7✔
1182
            # Time invariant datasets
1183
            fl_diag_dss = [fl.to_diagnostics_dataset() for fl in self.fls]
1✔
1184

1185
            # Global attributes
1186
            for ds in fl_diag_dss:
1✔
1187
                ds.attrs['description'] = 'OGGM model output'
1✔
1188
                ds.attrs['oggm_version'] = __version__
1✔
1189
                ds.attrs['calendar'] = '365-day no leap'
1✔
1190
                ds.attrs['creation_date'] = strftime("%Y-%m-%d %H:%M:%S", gmtime())
1✔
1191
                ds.attrs['water_level'] = self.water_level
1✔
1192
                ds.attrs['glen_a'] = self.glen_a
1✔
1193
                ds.attrs['fs'] = self.fs
1✔
1194

1195
                # Add MB model attributes
1196
                ds.attrs['mb_model_class'] = self.mb_model.__class__.__name__
1✔
1197
                for k, v in self.mb_model.__dict__.items():
1✔
1198
                    if np.isscalar(v) and not k.startswith('_'):
1✔
1199
                        ds.attrs['mb_model_{}'.format(k)] = v
1✔
1200

1201
                # Coordinates
1202
                ds.coords['time'] = yearly_time
1✔
1203
                ds['time'].attrs['description'] = 'Floating hydrological year'
1✔
1204

1205
            # Variables and attributes
1206
            ovars_fl = cfg.PARAMS['store_fl_diagnostic_variables']
1✔
1207
            if 'volume' not in ovars_fl or 'area' not in ovars_fl:
1✔
UNCOV
1208
                raise InvalidParamsError('Flowline diagnostics need at least '
×
1209
                                         'volume and area as output.')
1210

1211
            for ds, sect, width, bucket in zip(fl_diag_dss, sects, widths, buckets):
1✔
1212
                if 'volume' in ovars_fl:
1✔
1213
                    ds['volume_m3'] = (('time', 'dis_along_flowline'), sect)
1✔
1214
                    ds['volume_m3'].attrs['description'] = 'Section volume'
1✔
1215
                    ds['volume_m3'].attrs['unit'] = 'm 3'
1✔
1216
                if 'volume_bsl' in ovars_fl:
1✔
1217
                    ds['volume_bsl_m3'] = (('time', 'dis_along_flowline'), sect * 0)
1✔
1218
                    ds['volume_bsl_m3'].attrs['description'] = 'Section volume below sea level'
1✔
1219
                    ds['volume_bsl_m3'].attrs['unit'] = 'm 3'
1✔
1220
                if 'volume_bwl' in ovars_fl:
1✔
1221
                    ds['volume_bwl_m3'] = (('time', 'dis_along_flowline'), sect * 0)
1✔
1222
                    ds['volume_bwl_m3'].attrs['description'] = 'Section volume below water level'
1✔
1223
                    ds['volume_bwl_m3'].attrs['unit'] = 'm 3'
1✔
1224
                if 'area' in ovars_fl:
1✔
1225
                    ds['area_m2'] = (('time', 'dis_along_flowline'), width)
1✔
1226
                    ds['area_m2'].attrs['description'] = 'Section area'
1✔
1227
                    ds['area_m2'].attrs['unit'] = 'm 2'
1✔
1228
                if 'thickness' in ovars_fl:
1✔
1229
                    ds['thickness_m'] = (('time', 'dis_along_flowline'), width * np.NaN)
1✔
1230
                    ds['thickness_m'].attrs['description'] = 'Section thickness'
1✔
1231
                    ds['thickness_m'].attrs['unit'] = 'm'
1✔
1232
                if 'ice_velocity' in ovars_fl:
1✔
1233
                    if not (hasattr(self, '_surf_vel_fac') or hasattr(self, 'u_stag')):
1✔
UNCOV
1234
                        raise InvalidParamsError('This flowline model does not seem '
×
1235
                                                 'to be able to provide surface '
1236
                                                 'velocities.')
1237
                    ds['ice_velocity_myr'] = (('time', 'dis_along_flowline'), width * np.NaN)
1✔
1238
                    ds['ice_velocity_myr'].attrs['description'] = 'Ice velocity at the surface'
1✔
1239
                    ds['ice_velocity_myr'].attrs['unit'] = 'm yr-1'
1✔
1240
                if 'calving_bucket' in ovars_fl:
1✔
1241
                    ds['calving_bucket_m3'] = (('time',), bucket)
1✔
1242
                    desc = 'Flowline calving bucket (volume not yet calved)'
1✔
1243
                    ds['calving_bucket_m3'].attrs['description'] = desc
1✔
1244
                    ds['calving_bucket_m3'].attrs['unit'] = 'm 3'
1✔
1245
                if do_fixed_spinup:
1✔
1246
                    ds['is_fixed_geometry_spinup'] = ('time', is_spinup_time)
1✔
1247
                    desc = 'Part of the series which are spinup'
1✔
1248
                    ds['is_fixed_geometry_spinup'].attrs['description'] = desc
1✔
1249
                    ds['is_fixed_geometry_spinup'].attrs['unit'] = '-'
1✔
1250

1251
        # First deal with spinup (we compute volume change only)
1252
        if do_fixed_spinup:
7✔
1253
            spinup_vol = monthly_time * 0
2✔
1254
            for fl_id, fl in enumerate(self.fls):
2✔
1255

1256
                h = fl.surface_h
2✔
1257
                a = fl.widths_m * fl.dx_meter
2✔
1258
                a[fl.section <= 0] = 0
2✔
1259

1260
                for j, yr in enumerate(monthly_time[is_spinup_time]):
2✔
1261
                    smb = self.get_mb(h, year=yr, fl_id=fl_id, fls=self.fls)
1✔
1262
                    spinup_vol[j] -= np.sum(smb * a)  # per second; negative because we go backwards in time
1✔
1263

1264
            # per unit time
1265
            dt = (monthly_time[1:] - monthly_time[:-1]) * cfg.SEC_IN_YEAR
2✔
1266
            spinup_vol[:-1] = spinup_vol[:-1] * dt
2✔
1267
            spinup_vol = np.cumsum(spinup_vol[::-1])[::-1]
2✔
1268

1269
        # Run
1270
        j = 0
7✔
1271
        prev_state = None  # for the stopping criterion
7✔
1272
        for i, (yr, mo) in enumerate(zip(monthly_time, months)):
7✔
1273

1274
            if yr > self.yr:
7✔
1275
                # Here we run the model - otherwise (for spinup) we
1276
                # constantly store the same data
1277
                self.run_until(yr)
7✔
1278

1279
            # Glacier geometry
1280
            if (do_geom or do_fl_diag) and mo == 1:
7✔
1281
                for s, w, b, fl in zip(sects, widths, buckets, self.fls):
4✔
1282
                    s[j, :] = fl.section
4✔
1283
                    w[j, :] = fl.widths_m
4✔
1284
                    if self.is_tidewater:
4✔
1285
                        try:
3✔
1286
                            b[j] = fl.calving_bucket_m3
3✔
1287
                        except AttributeError:
×
UNCOV
1288
                            pass
×
1289

1290
                # Flowline diagnostics
1291
                if do_fl_diag:
4✔
1292
                    for fl_id, (ds, fl) in enumerate(zip(fl_diag_dss, self.fls)):
1✔
1293
                        # area and volume are already being taken care of above
1294
                        if 'thickness' in ovars_fl:
1✔
1295
                            ds['thickness_m'].data[j, :] = fl.thick
1✔
1296
                        if 'volume_bsl' in ovars_fl:
1✔
1297
                            ds['volume_bsl_m3'].data[j, :] = fl.volume_bsl_m3
1✔
1298
                        if 'volume_bwl' in ovars_fl:
1✔
1299
                            ds['volume_bwl_m3'].data[j, :] = fl.volume_bwl_m3
1✔
1300
                        if 'ice_velocity' in ovars_fl and (yr > self.y0):
1✔
1301
                            # Velocity can only be computed with dynamics
1302
                            var = self.u_stag[fl_id]
1✔
1303
                            val = (var[1:fl.nx + 1] + var[:fl.nx]) / 2 * self._surf_vel_fac
1✔
1304
                            ds['ice_velocity_myr'].data[j, :] = val * cfg.SEC_IN_YEAR
1✔
1305
                # j is the yearly index in case we have monthly output
1306
                # we have to count it ourselves
1307
                j += 1
4✔
1308

1309
            # Diagnostics
1310
            if 'volume' in ovars:
7✔
1311
                diag_ds['volume_m3'].data[i] = self.volume_m3
7✔
1312
                if dynamic_spinup_min_ice_thick is not None:
7✔
1313
                    diag_ds['volume_m3_min_h'].data[i] = np.sum([np.sum(
7✔
1314
                        (fl.section * fl.dx_meter)[fl.thick > dynamic_spinup_min_ice_thick])
1315
                        for fl in self.fls])
1316
            if 'area' in ovars:
7✔
1317
                diag_ds['area_m2'].data[i] = self.area_m2
7✔
1318
                if dynamic_spinup_min_ice_thick is not None:
7✔
1319
                    diag_ds['area_m2_min_h'].data[i] = np.sum([np.sum(
7✔
1320
                        fl.bin_area_m2[fl.thick > dynamic_spinup_min_ice_thick])
1321
                        for fl in self.fls])
1322
            if 'length' in ovars:
7✔
1323
                diag_ds['length_m'].data[i] = self.length_m
7✔
1324
            if 'calving' in ovars:
7✔
1325
                diag_ds['calving_m3'].data[i] = self.calving_m3_since_y0
7✔
1326
            if 'calving_rate' in ovars:
7✔
1327
                diag_ds['calving_rate_myr'].data[i] = self.calving_rate_myr
7✔
1328
            if 'volume_bsl' in ovars:
7✔
1329
                diag_ds['volume_bsl_m3'].data[i] = self.volume_bsl_m3
7✔
1330
            if 'volume_bwl' in ovars:
7✔
1331
                diag_ds['volume_bwl_m3'].data[i] = self.volume_bwl_m3
7✔
1332
            # Terminus thickness requires a bit more logic
1333
            ti = None
7✔
1334
            for gi in range(10):
7✔
1335
                vn = f'terminus_thick_{gi}'
7✔
1336
                if vn in ovars:
7✔
1337
                    if ti is None:
1✔
1338
                        ti = self.fls[-1].terminus_index
1✔
1339
                    diag_ds[vn].data[i] = self.fls[-1].thick[ti - gi]
1✔
1340

1341
            # Decide if we continue
1342
            if stop_criterion is not None:
7✔
1343
                stop, prev_state = stop_criterion(self, prev_state)
2✔
1344
                if stop:
2✔
1345
                    break
2✔
1346

1347
        # to datasets
1348
        geom_ds = None
7✔
1349
        if do_geom:
7✔
1350
            geom_ds = []
4✔
1351
            for (s, w, b) in zip(sects, widths, buckets):
4✔
1352
                ds = xr.Dataset()
4✔
1353
                ds.attrs['description'] = 'OGGM model output'
4✔
1354
                ds.attrs['oggm_version'] = __version__
4✔
1355
                ds.attrs['calendar'] = '365-day no leap'
4✔
1356
                ds.attrs['creation_date'] = strftime("%Y-%m-%d %H:%M:%S",
4✔
1357
                                                     gmtime())
1358
                ds.attrs['water_level'] = self.water_level
4✔
1359
                ds.attrs['glen_a'] = self.glen_a
4✔
1360
                ds.attrs['fs'] = self.fs
4✔
1361
                # Add MB model attributes
1362
                ds.attrs['mb_model_class'] = self.mb_model.__class__.__name__
4✔
1363
                for k, v in self.mb_model.__dict__.items():
4✔
1364
                    if np.isscalar(v) and not k.startswith('_'):
4✔
1365
                        ds.attrs['mb_model_{}'.format(k)] = v
4✔
1366

1367
                ds.coords['time'] = yearly_time
4✔
1368
                ds['time'].attrs['description'] = 'Floating hydrological year'
4✔
1369
                varcoords = OrderedDict(time=('time', yearly_time),
4✔
1370
                                        year=('time', yearly_time))
1371
                ds['ts_section'] = xr.DataArray(s, dims=('time', 'x'),
4✔
1372
                                                coords=varcoords)
1373
                ds['ts_width_m'] = xr.DataArray(w, dims=('time', 'x'),
4✔
1374
                                                coords=varcoords)
1375
                ds['ts_calving_bucket_m3'] = xr.DataArray(b, dims=('time', ),
4✔
1376
                                                          coords=varcoords)
1377

1378
                if stop_criterion is not None:
4✔
1379
                    # Remove probable NaNs
1380
                    ds = ds.dropna('time', subset=['ts_section'])
1✔
1381

1382
                geom_ds.append(ds)
4✔
1383

1384
        # Add the spinup volume to the diag
1385
        if do_fixed_spinup:
7✔
1386
            # If there is calving we need to trick as well
1387
            if 'calving_m3' in diag_ds and np.any(diag_ds['calving_m3'] > 0):
2✔
1388
                raise NotImplementedError('Calving and fixed_geometry_spinup_yr '
1389
                                          'not implemented yet.')
1390
            diag_ds['volume_m3'].data[:] += spinup_vol
2✔
1391

1392
        if stop_criterion is not None:
7✔
1393
            # Remove probable NaNs
1394
            diag_ds = diag_ds.dropna('time', subset=['volume_m3'])
2✔
1395

1396
        # write output?
1397
        if do_fl_diag:
7✔
1398
            # Unit conversions for these
1399
            for i, ds in enumerate(fl_diag_dss):
1✔
1400
                dx = ds.attrs['map_dx'] * ds.attrs['dx']
1✔
1401
                # No inplace because the other dataset uses them
1402
                # These variables are always there (see above)
1403
                ds['volume_m3'] = ds['volume_m3'] * dx
1✔
1404
                ds['area_m2'] = ds['area_m2'].where(ds['volume_m3'] > 0, 0) * dx
1✔
1405
                if stop_criterion is not None:
1✔
1406
                    # Remove probable NaNs
1407
                    fl_diag_dss[i] = ds.dropna('time', subset=['volume_m3'])
1✔
1408

1409
            # Write out?
1410
            if fl_diag_path not in [True, None]:
1✔
1411
                encode = {}
1✔
1412
                for v in fl_diag_dss[0]:
1✔
1413
                    encode[v] = {'zlib': True, 'complevel': 5}
1✔
1414

1415
                # Root dataset holding global attributes and the flowline structure
1416
                ds = xr.Dataset()
1✔
1417
                ds.attrs['description'] = ('OGGM model output on flowlines. '
1✔
1418
                                           'Check groups for data.')
1419
                ds.attrs['oggm_version'] = __version__
1✔
1420
                # This is useful to interpret the dataset afterwards
1421
                flows_to_id = []
1✔
1422
                for trib in self._tributary_indices:
1✔
1423
                    flows_to_id.append(trib[0] if trib[0] is not None else -1)
1✔
1424
                ds['flowlines'] = ('flowlines', np.arange(len(flows_to_id)))
1✔
1425
                ds['flows_to_id'] = ('flowlines', flows_to_id)
1✔
1426
                ds.to_netcdf(fl_diag_path, 'w')
1✔
1427
                for i, ds in enumerate(fl_diag_dss):
1✔
1428
                    ds.to_netcdf(fl_diag_path, 'a', group='fl_{}'.format(i),
1✔
1429
                                 encoding=encode)
1430

1431
        if do_geom and geom_path not in [True, None]:
7✔
1432
            encode = {'ts_section': {'zlib': True, 'complevel': 5},
4✔
1433
                      'ts_width_m': {'zlib': True, 'complevel': 5},
1434
                      }
1435
            for i, ds in enumerate(geom_ds):
4✔
1436
                ds.to_netcdf(geom_path, 'a', group='fl_{}'.format(i),
4✔
1437
                             encoding=encode)
1438

1439
            # Add calving to geom file because the FileModel can't infer it
1440
            if 'calving_m3' in diag_ds:
4✔
1441
                diag_ds[['calving_m3']].to_netcdf(geom_path, 'a')
4✔
1442

1443
        if diag_path not in [True, None]:
7✔
1444
            diag_ds.to_netcdf(diag_path)
5✔
1445

1446
        # Decide on what to give back
1447
        out = [diag_ds]
7✔
1448
        if fl_diag_dss is not None:
7✔
1449
            out.append(fl_diag_dss)
1✔
1450
        if geom_ds is not None:
7✔
1451
            out.append(geom_ds)
4✔
1452
        if len(out) == 1:
7✔
1453
            out = out[0]
6✔
1454
        else:
1455
            out = tuple(out)
4✔
1456
        return out
7✔
1457

1458
    def run_until_equilibrium(self, rate=0.001, ystep=5, max_ite=200):
9✔
1459
        """ Runs the model until an equilibrium state is reached.
1460

1461
        Be careful: This only works for CONSTANT (not time-dependent)
1462
        mass balance models.
1463
        Otherwise the returned state will not be in equilibrium! Don't try to
1464
        calculate an equilibrium state with a RandomMassBalance model!
1465
        """
1466

1467
        ite = 0
3✔
1468
        was_close_zero = 0
3✔
1469
        t_rate = 1
3✔
1470
        while (t_rate > rate) and (ite <= max_ite) and (was_close_zero < 5):
3✔
1471
            ite += 1
3✔
1472
            v_bef = self.volume_m3
3✔
1473
            self.run_until(self.yr + ystep)
3✔
1474
            v_af = self.volume_m3
3✔
1475
            if np.isclose(v_bef, 0., atol=1):
3✔
1476
                t_rate = 1
3✔
1477
                was_close_zero += 1
3✔
1478
            else:
1479
                t_rate = np.abs(v_af - v_bef) / v_bef
3✔
1480
        if ite > max_ite:
3✔
UNCOV
1481
            raise RuntimeError('Did not find equilibrium.')
×
1482

1483
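# ---------------------------------------------------------------------------
# Editor's note: a minimal, hypothetical usage sketch for
# ``run_until_equilibrium`` (not part of the library code). It assumes the
# idealized setup used in the OGGM documentation: a rectangular bed, a
# time-constant LinearMassBalance and the FluxBasedModel defined below. The
# geometry and parameter values are illustrative only. Kept as a comment so
# that importing this module stays side-effect free.
#
#   import numpy as np
#   import oggm.cfg as cfg
#   from oggm.core.massbalance import LinearMassBalance
#   from oggm.core.flowline import RectangularBedFlowline, FluxBasedModel
#
#   cfg.initialize()                              # default PARAMS
#   nx, map_dx = 200, 100.                        # 200 points, 100 m spacing
#   bed_h = np.linspace(3400., 1400., nx)         # inclined bed (hypothetical)
#   widths = np.zeros(nx) + 3.                    # widths in grid-point units
#   fl = RectangularBedFlowline(surface_h=bed_h, bed_h=bed_h,
#                               widths=widths, map_dx=map_dx)
#   mb = LinearMassBalance(ela_h=2600., grad=4.)  # constant in time
#   model = FluxBasedModel([fl], mb_model=mb, y0=0.)
#   model.run_until_equilibrium(rate=0.001, ystep=5)
#   print(model.yr, model.volume_km3)
# ---------------------------------------------------------------------------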

1484
def flux_gate_with_build_up(year, flux_value=None, flux_gate_yr=None):
9✔
1485
    """Default scalar flux gate with build up period"""
1486
    fac = 1 - (flux_gate_yr - year) / flux_gate_yr
2✔
1487
    return flux_value * utils.clip_scalar(fac, 0, 1)
2✔
1488

1489
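# Editor's note: a small numeric illustration of the build-up factor above
# (the values are hypothetical). With flux_value=10 m3 s-1 and
# flux_gate_yr=100, the gate ramps up linearly from zero and then saturates
# at the full value:
#
#   flux_gate_with_build_up(0,   flux_value=10., flux_gate_yr=100)  # -> 0.0
#   flux_gate_with_build_up(50,  flux_value=10., flux_gate_yr=100)  # -> 5.0
#   flux_gate_with_build_up(150, flux_value=10., flux_gate_yr=100)  # -> 10.0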

1490
class FluxBasedModel(FlowlineModel):
9✔
1491
    """The flowline model used by OGGM in production.
1492

1493
    It solves for the SIA along the flowline(s) using a staggered grid. It
1494
    computes the *ice flux* between grid points and transports the mass
1495
    accordingly (also between flowlines).
1496

1497
    This model is numerically less stable than fancier schemes, but it
1498
    is fast and works with multiple flowlines of any bed shape (rectangular,
1499
    parabolic, trapeze, and any combination of them).
1500

1501
    We test that it conserves mass in most cases, but not on very stiff cliffs.
1502
    """
1503

1504
    def __init__(self, flowlines, mb_model=None, y0=0., glen_a=None,
9✔
1505
                 fs=0., inplace=False, fixed_dt=None, cfl_number=None,
1506
                 min_dt=None, flux_gate_thickness=None,
1507
                 flux_gate=None, flux_gate_build_up=100,
1508
                 do_kcalving=None, calving_k=None, calving_use_limiter=None,
1509
                 calving_limiter_frac=None, water_level=None,
1510
                 **kwargs):
1511
        """Instantiate the model.
1512

1513
        Parameters
1514
        ----------
1515
        flowlines : list
1516
            the glacier flowlines
1517
        mb_model : MassBalanceModel
1518
            the mass balance model
1519
        y0 : int
1520
            initial year of the simulation
1521
        glen_a : float
1522
            Glen's creep parameter
1523
        fs : float
1524
            Oerlemans sliding parameter
1525
        inplace : bool
1526
            whether or not to make a copy of the flowline objects for the run;
1527
            setting to True implies that your objects will be modified at run
1528
            time by the model (can help to spare memory)
1529
        fixed_dt : float
1530
            set to a value (in seconds) to prevent adaptive time-stepping.
1531
        cfl_number : float
1532
            Defaults to cfg.PARAMS['cfl_number'].
1533
            For adaptive time stepping (the default), dt is chosen from the
1534
            CFL criterion (dt = cfl_number * dx / max_u).
1535
            To choose the "best" CFL number we would need a stability
1536
            analysis - we used an empirical analysis (see blog post) and
1537
            settled on 0.02 for the default cfg.PARAMS['cfl_number'].
1538
        min_dt : float
1539
            Defaults to cfg.PARAMS['cfl_min_dt'].
1540
            At high velocities, time steps can become very small and your
1541
            model might run very slowly. In production, it might be useful to
1542
            set a limit below which the model will just error.
1543
        is_tidewater: bool, default: False
1544
            is this a tidewater glacier?
1545
        is_lake_terminating: bool, default: False
1546
            is this a lake terminating glacier?
1547
        mb_elev_feedback : str, default: 'annual'
1548
            'never', 'always', 'annual', or 'monthly': how often the
1549
            mass balance should be recomputed from the mass balance model.
1550
            'Never' is equivalent to 'annual' but without elevation feedback
1551
            at all (the heights are taken from the first call).
1552
        check_for_boundaries: bool, default: True
1553
            raise an error when the glacier grows bigger than the domain
1554
            boundaries
1555
        flux_gate_thickness : float or array
1556
            flux of ice from the left domain boundary (and tributaries).
1557
            Units of m of ice thickness. Note that unrealistic values won't be
1558
            met by the model, so this is really just a rough guide.
1559
            It's better to use `flux_gate` instead.
1560
        flux_gate : float or function or array of floats or array of functions
1561
            flux of ice from the left domain boundary (and tributaries)
1562
            (unit: m3 of ice per second). If set to a high value, consider
1563
            changing the flux_gate_buildup time. You can also provide
1564
            a function (or an array of functions) returning the flux
1565
            (unit: m3 of ice per second) as a function of time.
1566
            This is overridden by `flux_gate_thickness` if provided.
1567
        flux_gate_build_up : int
1568
            number of years used to build up the flux gate to full value
1569
        do_kcalving : bool
1570
            switch on the k-calving parameterisation. Ignored if not a
1571
            tidewater glacier. Use the option from PARAMS per default
1572
        calving_k : float
1573
            the calving proportionality constant (units: yr-1). Use the
1574
            one from PARAMS per default
1575
        calving_use_limiter : bool
1576
            whether to switch on the calving limiter of the parameterisation;
1577
            it makes the calving fronts thicker, but the model is more stable
1578
        calving_limiter_frac : float
1579
            limit the front slope to a fraction of the calving front.
1580
            "3" means 1/3. Setting it to 0 limits the slope to sea-level.
1581
        water_level : float
1582
            the water level. It should be zero m a.s.l., but:
1583
            - sometimes the frontal elevation is unrealistically high (or low).
1584
            - lake terminating glaciers
1585
            - other uncertainties
1586
            The default is 0. For lake terminating glaciers,
1587
            it is inferred from PARAMS['free_board_lake_terminating'].
1588
            The best way to set the water level for real glaciers is to use
1589
            the same as used for the inversion (this is what
1590
            `flowline_model_run` does for you)
1591
        """
1592
        super(FluxBasedModel, self).__init__(flowlines, mb_model=mb_model,
9✔
1593
                                             y0=y0, glen_a=glen_a, fs=fs,
1594
                                             inplace=inplace,
1595
                                             water_level=water_level,
1596
                                             **kwargs)
1597

1598
        self.fixed_dt = fixed_dt
9✔
1599
        if min_dt is None:
9✔
1600
            min_dt = cfg.PARAMS['cfl_min_dt']
9✔
1601
        if cfl_number is None:
9✔
1602
            cfl_number = cfg.PARAMS['cfl_number']
9✔
1603
        self.min_dt = min_dt
9✔
1604
        self.cfl_number = cfl_number
9✔
1605

1606
        # Do we want to use shape factors?
1607
        self.sf_func = None
9✔
1608
        use_sf = cfg.PARAMS.get('use_shape_factor_for_fluxbasedmodel')
9✔
1609
        if use_sf == 'Adhikari' or use_sf == 'Nye':
9✔
1610
            self.sf_func = utils.shape_factor_adhikari
1✔
1611
        elif use_sf == 'Huss':
9✔
1612
            self.sf_func = utils.shape_factor_huss
1✔
1613

1614
        # Calving params
1615
        if do_kcalving is None:
9✔
1616
            do_kcalving = cfg.PARAMS['use_kcalving_for_run']
9✔
1617
        self.do_calving = do_kcalving and self.is_tidewater
9✔
1618
        if calving_k is None:
9✔
1619
            calving_k = cfg.PARAMS['calving_k']
9✔
1620
        self.calving_k = calving_k / cfg.SEC_IN_YEAR
9✔
1621
        if calving_use_limiter is None:
9✔
1622
            calving_use_limiter = cfg.PARAMS['calving_use_limiter']
9✔
1623
        self.calving_use_limiter = calving_use_limiter
9✔
1624
        if calving_limiter_frac is None:
9✔
1625
            calving_limiter_frac = cfg.PARAMS['calving_limiter_frac']
9✔
1626
        if calving_limiter_frac > 0:
9✔
1627
            raise NotImplementedError('calving limiter other than 0 not '
1628
                                      'implemented yet')
1629
        self.calving_limiter_frac = calving_limiter_frac
9✔
1630

1631
        # Flux gate
1632
        self.flux_gate = utils.tolist(flux_gate, length=len(self.fls))
9✔
1633
        self.flux_gate_m3_since_y0 = 0.
9✔
1634
        if flux_gate_thickness is not None:
9✔
1635
            # Compute the theoretical ice flux from the slope at the top
1636
            flux_gate_thickness = utils.tolist(flux_gate_thickness,
1✔
1637
                                               length=len(self.fls))
1638
            self.flux_gate = []
1✔
1639
            for fl, fgt in zip(self.fls, flux_gate_thickness):
1✔
1640
                # We set the thickness to the desired value so that
1641
                # the widths work ok
1642
                fl = copy.deepcopy(fl)
1✔
1643
                fl.thick = fl.thick * 0 + fgt
1✔
1644
                slope = (fl.surface_h[0] - fl.surface_h[1]) / fl.dx_meter
1✔
1645
                if slope == 0:
1✔
UNCOV
1646
                    raise ValueError('I need a slope to compute the flux')
×
1647
                flux = find_sia_flux_from_thickness(slope,
1✔
1648
                                                    fl.widths_m[0],
1649
                                                    fgt,
1650
                                                    shape=fl.shape_str[0],
1651
                                                    glen_a=self.glen_a,
1652
                                                    fs=self.fs)
1653
                self.flux_gate.append(flux)
1✔
1654

1655
        # convert plain floats into functions of time
1656
        for i, fg in enumerate(self.flux_gate):
9✔
1657
            if fg is None:
9✔
1658
                continue
9✔
1659
            try:
2✔
1660
                # Do we have a function? If yes all good
1661
                fg(self.yr)
2✔
1662
            except TypeError:
2✔
1663
                # If not, make one
1664
                self.flux_gate[i] = partial(flux_gate_with_build_up,
2✔
1665
                                            flux_value=fg,
1666
                                            flux_gate_yr=(flux_gate_build_up +
1667
                                                          self.y0))
1668
        # Special output
1669
        self._surf_vel_fac = (self.glen_n + 2) / (self.glen_n + 1)
9✔
1670

1671
        # Optim
1672
        self.slope_stag = []
9✔
1673
        self.thick_stag = []
9✔
1674
        self.section_stag = []
9✔
1675
        self.u_stag = []
9✔
1676
        self.shapefac_stag = []
9✔
1677
        self.flux_stag = []
9✔
1678
        self.trib_flux = []
9✔
1679
        for fl, trib in zip(self.fls, self._tributary_indices):
9✔
1680
            nx = fl.nx
9✔
1681
            # This is not staggered
1682
            self.trib_flux.append(np.zeros(nx))
9✔
1683
            # We add an additional fake grid point at the end of tributaries
1684
            if trib[0] is not None:
9✔
1685
                nx = fl.nx + 1
7✔
1686
            # +1 is for the staggered grid
1687
            self.slope_stag.append(np.zeros(nx+1))
9✔
1688
            self.thick_stag.append(np.zeros(nx+1))
9✔
1689
            self.section_stag.append(np.zeros(nx+1))
9✔
1690
            self.u_stag.append(np.zeros(nx+1))
9✔
1691
            self.shapefac_stag.append(np.ones(nx+1))  # beware the ones!
9✔
1692
            self.flux_stag.append(np.zeros(nx+1))
9✔
1693

1694
    def step(self, dt):
9✔
1695
        """Advance one step."""
1696

1697
        # Just a check to avoid useless computations
1698
        if dt <= 0:
8✔
1699
            raise InvalidParamsError('dt needs to be strictly positive')
1✔
1700

1701
        # Simple container
1702
        mbs = []
8✔
1703

1704
        # Loop over tributaries to determine the flux rate
1705
        for fl_id, fl in enumerate(self.fls):
8✔
1706

1707
            # This is possibly less efficient than zip() but much clearer
1708
            trib = self._tributary_indices[fl_id]
8✔
1709
            slope_stag = self.slope_stag[fl_id]
8✔
1710
            thick_stag = self.thick_stag[fl_id]
8✔
1711
            section_stag = self.section_stag[fl_id]
8✔
1712
            sf_stag = self.shapefac_stag[fl_id]
8✔
1713
            flux_stag = self.flux_stag[fl_id]
8✔
1714
            trib_flux = self.trib_flux[fl_id]
8✔
1715
            u_stag = self.u_stag[fl_id]
8✔
1716
            flux_gate = self.flux_gate[fl_id]
8✔
1717

1718
            # Flowline state
1719
            surface_h = fl.surface_h
8✔
1720
            thick = fl.thick
8✔
1721
            section = fl.section
8✔
1722
            dx = fl.dx_meter
8✔
1723

1724
            # If it is a tributary, we use the branch it flows into to compute
1725
            # the slope of the last grid point
1726
            is_trib = trib[0] is not None
8✔
1727
            if is_trib:
8✔
1728
                fl_to = self.fls[trib[0]]
6✔
1729
                ide = fl.flows_to_indice
6✔
1730
                surface_h = np.append(surface_h, fl_to.surface_h[ide])
6✔
1731
                thick = np.append(thick, thick[-1])
6✔
1732
                section = np.append(section, section[-1])
6✔
1733
            elif self.do_calving and self.calving_use_limiter:
8✔
1734
                # We lower the max possible ice deformation
1735
                # by clipping the surface slope here. It is completely
1736
                # arbitrary but reduces ice deformation at the calving front.
1737
                # I think that in essence, it is also partly
1738
                # a "calving process", because this ice deformation must
1739
                # be less at the calving front. The result is that calving
1740
                # front "freeboards" are quite high.
1741
                # Note that 0 is arbitrary, it could be any value below SL
1742
                surface_h = utils.clip_min(surface_h, self.water_level)
6✔
1743

1744
            # Staggered gradient
1745
            slope_stag[0] = 0
8✔
1746
            slope_stag[1:-1] = (surface_h[0:-1] - surface_h[1:]) / dx
8✔
1747
            slope_stag[-1] = slope_stag[-2]
8✔
1748

1749
            # Staggered thick
1750
            thick_stag[1:-1] = (thick[0:-1] + thick[1:]) / 2.
8✔
1751
            thick_stag[[0, -1]] = thick[[0, -1]]
8✔
1752

1753
            if self.sf_func is not None:
8✔
1754
                # TODO: maybe compute new shape factors only every year?
1755
                sf = self.sf_func(fl.widths_m, fl.thick, fl.is_rectangular)
1✔
1756
                if is_trib:
1✔
1757
                    # for inflowing tributary, the sf makes no sense
1758
                    sf = np.append(sf, 1.)
1✔
1759
                sf_stag[1:-1] = (sf[0:-1] + sf[1:]) / 2.
1✔
1760
                sf_stag[[0, -1]] = sf[[0, -1]]
1✔
1761

1762
            # Staggered velocity (Deformation + Sliding)
1763
            # _fd = 2/(N+2) * self.glen_a
1764
            N = self.glen_n
8✔
1765
            rhogh = (self.rho*G*slope_stag)**N
8✔
1766
            u_stag[:] = (thick_stag**(N+1)) * self._fd * rhogh * sf_stag**N + \
8✔
1767
                        (thick_stag**(N-1)) * self.fs * rhogh
1768

1769
            # Staggered section
1770
            section_stag[1:-1] = (section[0:-1] + section[1:]) / 2.
8✔
1771
            section_stag[[0, -1]] = section[[0, -1]]
8✔
1772

1773
            # Staggered flux rate
1774
            flux_stag[:] = u_stag * section_stag
8✔
1775

1776
            # Add boundary condition
1777
            if flux_gate is not None:
8✔
1778
                flux_stag[0] = flux_gate(self.yr)
2✔
1779

1780
            # CFL condition
1781
            if not self.fixed_dt:
8✔
1782
                maxu = np.max(np.abs(u_stag))
8✔
1783
                if maxu > cfg.FLOAT_EPS:
8✔
1784
                    cfl_dt = self.cfl_number * dx / maxu
8✔
1785
                else:
1786
                    cfl_dt = dt
4✔
1787

1788
                # Update dt only if necessary
1789
                if cfl_dt < dt:
8✔
1790
                    dt = cfl_dt
8✔
1791
                    if cfl_dt < self.min_dt:
8✔
1792
                        raise RuntimeError(
2✔
1793
                            'CFL error: required time step smaller '
1794
                            'than the minimum allowed: '
1795
                            '{:.1f}s vs {:.1f}s. Happening at '
1796
                            'simulation year {:.1f}, fl_id {}, '
1797
                            'bin_id {} and max_u {:.3f} m yr-1.'
1798
                            ''.format(cfl_dt, self.min_dt, self.yr, fl_id,
1799
                                      np.argmax(np.abs(u_stag)),
1800
                                      maxu * cfg.SEC_IN_YEAR))
1801

1802
            # Since we are in this loop, reset the tributary flux
1803
            trib_flux[:] = 0
8✔
1804

1805
            # We compute MB in this loop, before mass-redistribution occurs,
1806
            # so that MB models which rely on glacier geometry to decide things
1807
            # (like PyGEM) can do so with a clean glacier state
1808
            mbs.append(self.get_mb(fl.surface_h, self.yr,
8✔
1809
                                   fl_id=fl_id, fls=self.fls))
1810

1811
        # Time step
1812
        if self.fixed_dt:
8✔
1813
            # change only if step dt is larger than the chosen dt
1814
            if self.fixed_dt < dt:
2✔
1815
                dt = self.fixed_dt
2✔
1816

1817
        # A second loop for the mass exchange
1818
        for fl_id, fl in enumerate(self.fls):
8✔
1819

1820
            flx_stag = self.flux_stag[fl_id]
8✔
1821
            trib_flux = self.trib_flux[fl_id]
8✔
1822
            tr = self._tributary_indices[fl_id]
8✔
1823

1824
            dx = fl.dx_meter
8✔
1825

1826
            is_trib = tr[0] is not None
8✔
1827

1828
            # For these we had an additional grid point
1829
            if is_trib:
8✔
1830
                flx_stag = flx_stag[:-1]
6✔
1831

1832
            # Mass balance
1833
            widths = fl.widths_m
8✔
1834
            mb = mbs[fl_id]
8✔
1835
            # Allow parabolic beds to grow
1836
            mb = dt * mb * np.where((mb > 0.) & (widths == 0), 10., widths)
8✔
1837

1838
            # Update section with ice flow and mass balance
1839
            new_section = (fl.section + (flx_stag[0:-1] - flx_stag[1:])*dt/dx +
8✔
1840
                           trib_flux*dt/dx + mb)
1841

1842
            # Keep positive values only and store
1843
            fl.section = utils.clip_min(new_section, 0)
8✔
1844

1845
            # If we use a flux-gate, store the total volume that came in
1846
            self.flux_gate_m3_since_y0 += flx_stag[0] * dt
8✔
1847

1848
            # Add the last flux to the tributary
1849
            # this works because the lines are sorted in order
1850
            if is_trib:
8✔
1851
                # tr tuple: line_index, start, stop, gaussian_kernel
1852
                self.trib_flux[tr[0]][tr[1]:tr[2]] += \
6✔
1853
                    utils.clip_min(flx_stag[-1], 0) * tr[3]
1854

1855
            # --- The rest is for calving only ---
1856
            self.calving_rate_myr = 0.
8✔
1857

1858
            # If tributary, do calving only if we are not transferring mass
1859
            if is_trib and flx_stag[-1] > 0:
8✔
1860
                continue
6✔
1861

1862
            # No need to do calving in these cases either
1863
            if not self.do_calving or not fl.has_ice():
8✔
1864
                continue
6✔
1865

1866
            # We do calving only if the last glacier bed pixel is below water
1867
            # (this is to avoid calving elsewhere than at the front)
1868
            if fl.bed_h[fl.thick > 0][-1] > self.water_level:
6✔
1869
                continue
5✔
1870

1871
            # We do calving only if there is some ice above wl
1872
            last_above_wl = np.nonzero((fl.surface_h > self.water_level) &
6✔
1873
                                       (fl.thick > 0))[0][-1]
1874
            if fl.bed_h[last_above_wl] > self.water_level:
6✔
1875
                continue
3✔
1876

1877
            # OK, we're really calving
1878
            section = fl.section
6✔
1879

1880
            # Calving law
1881
            h = fl.thick[last_above_wl]
6✔
1882
            d = h - (fl.surface_h[last_above_wl] - self.water_level)
6✔
1883
            k = self.calving_k
6✔
1884
            q_calving = k * d * h * fl.widths_m[last_above_wl]
6✔
1885
            # Add to the bucket and the diagnostics
1886
            fl.calving_bucket_m3 += q_calving * dt
6✔
1887
            self.calving_m3_since_y0 += q_calving * dt
6✔
1888
            self.calving_rate_myr = (q_calving / section[last_above_wl] *
6✔
1889
                                     cfg.SEC_IN_YEAR)
1890

1891
            # See if we have ice below sea-water to clean out first
1892
            below_sl = (fl.surface_h < self.water_level) & (fl.thick > 0)
6✔
1893
            to_remove = np.sum(section[below_sl]) * fl.dx_meter
6✔
1894
            if 0 < to_remove < fl.calving_bucket_m3:
6✔
1895
                # This is easy, we remove everything
1896
                section[below_sl] = 0
6✔
1897
                fl.calving_bucket_m3 -= to_remove
6✔
1898
            elif to_remove > 0:
6✔
1899
                # We can only remove part of it
1900
                section[below_sl] = 0
6✔
1901
                section[last_above_wl+1] = ((to_remove - fl.calving_bucket_m3)
6✔
1902
                                            / fl.dx_meter)
1903
                fl.calving_bucket_m3 = 0
6✔
1904

1905
            # The rest of the bucket might calve an entire grid point (or more?)
1906
            vol_last = section[last_above_wl] * fl.dx_meter
6✔
1907
            while fl.calving_bucket_m3 > vol_last:
6✔
1908
                fl.calving_bucket_m3 -= vol_last
5✔
1909
                section[last_above_wl] = 0
5✔
1910

1911
                # OK check if we need to continue (unlikely)
1912
                last_above_wl -= 1
5✔
1913
                vol_last = section[last_above_wl] * fl.dx_meter
5✔
1914

1915
            # We update the glacier with our changes
1916
            fl.section = section
6✔
1917

1918
        # Next step
1919
        self.t += dt
8✔
1920
        return dt
8✔
1921

1922
    def get_diagnostics(self, fl_id=-1):
9✔
1923
        """Obtain model diagnostics in a pandas DataFrame.
1924

1925
        Velocities in OGGM's FluxBasedModel are sometimes subject to
1926
        numerical instabilities. To deal with the issue, you can either
1927
        set a smaller ``PARAMS['cfl_number']`` (e.g. 0.01) or smooth the
1928
        output a bit, e.g. with ``df.rolling(5, center=True, min_periods=1).mean()``
1929

1930
        Parameters
1931
        ----------
1932
        fl_id : int
1933
            the index of the flowline of interest, from 0 to n_flowline-1.
1934
            Default is to take the last (main) one
1935

1936
        Returns
1937
        -------
1938
        a pandas DataFrame whose index is the distance along the flowline (m). Units:
1939
            - surface_h, bed_h, ice_thick, section_width: m
1940
            - section_area: m2
1941
            - slope: -
1942
            - ice_flux, tributary_flux: m3 of *ice* per second
1943
            - ice_velocity: m per second (depth-section integrated)
1944
            - surface_ice_velocity: m per second (corrected for surface - simplified)
1945
        """
1946
        import pandas as pd
2✔
1947

1948
        fl = self.fls[fl_id]
2✔
1949
        nx = fl.nx
2✔
1950

1951
        df = pd.DataFrame(index=fl.dx_meter * np.arange(nx))
2✔
1952
        df.index.name = 'distance_along_flowline'
2✔
1953
        df['surface_h'] = fl.surface_h
2✔
1954
        df['bed_h'] = fl.bed_h
2✔
1955
        df['ice_thick'] = fl.thick
2✔
1956
        df['section_width'] = fl.widths_m
2✔
1957
        df['section_area'] = fl.section
2✔
1958

1959
        # Staggered
1960
        var = self.slope_stag[fl_id]
2✔
1961
        df['slope'] = (var[1:nx+1] + var[:nx])/2
2✔
1962
        var = self.flux_stag[fl_id]
2✔
1963
        df['ice_flux'] = (var[1:nx+1] + var[:nx])/2
2✔
1964
        var = self.u_stag[fl_id]
2✔
1965
        df['ice_velocity'] = (var[1:nx+1] + var[:nx])/2
2✔
1966
        df['surface_ice_velocity'] = df['ice_velocity'] * self._surf_vel_fac
2✔
1967
        var = self.shapefac_stag[fl_id]
2✔
1968
        df['shape_fac'] = (var[1:nx+1] + var[:nx])/2
2✔
1969

1970
        # Not Staggered
1971
        df['tributary_flux'] = self.trib_flux[fl_id]
2✔
1972

1973
        return df
2✔
1974

1975
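# ---------------------------------------------------------------------------
# Editor's note: a hypothetical sketch (not library code) of how the
# diagnostics above are typically post-processed. ``fl`` and ``mb`` refer to
# the idealized setup sketched near ``run_until_equilibrium``; the flux-gate
# value and the smoothing window are arbitrary illustrations of the advice
# given in the ``get_diagnostics`` docstring.
#
#   import oggm.cfg as cfg
#
#   model = FluxBasedModel([fl], mb_model=mb, y0=0.,
#                          flux_gate=0.03, flux_gate_build_up=100)
#   model.run_until(500)
#   df = model.get_diagnostics()  # main flowline by default
#   # smooth the numerically noisy velocities and convert to m yr-1
#   smooth = df[['ice_velocity', 'surface_ice_velocity']].rolling(
#       5, center=True, min_periods=1).mean() * cfg.SEC_IN_YEAR
# ---------------------------------------------------------------------------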

1976
class MassConservationChecker(FluxBasedModel):
9✔
1977
    """This checks if the FluxBasedModel is conserving mass."""
1978

1979
    def __init__(self, flowlines, **kwargs):
9✔
1980
        """ Instantiate.
1981

1982
        Parameters
1983
        ----------
1984

1985
        """
1986
        super(MassConservationChecker, self).__init__(flowlines, **kwargs)
1✔
1987
        self.total_mass = 0.
1✔
1988

1989
    def step(self, dt):
9✔
1990

1991
        mbs = []
1✔
1992
        sections = []
1✔
1993
        for fl in self.fls:
1✔
1994
            # Mass balance
1995
            widths = fl.widths_m
1✔
1996
            mb = self.get_mb(fl.surface_h, self.yr, fl_id=id(fl))
1✔
1997
            mbs.append(mb * widths)
1✔
1998
            sections.append(np.copy(fl.section))
1✔
1999
            dx = fl.dx_meter
1✔
2000

2001
        dt = super(MassConservationChecker, self).step(dt)
1✔
2002

2003
        for mb, sec in zip(mbs, sections):
1✔
2004
            mb = dt * mb
1✔
2005
            # there can't be more negative mb than there is section
2006
            # this isn't an exact solution unfortunately
2007
            # TODO: exact solution for mass conservation
2008
            mb = utils.clip_min(mb, -sec)
1✔
2009
            self.total_mass += np.sum(mb * dx)
1✔
2010

2011

2012
class KarthausModel(FlowlineModel):
9✔
2013
    """The actual model"""
2014

2015
    def __init__(self, flowlines, mb_model=None, y0=0., glen_a=None, fs=0.,
9✔
2016
                 fixed_dt=None, min_dt=SEC_IN_DAY, max_dt=31*SEC_IN_DAY,
2017
                 inplace=False, **kwargs):
2018
        """ Instantiate.
2019

2020
        Parameters
2021
        ----------
2022
        """
2023

2024
        if len(flowlines) > 1:
1✔
UNCOV
2025
            raise ValueError('Karthaus model does not work with tributaries.')
×
2026

2027
        super(KarthausModel, self).__init__(flowlines, mb_model=mb_model,
1✔
2028
                                            y0=y0, glen_a=glen_a, fs=fs,
2029
                                            inplace=inplace, **kwargs)
2030
        self.dt_warning = False
1✔
2031
        if fixed_dt is not None:
1✔
2032
            min_dt = fixed_dt
1✔
2033
            max_dt = fixed_dt
1✔
2034
        self.min_dt = min_dt
1✔
2035
        self.max_dt = max_dt
1✔
2036

2037
    def step(self, dt):
9✔
2038
        """Advance one step."""
2039

2040
        # Just a check to avoid useless computations
2041
        if dt <= 0:
1✔
2042
            raise InvalidParamsError('dt needs to be strictly positive')
1✔
2043

2044
        # This is to guarantee a precise arrival on a specific date if asked
2045
        min_dt = dt if dt < self.min_dt else self.min_dt
1✔
2046
        dt = utils.clip_scalar(dt, min_dt, self.max_dt)
1✔
2047

2048
        fl = self.fls[0]
1✔
2049
        dx = fl.dx_meter
1✔
2050
        width = fl.widths_m
1✔
2051
        thick = fl.thick
1✔
2052

2053
        MassBalance = self.get_mb(fl.surface_h, self.yr, fl_id=id(fl))
1✔
2054

2055
        SurfaceHeight = fl.surface_h
1✔
2056

2057
        # Surface gradient
2058
        SurfaceGradient = np.zeros(fl.nx)
1✔
2059
        SurfaceGradient[1:fl.nx-1] = (SurfaceHeight[2:] -
1✔
2060
                                      SurfaceHeight[:fl.nx-2])/(2*dx)
2061
        SurfaceGradient[-1] = 0
1✔
2062
        SurfaceGradient[0] = 0
1✔
2063

2064
        # Diffusivity
2065
        N = self.glen_n
1✔
2066
        Diffusivity = width * (self.rho*G)**3 * thick**3 * SurfaceGradient**2
1✔
2067
        Diffusivity *= 2/(N+2) * self.glen_a * thick**2 + self.fs
1✔
2068

2069
        # on stagger
2070
        DiffusivityStaggered = np.zeros(fl.nx)
1✔
2071
        SurfaceGradientStaggered = np.zeros(fl.nx)
1✔
2072

2073
        DiffusivityStaggered[1:] = (Diffusivity[:fl.nx-1] + Diffusivity[1:])/2.
1✔
2074
        DiffusivityStaggered[0] = Diffusivity[0]
1✔
2075

2076
        SurfaceGradientStaggered[1:] = (SurfaceHeight[1:] -
1✔
2077
                                        SurfaceHeight[:fl.nx-1])/dx
2078
        SurfaceGradientStaggered[0] = 0
1✔
2079

2080
        GradxDiff = SurfaceGradientStaggered * DiffusivityStaggered
1✔
2081

2082
        # Explicit update of the ice thickness
2083
        NewIceThickness = np.zeros(fl.nx)
1✔
2084
        NewIceThickness[:fl.nx-1] = (thick[:fl.nx-1] + (dt/width[0:fl.nx-1]) *
1✔
2085
                                     (GradxDiff[1:]-GradxDiff[:fl.nx-1])/dx +
2086
                                     dt * MassBalance[:fl.nx-1])
2087

2088
        NewIceThickness[-1] = thick[fl.nx-2]
1✔
2089

2090
        fl.thick = utils.clip_min(NewIceThickness, 0)
1✔
2091

2092
        # Next step
2093
        self.t += dt
1✔
2094
        return dt
1✔
2095

2096

2097
class SemiImplicitModel(FlowlineModel):
9✔
2098
    """Semi implicit flowline model.
2099

2100
    It solves the same equation as the FluxBasedModel, but the ice flux q is
2101
    implemented as q^t = D^t * (ds/dx)^(t+1).
2102

2103
    It supports only a single flowline (no tributaries) with bed shapes
2104
    rectangular, trapezoidal or a mixture of both.
2105
    """
2106

2107
    def __init__(self, flowlines, mb_model=None, y0=0., glen_a=None, fs=0.,
9✔
2108
                 inplace=False, fixed_dt=None, cfl_number=0.5, min_dt=None,
2109
                 **kwargs):
2110
        """Instantiate the model.
2111

2112
        Parameters
2113
        ----------
2114
        flowlines : list
2115
            the glacier flowlines
2116
        mb_model : MassBalanceModel
2117
            the mass balance model
2118
        y0 : int
2119
            initial year of the simulation
2120
        glen_a : float
2121
            Glen's creep parameter
2122
        fs : float
2123
            Oerlemans sliding parameter
2124
        inplace : bool
2125
            whether or not to make a copy of the flowline objects for the run;
2126
            setting to True implies that your objects will be modified at run
2127
            time by the model (can help to spare memory)
2128
        fixed_dt : float
2129
            set to a value (in seconds) to prevent adaptive time-stepping.
2130
        cfl_number : float
2131
            For adaptive time stepping (the default), dt is chosen from the
2132
            CFL criterion (dt = cfl_number * dx^2 / max(D/w)).
2133
            Can be set to higher values compared to the FluxBasedModel.
2134
            Default is 0.5, but this needs further investigation.
2135
        min_dt : float
2136
            Defaults to cfg.PARAMS['cfl_min_dt'].
2137
            At high velocities, time steps can become very small and your
2138
            model might run very slowly. In production, it might be useful to
2139
            set a limit below which the model will just error.
2140
        kwargs : dict
2141
            Further keyword arguments for FlowlineModel
2142

2143
        """
2144

2145
        super(SemiImplicitModel, self).__init__(flowlines, mb_model=mb_model,
2✔
2146
                                                y0=y0, glen_a=glen_a, fs=fs,
2147
                                                inplace=inplace, **kwargs)
2148

2149
        if len(self.fls) > 1:
2✔
UNCOV
2150
            raise ValueError('Implicit model does not work with '
×
2151
                             'tributaries.')
2152

2153
        # convert pure RectangularBedFlowline to TrapezoidalBedFlowline with
2154
        # lambda = 0
2155
        if isinstance(self.fls[0], RectangularBedFlowline):
2✔
2156
            self.fls[0] = TrapezoidalBedFlowline(
1✔
2157
                line=self.fls[-1].line, dx=self.fls[-1].dx,
2158
                map_dx=self.fls[-1].map_dx, surface_h=self.fls[-1].surface_h,
2159
                bed_h=self.fls[-1].bed_h, widths=self.fls[-1].widths,
2160
                lambdas=0, rgi_id=self.fls[-1].rgi_id,
2161
                water_level=self.fls[-1].water_level, gdir=None)
2162

2163
        if isinstance(self.fls[0], MixedBedFlowline):
2✔
2164
            if ~np.all(self.fls[0].is_trapezoid):
2✔
UNCOV
2165
                raise ValueError('Implicit model only works with a pure '
×
2166
                                 'trapezoidal flowline! But different lambdas '
2167
                                 'along the flowline are possible (lambda=0 is '
2168
                                 'rectangular).')
2169
        elif not isinstance(self.fls[0], TrapezoidalBedFlowline):
1✔
UNCOV
2170
            raise ValueError('Implicit model only works with a pure '
×
2171
                             'trapezoidal flowline! But different lambdas '
2172
                             'along the flowline are possible (lambda=0 is '
2173
                             'rectangular).')
2174

2175
        if cfg.PARAMS['use_kcalving_for_run']:
2✔
2176
            raise NotImplementedError("Calving is not implemented in the "
2177
                                      "SemiImplicitModel! Set "
2178
                                      "cfg.PARAMS['use_kcalving_for_run'] = "
2179
                                      "False.")
2180

2181
        self.fixed_dt = fixed_dt
2✔
2182
        if min_dt is None:
2✔
2183
            min_dt = cfg.PARAMS['cfl_min_dt']
2✔
2184
        self.min_dt = min_dt
2✔
2185

2186
        if cfl_number is None:
2✔
UNCOV
2187
            cfl_number = cfg.PARAMS['cfl_number']
×
2188
        if cfl_number < 0.1:
2✔
UNCOV
2189
            raise InvalidParamsError("For the SemiImplicitModel you can use "
×
2190
                                     "cfl numbers in the order of 0.1 - 0.5 "
2191
                                     f"(you set {cfl_number}).")
2192
        self.cfl_number = cfl_number
2✔
2193

2194
        # Special output
2195
        self._surf_vel_fac = (self.glen_n + 2) / (self.glen_n + 1)
2✔
2196

2197
        # optim
2198
        nx = self.fls[-1].nx
2✔
2199
        bed_h_exp = np.concatenate(([self.fls[-1].bed_h[0]],
2✔
2200
                                    self.fls[-1].bed_h,
2201
                                    [self.fls[-1].bed_h[-1]]))
2202
        self.dbed_h_exp_dx = ((bed_h_exp[1:] - bed_h_exp[:-1]) /
2✔
2203
                              self.fls[0].dx_meter)
2204
        self.d_stag = [np.zeros(nx + 1)]
2✔
2205
        self.d_matrix_banded = np.zeros((3, nx))
2✔
2206
        w0 = self.fls[0]._w0_m
2✔
2207
        self.w0_stag = (w0[0:-1] + w0[1:]) / 2
2✔
2208
        self.rhog = (self.rho * G) ** self.glen_n
2✔
2209

2210
        # variables needed for the calculation of some diagnostics; these
2211
        # calculations are done with @property, because they are not computed
2212
        # on the fly during the dynamic run as in FluxBasedModel
2213
        self._u_stag = [np.zeros(nx + 1)]
2✔
2214
        self._flux_stag = [np.zeros(nx + 1)]
2✔
2215
        self._slope_stag = [np.zeros(nx + 1)]
2✔
2216
        self._thick_stag = [np.zeros(nx + 1)]
2✔
2217
        self._section_stag = [np.zeros(nx + 1)]
2✔
2218

2219
    @property
9✔
2220
    def slope_stag(self):
9✔
2221
        slope_stag = self._slope_stag[0]
2✔
2222

2223
        surface_h = self.fls[0].surface_h
2✔
2224
        dx = self.fls[0].dx_meter
2✔
2225

2226
        slope_stag[0] = 0
2✔
2227
        slope_stag[1:-1] = (surface_h[0:-1] - surface_h[1:]) / dx
2✔
2228
        slope_stag[-1] = slope_stag[-2]
2✔
2229

2230
        return [slope_stag]
2✔
2231

2232
    @property
9✔
2233
    def thick_stag(self):
9✔
2234
        thick_stag = self._thick_stag[0]
2✔
2235

2236
        thick = self.fls[0].thick
2✔
2237

2238
        thick_stag[1:-1] = (thick[0:-1] + thick[1:]) / 2.
2✔
2239
        thick_stag[[0, -1]] = thick[[0, -1]]
2✔
2240

2241
        return [thick_stag]
2✔
2242

2243
    @property
9✔
2244
    def section_stag(self):
9✔
2245
        section_stag = self._section_stag[0]
1✔
2246

2247
        section = self.fls[0].section
1✔
2248

2249
        section_stag[1:-1] = (section[0:-1] + section[1:]) / 2.
1✔
2250
        section_stag[[0, -1]] = section[[0, -1]]
1✔
2251

2252
        return [section_stag]
1✔
2253

2254
    @property
9✔
2255
    def u_stag(self):
9✔
2256
        u_stag = self._u_stag[0]
2✔
2257

2258
        slope_stag = self.slope_stag[0]
2✔
2259
        thick_stag = self.thick_stag[0]
2✔
2260
        N = self.glen_n
2✔
2261
        rhog = self.rhog
2✔
2262

2263
        rhogh = rhog * slope_stag ** N
2✔
2264

2265
        u_stag[:] = ((thick_stag**(N+1)) * self._fd * rhogh +
2✔
2266
                     (thick_stag**(N-1)) * self.fs * rhogh)
2267

2268
        return [u_stag]
2✔
2269

2270
    @property
9✔
2271
    def flux_stag(self):
9✔
2272
        flux_stag = self._flux_stag[0]
1✔
2273

2274
        section_stag = self.section_stag[0]
1✔
2275
        u_stag = self.u_stag[0]
1✔
2276

2277
        flux_stag[:] = u_stag * section_stag
1✔
2278

2279
        return [flux_stag]
1✔
2280

2281
    def step(self, dt):
9✔
2282
        """Advance one step."""
2283

2284
        # Just a check to avoid useless computations
2285
        if dt <= 0:
2✔
2286
            raise InvalidParamsError('dt needs to be strictly positive')
1✔
2287

2288
        # read out variables from current flowline
2289
        fl = self.fls[0]
2✔
2290
        dx = fl.dx_meter
2✔
2291
        width = fl.widths_m
2✔
2292
        thick = fl.thick
2✔
2293
        surface_h = fl.surface_h
2✔
2294

2295
        # some variables needed later
2296
        N = self.glen_n
2✔
2297
        rhog = self.rhog
2✔
2298

2299
        # calculate staggered variables
2300
        width_stag = (width[0:-1] + width[1:]) / 2
2✔
2301
        w0_stag = self.w0_stag
2✔
2302
        thick_stag = (thick[0:-1] + thick[1:]) / 2.
2✔
2303
        dsdx_stag = (surface_h[1:] - surface_h[0:-1]) / dx
2✔
2304

2305
        # calculate diffusivity
2306
        # boundary condition d_stag_0 = d_stag_end = 0
2307
        d_stag = self.d_stag[0]
2✔
2308
        d_stag[1:-1] = ((self._fd * thick_stag ** (N + 2) +
2✔
2309
                         self.fs * thick_stag ** N) * rhog *
2310
                        (w0_stag + width_stag) / 2 *
2311
                        np.abs(dsdx_stag) ** (N - 1))
2312

2313
        # Time step
2314
        if self.fixed_dt:
2✔
2315
            # change only if step dt is larger than the chosen dt
2316
            if self.fixed_dt < dt:
2✔
2317
                dt = self.fixed_dt
2✔
2318
        else:
2319
            # use stability criterion dt <= dx^2 / max(D/w) * cfl_number
2320
            divisor = np.max(np.abs(d_stag[1:-1] / width_stag))
2✔
2321
            if divisor > cfg.FLOAT_EPS:
2✔
2322
                cfl_dt = self.cfl_number * dx ** 2 / divisor
2✔
2323
            else:
2324
                cfl_dt = dt
1✔
2325

2326
            if cfl_dt < dt:
2✔
2327
                dt = cfl_dt
2✔
2328
                if cfl_dt < self.min_dt:
2✔
UNCOV
2329
                    raise RuntimeError(
×
2330
                        'CFL error: required time step smaller '
2331
                        'than the minimum allowed: '
2332
                        '{:.1f}s vs {:.1f}s. Happening at '
2333
                        'simulation year {:.1f}, fl_id {}, '
2334
                        'bin_id {} and max_D {:.3f} m2 yr-1.'
2335
                        ''.format(cfl_dt, self.min_dt, self.yr, 0,
2336
                                  np.argmax(np.abs(d_stag)),
2337
                                  divisor * cfg.SEC_IN_YEAR))
2338

2339
        # calculate diagonals of Amat
2340
        d0 = dt / dx ** 2 * (d_stag[:-1] + d_stag[1:]) / width
2✔
2341
        dm = - dt / dx ** 2 * d_stag[:-1] / width
2✔
2342
        dp = - dt / dx ** 2 * d_stag[1:] / width
2✔
2343

2344
        # construct banded form of the matrix, which is used during solving
2345
        # (see https://docs.scipy.org/doc/scipy/reference/generated/scipy.linalg.solve_banded.html)
2346
        # original matrix:
2347
        # d_matrix = (np.diag(dp[:-1], 1) +
2348
        #             np.diag(np.ones(len(d0)) + d0) +
2349
        #             np.diag(dm[1:], -1))
2350
        self.d_matrix_banded[0, 1:] = dp[:-1]
2✔
2351
        self.d_matrix_banded[1, :] = np.ones(len(d0)) + d0
2✔
2352
        self.d_matrix_banded[2, :-1] = dm[1:]
2✔
2353

2354
        # correction term for glacier bed (original equation is an equation for
2355
        # the surface height s, which is transformed into an equation for h; as
2356
        # s = h + b, the term below comes from the '- b')
2357
        b_corr = - d_stag * self.dbed_h_exp_dx
2✔
2358

2359
        # prepare rhs
2360
        smb = self.get_mb(surface_h, self.yr, fl_id=0)
2✔
2361
        rhs = thick + smb * dt + dt / width * (b_corr[:-1] - b_corr[1:]) / dx
2✔
2362

2363
        # solve matrix and update flowline thickness
2364
        thick_new = utils.clip_min(
2✔
2365
            solve_banded((1, 1), self.d_matrix_banded, rhs),
2366
            0)
2367
        fl.thick = thick_new
2✔
2368

2369
        # Next step
2370
        self.t += dt
2✔
2371

2372
        return dt
2✔
2373

2374
    def get_diagnostics(self, fl_id=-1):
9✔
2375
        """Obtain model diagnostics in a pandas DataFrame.
2376

2377
        Parameters
2378
        ----------
2379
        fl_id : int
2380
            the index of the flowline of interest, from 0 to n_flowline-1.
2381
            Default is to take the last (main) one
2382

2383
        Returns
2384
        -------
2385
        a pandas DataFrame whose index is the distance along the flowline (m). Units:
2386
            - surface_h, bed_h, ice_thick, section_width: m
2387
            - section_area: m2
2388
            - slope: -
2389
            - ice_flux, tributary_flux: m3 of *ice* per second
2390
            - ice_velocity: m per second (depth-section integrated)
2391
            - surface_ice_velocity: m per second (corrected for surface - simplified)
2392
        """
2393
        import pandas as pd
1✔
2394

2395
        fl = self.fls[fl_id]
1✔
2396
        nx = fl.nx
1✔
2397

2398
        df = pd.DataFrame(index=fl.dx_meter * np.arange(nx))
1✔
2399
        df.index.name = 'distance_along_flowline'
1✔
2400
        df['surface_h'] = fl.surface_h
1✔
2401
        df['bed_h'] = fl.bed_h
1✔
2402
        df['ice_thick'] = fl.thick
1✔
2403
        df['section_width'] = fl.widths_m
1✔
2404
        df['section_area'] = fl.section
1✔
2405

2406
        # Staggered
2407
        var = self.slope_stag[fl_id]
1✔
2408
        df['slope'] = (var[1:nx+1] + var[:nx])/2
1✔
2409
        var = self.flux_stag[fl_id]
1✔
2410
        df['ice_flux'] = (var[1:nx+1] + var[:nx])/2
1✔
2411
        var = self.u_stag[fl_id]
1✔
2412
        df['ice_velocity'] = (var[1:nx+1] + var[:nx])/2
1✔
2413
        df['surface_ice_velocity'] = df['ice_velocity'] * self._surf_vel_fac
1✔
2414

2415
        return df
1✔
2416

2417

2418
class FileModel(object):
9✔
2419
    """Duck FlowlineModel which actually reads data out of a nc file."""
2420

2421
    def __init__(self, path):
9✔
2422
        """ Instantiate."""
2423

2424
        self.fls = glacier_from_netcdf(path)
5✔
2425

2426
        fl_tss = []
5✔
2427
        for flid, fl in enumerate(self.fls):
5✔
2428
            with xr.open_dataset(path, group='fl_{}'.format(flid)) as ds:
5✔
2429
                if flid == 0:
5✔
2430
                    # Populate time
2431
                    self.time = ds.time.values
5✔
2432
                    try:
5✔
2433
                        self.years = ds.year.values
5✔
UNCOV
2434
                    except AttributeError:
×
UNCOV
2435
                        raise InvalidWorkflowError('The provided model output '
×
2436
                                                   'file is incomplete (likely '
2437
                                                   'when the previous '
2438
                                                   'run failed) or corrupt.')
2439
                    try:
5✔
2440
                        self.months = ds.month.values
5✔
2441
                    except AttributeError:
5✔
2442
                        self.months = self.years * 0 + 1
5✔
2443

2444
                # Read out the data
2445
                fl_data = {
5✔
2446
                    'ts_section': ds.ts_section.values,
2447
                    'ts_width_m': ds.ts_width_m.values,
2448
                }
2449
                try:
5✔
2450
                    fl_data['ts_calving_bucket_m3'] = ds.ts_calving_bucket_m3.values
5✔
UNCOV
2451
                except AttributeError:
×
UNCOV
2452
                    fl_data['ts_calving_bucket_m3'] = self.years * 0
×
2453
                fl_tss.append(fl_data)
5✔
2454

2455
        self.fl_tss = fl_tss
5✔
2456
        self.last_yr = float(ds.time[-1])
5✔
2457

2458
        # Calving diags
2459
        try:
5✔
2460
            with xr.open_dataset(path) as ds:
5✔
2461
                self._calving_m3_since_y0 = ds.calving_m3.values
5✔
2462
                self.water_level = ds.water_level
5✔
2463
                self.do_calving = True
5✔
UNCOV
2464
        except AttributeError:
×
UNCOV
2465
            self._calving_m3_since_y0 = 0
×
UNCOV
2466
            self.do_calving = False
×
UNCOV
2467
            self.water_level = 0
×
2468

2469
        # time
2470
        self.reset_y0()
5✔
2471

2472
    def __enter__(self):
9✔
2473
        warnings.warn('FileModel no longer needs to be run as a '
1✔
2474
                      'context manager. You can safely remove the '
2475
                      '`with` statement.', FutureWarning)
2476
        return self
1✔
2477

2478
    def __exit__(self, exc_type, exc_value, traceback):
9✔
2479
        pass
1✔
2480

2481
    def reset_y0(self, y0=None):
9✔
2482
        """Reset the initial model time"""
2483

2484
        if y0 is None:
5✔
2485
            y0 = float(self.time[0])
5✔
2486
        self.y0 = y0
5✔
2487
        self.yr = y0
5✔
2488
        self._current_index = 0
5✔
2489

2490
    @property
9✔
2491
    def area_m2(self):
9✔
2492
        return np.sum([f.area_m2 for f in self.fls])
1✔
2493

2494
    @property
9✔
2495
    def volume_m3(self):
9✔
2496
        return np.sum([f.volume_m3 for f in self.fls])
1✔
2497

2498
    @property
9✔
2499
    def volume_km3(self):
9✔
2500
        return self.volume_m3 * 1e-9
1✔
2501

2502
    @property
9✔
2503
    def area_km2(self):
9✔
2504
        return self.area_m2 * 1e-6
1✔
2505

2506
    @property
9✔
2507
    def length_m(self):
9✔
UNCOV
2508
        return self.fls[-1].length_m
×
2509

2510
    @property
9✔
2511
    def calving_m3_since_y0(self):
9✔
2512
        if self.do_calving:
1✔
2513
            return self._calving_m3_since_y0[self._current_index]
1✔
2514
        else:
UNCOV
2515
            return 0
×
2516

2517
    def run_until(self, year=None, month=None):
9✔
2518
        """Mimics the model's behavior.
2519

2520
        Note that this is quite slow.
2521
        """
2522
        try:
4✔
2523
            if month is not None:
4✔
2524
                pok = np.nonzero((self.years == year) & (self.months == month))[0][0]
1✔
2525
            else:
2526
                pok = np.nonzero(self.time == year)[0][0]
4✔
2527
        except IndexError as err:
1✔
2528
            raise IndexError('Index year={}, month={} not available in '
1✔
2529
                             'FileModel.'.format(year, month)) from err
2530

2531
        self.yr = self.time[pok]
4✔
2532
        self._current_index = pok
4✔
2533

2534
        for fl, fl_ts in zip(self.fls, self.fl_tss):
4✔
2535
            fl.section = fl_ts['ts_section'][pok, :]
4✔
2536
            fl.calving_bucket_m3 = fl_ts['ts_calving_bucket_m3'][pok]
4✔
2537

2538
    def area_m2_ts(self, rollmin=0):
9✔
2539
        """rollmin is the size (in years) of the rolling minimum window used for smoothing"""
2540
        sel = 0
3✔
2541
        for fl, fl_ts in zip(self.fls, self.fl_tss):
3✔
2542
            widths = np.where(fl_ts['ts_section'] > 0., fl_ts['ts_width_m'], 0.)
3✔
2543
            sel += widths.sum(axis=1) * fl.dx_meter
3✔
2544
        sel = pd.Series(data=sel, index=self.time, name='area_m2')
3✔
2545
        if rollmin != 0:
3✔
UNCOV
2546
            sel = sel.rolling(rollmin).min()
×
UNCOV
2547
            sel.iloc[0:rollmin] = sel.iloc[rollmin]
×
2548
        return sel
3✔
2549

2550
    def area_km2_ts(self, **kwargs):
9✔
2551
        return self.area_m2_ts(**kwargs) * 1e-6
3✔
2552

2553
    def volume_m3_ts(self):
9✔
2554
        sel = 0
3✔
2555
        for fl, fl_ts in zip(self.fls, self.fl_tss):
3✔
2556
            sel += fl_ts['ts_section'].sum(axis=1) * fl.dx_meter
3✔
2557
            sel -= fl_ts['ts_calving_bucket_m3']
3✔
2558
        return pd.Series(data=sel, index=self.time, name='volume_m3')
3✔
2559

2560
    def volume_km3_ts(self):
9✔
2561
        return self.volume_m3_ts() * 1e-9
3✔
2562

2563
    def length_m_ts(self, rollmin=0):
9✔
2564
        raise NotImplementedError('length_m_ts is no longer available in the '
2565
                                  'full output files. To obtain the length '
2566
                                  'time series, refer to the diagnostic '
2567
                                  'output file.')
2568

2569

2570
class MassRedistributionCurveModel(FlowlineModel):
9✔
2571
    """Glacier geometry updated using mass redistribution curves.
2572

2573
    Also known as the "delta-h method": This uses mass redistribution curves
2574
    from Huss et al. (2010) to update the glacier geometry.
2575

2576
    Code by David Rounce (PyGEM) and adapted by F. Maussion.
2577
    """
2578

2579
    def __init__(self, flowlines, mb_model=None, y0=0.,
9✔
2580
                 is_tidewater=False, water_level=None,
2581
                 do_kcalving=None, calving_k=None,
2582
                 advance_method=1,
2583
                 **kwargs):
2584
        """ Instantiate the model.
2585

2586
        Parameters
2587
        ----------
2588
        flowlines : list
2589
            the glacier flowlines
2590
        mb_model : MassBalanceModel
2591
            the mass balance model
2592
        y0 : int
2593
            initial year of the simulation
2594
        advance_method : int
2595
            different ways to handle positive MBs:
2596
            - 0: do nothing, i.e. simply let the glacier thicken in place
2597
                 instead of advancing
2598
            - 1: shift a fraction of the redistributed mass one grid point downstream
2599
            - 2: move a fraction of the positive mass change beyond the
2600
                 current terminus, filling the downstream bins
2601
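
        A minimal instantiation sketch (assuming ``fls`` and ``mb_model`` come
        from a preprocessed glacier directory; the years are illustrative)::

            model = MassRedistributionCurveModel(fls, mb_model=mb_model,
                                                 y0=2003, advance_method=1)
            model.run_until(2020)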
        """
2602
        super(MassRedistributionCurveModel, self).__init__(flowlines,
2✔
2603
                                                           mb_model=mb_model,
2604
                                                           y0=y0,
2605
                                                           water_level=water_level,
2606
                                                           mb_elev_feedback='annual',
2607
                                                           required_model_steps='annual',
2608
                                                           **kwargs)
2609

2610
        if len(self.fls) > 1:
2✔
UNCOV
2611
            raise InvalidWorkflowError('MassRedistributionCurveModel is not '
×
2612
                                       'set up for multiple flowlines')
2613
        fl = self.fls[0]
2✔
2614
        self.glac_idx_initial = fl.thick.nonzero()[0]
2✔
2615
        self.y0 = y0
2✔
2616

2617
        # Some ways to deal with positive MB
2618
        self.advance_method = advance_method
2✔
2619

2620
        # Frontal ablation shenanigans
2621
        if do_kcalving is None:
2✔
2622
            do_kcalving = cfg.PARAMS['use_kcalving_for_run']
2✔
2623
        self.do_calving = do_kcalving and is_tidewater
2✔
2624
        if calving_k is None:
2✔
2625
            calving_k = cfg.PARAMS['calving_k']
2✔
2626

2627
        self.is_tidewater = is_tidewater
2✔
2628
        self.calving_k = calving_k
2✔
2629
        self.calving_m3_since_y0 = 0.  # total calving since time y0
2✔
2630

2631
    def step(self, dt):
9✔
2632
        """Advance one step. Here it should be one year"""
2633

2634
        # Just a check to avoid useless computations
2635
        if dt <= 0:
2✔
UNCOV
2636
            raise InvalidParamsError('dt needs to be strictly positive')
×
2637
        if dt > cfg.SEC_IN_YEAR:
2✔
2638
            # This should not happen from how run_until is built, but
2639
            # to match the adaptive time stepping scheme of other models
2640
            # we don't complain here and just do one year
UNCOV
2641
            dt = cfg.SEC_IN_YEAR
×
2642

2643
        elif dt < cfg.SEC_IN_YEAR:
2✔
2644
            # Here however we complain - we really want one year exactly
UNCOV
2645
            raise InvalidWorkflowError('I was asked to run for less than one '
×
2646
                                       'year. Delta-H models can\'t do that.')
2647

2648
        # Flowline state
2649
        fl = self.fls[0]
2✔
2650
        fl_id = 0
2✔
2651

2652
        height = fl.surface_h.copy()
2✔
2653
        section = fl.section.copy()
2✔
2654
        thick = fl.thick.copy()
2✔
2655
        width = fl.widths_m.copy()
2✔
2656

2657
        # FRONTAL ABLATION
2658
        if self.do_calving:
2✔
2659
            raise NotImplementedError('Frontal ablation not there yet.')
2660

2661
        # Redistribute mass if glacier is still there
2662
        if not np.any(section > 0):
2✔
2663
            # Do nothing
2664
            self.t += dt
1✔
2665
            return dt
1✔
2666

2667
        # Mass redistribution according to Huss empirical curves
2668
        # Annual glacier mass balance [m ice s-1]
2669
        mb = self.get_mb(height, year=self.yr, fls=self.fls, fl_id=fl_id)
2✔
2670
        # [m ice yr-1]
2671
        mb *= cfg.SEC_IN_YEAR
2✔
2672

2673
        # Ok now to the bulk of it
2674
        # Mass redistribution according to empirical equations from
2675
        # Huss and Hock (2015) accounting for retreat/advance.
2676
        # glac_idx_initial is required to ensure that the glacier does not
2677
        # advance to area where glacier did not exist before
2678
        # (e.g., retreat and advance over a vertical cliff)
2679

2680
        # Note: since OGGM uses the DEM, heights along the flowline do not
2681
        # necessarily decrease, i.e., there can be overdeepenings along the
2682
        # flowlines that occur as the glacier retreats. This is problematic
2683
        # for 'adding' a bin downstream in cases of glacier advance because
2684
        # you'd be moving new ice to a higher elevation. To avoid this
2685
        # unrealistic case, in the event that this would occur, the
2686
        # overdeepening will simply fill up with ice first until it reaches
2687
        # an elevation where it would put new ice into a downstream bin.
2688

2689
        # Bin area [m2]
2690
        bin_area = width * fl.dx_meter
2✔
2691
        bin_area[thick == 0] = 0
2✔
2692

2693
        # Annual glacier-wide volume change [m3]
2694
        # units: m3 ice per year
2695
        glacier_delta_v = (mb * bin_area).sum()
2✔
2696

2697
        # For hindcast simulations, volume change is the opposite
2698
        # We don't implement this in OGGM right now
2699

2700
        # If volume loss is more than the glacier volume, melt everything and
2701
        # stop here. Otherwise, redistribute mass loss/gains across the glacier
2702
        glacier_volume_total = (section * fl.dx_meter).sum()
2✔
2703
        if (glacier_volume_total + glacier_delta_v) < 0:
2✔
2704
            # Set all to zero and return
2705
            fl.section *= 0
1✔
2706
            return
1✔
2707

2708
        # Determine where glacier exists
2709
        glac_idx = thick.nonzero()[0]
2✔
2710

2711
        # Compute bin volume change [m3 ice yr-1] after redistribution
2712
        bin_delta_v = mass_redistribution_curve_huss(height, bin_area, mb,
2✔
2713
                                                     glac_idx, glacier_delta_v)
2714

2715
        # Here per construction bin_delta_v should be approx equal to glacier_delta_v
2716
        np.testing.assert_allclose(bin_delta_v.sum(), glacier_delta_v)
2✔
2717

2718
        # Update cross sectional area
2719
        # relevant issue: https://github.com/OGGM/oggm/issues/941
2720
        #  volume change divided by length (dx); units m2
2721
        delta_s = bin_delta_v / fl.dx_meter
2✔
2722

2723
        fl.section = utils.clip_min(section + delta_s, 0)
2✔
2724
        if not np.any(delta_s > 0):
2✔
2725
            # We shrink - all good
2726
            fl.section = utils.clip_min(section + delta_s, 0)
2✔
2727
            # Done
2728
            self.t += dt
2✔
2729
            return dt
2✔
2730

2731
        # We grow - that's bad because growing is hard
2732

2733
        # First see what happens with thickness
2734
        # (per construction the redistribution curves are all positive btw)
2735
        fl.section = utils.clip_min(section + delta_s, 0)
2✔
2736
        # We decide if we really want to advance or if we don't care
2737
        # Matthias and Dave use 5m, I find that too much, because not
2738
        # redistributing may lead to runaway effects
2739
        dh_thres = 1  # in meters
2✔
2740
        if np.all((fl.thick - thick) <= dh_thres):
2✔
2741
            # That was not much increase return
2742
            self.t += dt
2✔
2743
            return dt
2✔
2744

2745
        # Ok, we really grow then - back to previous state and decide on what to do
2746
        fl.section = section
2✔
2747

2748
        if (fl.thick[-2] > 0) or (self.advance_method == 0):
2✔
2749
            # Do not advance (same as in the melting case but thickening)
2750
            fl.section = utils.clip_min(section + delta_s, 0)
1✔
2751

2752
        if self.advance_method == 1:
2✔
2753
            # Just shift the redistribution by one pix
2754
            new_delta_s = np.append([0], delta_s)[:-1]
2✔
2755

2756
            # Redis param - how much of mass we want to shift (0 - 1)
2757
            # 0 would be like method 0 (do nothing)
2758
            # 1 would be to shift all mass by 1 pixel
2759
            a = 0.2
2✔
2760
            new_delta_s = a * new_delta_s + (1 - a) * delta_s
2✔
2761

2762
            # Make sure we are still preserving mass
2763
            new_delta_s *= delta_s.sum() / new_delta_s.sum()
2✔
2764

2765
            # Update section
2766
            fl.section = utils.clip_min(section + new_delta_s, 0)
2✔
2767

2768
        elif self.advance_method == 2:
2✔
2769

2770
            # How much of what's positive do we want to add in front
2771
            redis_perc = 0.01  # as a fraction (i.e. 1%)
2✔
2772

2773
            # Decide on the volume that needs to be redistributed
2774
            section_redis = delta_s * redis_perc
2✔
2775

2776
            # The rest is added where it was
2777
            fl.section = utils.clip_min(section + delta_s - section_redis, 0)
2✔
2778

2779
            # Then lets put this "volume" where we can
2780
            section_redis = section_redis.sum()
2✔
2781
            while section_redis > 0:
2✔
2782
                # Get the terminus grid point
2783
                orig_section = fl.section.copy()
2✔
2784
                p_term = np.nonzero(fl.thick > 0)[0][-1]
2✔
2785

2786
                # Put ice on the next bin, until ice is as high as terminus
2787
                # Anything else would require slope assumptions, but it would
2788
                # be possible
2789
                new_thick = fl.surface_h[p_term] - fl.bed_h[p_term + 1]
2✔
2790
                if new_thick > 0:
2✔
2791
                    # No deepening
2792
                    fl.thick[p_term + 1] = new_thick
2✔
2793
                    new_section = fl.section[p_term + 1]
2✔
2794
                    if new_section > section_redis:
2✔
2795
                        # This would be too much, just add what we have
2796
                        orig_section[p_term + 1] = section_redis
2✔
2797
                        fl.section = orig_section
2✔
2798
                        section_redis = 0
2✔
2799
                    else:
2800
                        # OK this bin is done, continue
2801
                        section_redis -= new_section
1✔
2802
                else:
2803
                    # We have a deepening, or we have to climb
2804
                    target_h = fl.bed_h[p_term + 1] + 1
1✔
2805
                    to_fill = (fl.surface_h < target_h) & (fl.thick > 0)
1✔
2806

2807
                    # Theoretical section area needed
2808
                    fl.thick[to_fill] = target_h - fl.bed_h[to_fill]
1✔
2809
                    new_section = fl.section.sum() - orig_section.sum()
1✔
2810
                    if new_section > section_redis:
1✔
2811
                        # This would be too much, just add what we have
2812
                        orig_section[to_fill] += new_section / np.sum(to_fill)
1✔
2813
                        fl.section = orig_section
1✔
2814
                        section_redis = 0
1✔
2815
                    else:
2816
                        # OK this bin is done, continue
2817
                        section_redis -= new_section
1✔
2818

2819
        elif self.advance_method == 3:
1✔
2820
            # A bit closer to Huss and Rounce maybe?
UNCOV
2821
            raise RuntimeError('not yet')
×
2822

2823
        # Done
2824
        self.t += dt
2✔
2825
        return dt
2✔
2826

2827

2828
def mass_redistribution_curve_huss(height, bin_area, mb, glac_idx, glacier_delta_v):
9✔
2829
    """Apply the mass redistribution curves from Huss and Hock (2015).
2830

2831
    This has to be followed by additional logic which takes retreat and
2832
    advance into account.
2833

2834
    Parameters
2835
    ----------
2836
    height : np.ndarray
2837
        Glacier elevation [m] from previous year for each elevation bin
2838
    bin_area : np.ndarray
2839
        Glacier area [m2] from previous year for each elevation bin
2840
    mb : np.ndarray
2841
        Annual climatic mass balance [m ice yr-1] for each elevation bin for
2842
        a single year
2843
    glac_idx : np.ndarray
2844
        glacier indices for present timestep
2845
    glacier_delta_v : float
2846
        glacier-wide volume change [m3 ice yr-1] based on the annual mb
2847

2848
    Returns
2849
    -------
2850
    bin_volume_change : np.ndarray
2851
        Ice volume change [m3 yr-1] for each elevation bin
2852
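
    A minimal usage sketch (arrays as documented above; per construction the
    returned values sum up to approximately ``glacier_delta_v``)::

        dv_bins = mass_redistribution_curve_huss(height, bin_area, mb,
                                                 glac_idx, glacier_delta_v)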
    """
2853

2854
    # Apply mass redistribution curve
2855
    if glac_idx.shape[0] <= 3:
2✔
2856
        # No need for a curve when glacier is so small
2857
        return mb * bin_area
1✔
2858

2859
    # Select the parameters based on glacier area
2860
    if bin_area.sum() * 1e-6 > 20:
2✔
UNCOV
2861
        gamma, a, b, c = (6, -0.02, 0.12, 0)
×
2862
    elif bin_area.sum() * 1e-6 > 5:
2✔
2863
        gamma, a, b, c = (4, -0.05, 0.19, 0.01)
1✔
2864
    else:
2865
        gamma, a, b, c = (2, -0.30, 0.60, 0.09)
2✔
2866

2867
    # reset variables
2868
    delta_h_norm = bin_area * 0
2✔
2869

2870
    # Normalized elevation range [-]
2871
    #  (max elevation - bin elevation) / (max_elevation - min_elevation)
2872
    gla_height = height[glac_idx]
2✔
2873
    max_elev, min_elev = gla_height.max(), gla_height.min()
2✔
2874
    h_norm = (max_elev - gla_height) / (max_elev - min_elev)
2✔
2875

2876
    #  using indices as opposed to elevations automatically skips bins on
2877
    #  the glacier that have no area such that the normalization is done
2878
    #  only on bins where the glacier lies
2879
    # Normalized ice thickness change [-]
2880
    delta_h_norm[glac_idx] = (h_norm + a) ** gamma + b * (h_norm + a) + c
2✔
2881

2882
    # delta_h = (h_n + a)**gamma + b*(h_n + a) + c
2883
    # limit normalized ice thickness change to between 0 - 1
2884
    delta_h_norm = utils.clip_array(delta_h_norm, 0, 1)
2✔
2885

2886
    # Huss' ice thickness scaling factor, fs_huss [m ice]
2887
    #  units: m3 / (m2 * [-]) = m ice
2888
    fs_huss = glacier_delta_v / (bin_area * delta_h_norm).sum()
2✔
2889

2890
    # Volume change [m3 ice yr-1]
2891
    return delta_h_norm * fs_huss * bin_area
2✔
2892

2893

2894
def flowline_from_dataset(ds):
9✔
2895
    """Instantiates a flowline from an xarray Dataset."""
2896

2897
    cl = globals()[ds.attrs['class']]
5✔
2898
    line = shpg.LineString(ds['linecoords'].values)
5✔
2899
    args = dict(line=line, dx=ds.dx, map_dx=ds.map_dx,
5✔
2900
                surface_h=ds['surface_h'].values,
2901
                bed_h=ds['bed_h'].values)
2902

2903
    have = {'c', 'x', 'surface_h', 'linecoords', 'bed_h', 'z', 'p', 'n',
5✔
2904
            'time', 'month', 'year', 'ts_width_m', 'ts_section',
2905
            'ts_calving_bucket_m3'}
2906
    missing_vars = set(ds.variables.keys()).difference(have)
5✔
2907
    for k in missing_vars:
5✔
2908
        data = ds[k].values
5✔
2909
        if ds[k].dims[0] == 'z':
5✔
UNCOV
2910
            data = data[0]
×
2911
        args[k] = data
5✔
2912
    return cl(**args)
5✔
2913

2914

2915
def glacier_from_netcdf(path):
9✔
2916
    """Instantiates a list of flowlines from a model geometry netCDF file."""
2917
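
    # A minimal usage sketch (reads back a model geometry file written by a
    # previous run; the file path is illustrative):
    #
    #     fls = glacier_from_netcdf(gdir.get_filepath('model_geometry'))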

2918
    with xr.open_dataset(path) as ds:
5✔
2919
        fls = []
5✔
2920
        for flid in ds['flowlines'].values:
5✔
2921
            with xr.open_dataset(path, group='fl_{}'.format(flid)) as _ds:
5✔
2922
                fls.append(flowline_from_dataset(_ds))
5✔
2923

2924
        for i, fid in enumerate(ds['flows_to_id'].values):
5✔
2925
            if fid != -1:
5✔
2926
                fls[i].set_flows_to(fls[fid])
4✔
2927

2928
    # Adds the line level
2929
    for fl in fls:
5✔
2930
        fl.order = line_order(fl)
5✔
2931

2932
    return fls
5✔
2933

2934

2935
def calving_glacier_downstream_line(line, n_points):
9✔
2936
    """Extends a calving glacier flowline past the terminus."""
2937
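
    # A minimal usage sketch (parameters as used in init_present_time_glacier):
    #
    #     n_points = cfg.PARAMS['calving_line_extension']
    #     new_line = calving_glacier_downstream_line(cl.line, n_points=n_points)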
    if line is None:
5✔
2938
        return None
1✔
2939
    x, y = line.coords.xy
5✔
2940
    dx = x[-1] - x[-2]
5✔
2941
    dy = y[-1] - y[-2]
5✔
2942
    x = np.append(x, x[-1] + dx * np.arange(1, n_points+1))
5✔
2943
    y = np.append(y, y[-1] + dy * np.arange(1, n_points+1))
5✔
2944
    return shpg.LineString(np.array([x, y]).T)
5✔
2945

2946

2947
def old_init_present_time_glacier(gdir, filesuffix=''):
9✔
2948
    """Init_present_time_glacier when trapezoid inversion was not possible."""
2949

2950
    # Some vars
2951
    map_dx = gdir.grid.dx
1✔
2952
    def_lambda = cfg.PARAMS['trapezoid_lambdas']
1✔
2953
    min_shape = cfg.PARAMS['mixed_min_shape']
1✔
2954

2955
    cls = gdir.read_pickle('inversion_flowlines')
1✔
2956
    invs = gdir.read_pickle('inversion_output')
1✔
2957

2958
    # Fill the tributaries
2959
    new_fls = []
1✔
2960
    flows_to_ids = []
1✔
2961
    for cl, inv in zip(cls, invs):
1✔
2962

2963
        # Get the data to make the model flowlines
2964
        line = cl.line
1✔
2965
        section = inv['volume'] / (cl.dx * map_dx)
1✔
2966
        surface_h = cl.surface_h
1✔
2967
        bed_h = surface_h - inv['thick']
1✔
2968
        widths_m = cl.widths * map_dx
1✔
2969

2970
        assert np.all(widths_m > 0)
1✔
2971
        bed_shape = 4 * inv['thick'] / (cl.widths * map_dx) ** 2
1✔
2972

2973
        lambdas = inv['thick'] * np.NaN
1✔
2974
        lambdas[bed_shape < min_shape] = def_lambda
1✔
2975
        lambdas[inv['is_rectangular']] = 0.
1✔
2976

2977
        # Last pixels of non-tidewater glaciers are always parabolic (see below)
2978
        if not gdir.is_tidewater and inv['is_last']:
1✔
2979
            lambdas[-5:] = np.nan
1✔
2980

2981
        # Update bed_h where we now have a trapeze
2982
        w0_m = cl.widths * map_dx - lambdas * inv['thick']
1✔
2983
        b = 2 * w0_m
1✔
2984
        a = 2 * lambdas
1✔
2985
        with np.errstate(divide='ignore', invalid='ignore'):
1✔
2986
            thick = (np.sqrt(b ** 2 + 4 * a * section) - b) / a
1✔
2987
        ptrap = (lambdas != 0) & np.isfinite(lambdas)
1✔
2988
        bed_h[ptrap] = cl.surface_h[ptrap] - thick[ptrap]
1✔
2989

2990
        # For the very last pixels of a glacier, the section might be zero after
2991
        # the inversion, and the bedshapes are chaotic. We interpolate from
2992
        # the downstream. This is not volume conservative
2993
        if not gdir.is_tidewater and inv['is_last']:
1✔
2994
            dic_ds = gdir.read_pickle('downstream_line')
1✔
2995
            bed_shape[-5:] = np.nan
1✔
2996

2997
            # Interpolate
2998
            bed_shape = utils.interp_nans(np.append(bed_shape,
1✔
2999
                                                    dic_ds['bedshapes'][0]))
3000
            bed_shape = utils.clip_min(bed_shape[:-1], min_shape)
1✔
3001

3002
            # Correct the section volume
3003
            h = inv['thick']
1✔
3004
            section[-5:] = (2 / 3 * h * np.sqrt(4 * h / bed_shape))[-5:]
1✔
3005

3006
            # Add the downstream
3007
            bed_shape = np.append(bed_shape, dic_ds['bedshapes'])
1✔
3008
            lambdas = np.append(lambdas, dic_ds['bedshapes'] * np.NaN)
1✔
3009
            section = np.append(section, dic_ds['bedshapes'] * 0.)
1✔
3010
            surface_h = np.append(surface_h, dic_ds['surface_h'])
1✔
3011
            bed_h = np.append(bed_h, dic_ds['surface_h'])
1✔
3012
            widths_m = np.append(widths_m, dic_ds['bedshapes'] * 0.)
1✔
3013
            line = dic_ds['full_line']
1✔
3014

3015
        if gdir.is_tidewater and inv['is_last']:
1✔
3016
            # Continue the bed a little
UNCOV
3017
            n_points = cfg.PARAMS['calving_line_extension']
×
UNCOV
3018
            cf_slope = cfg.PARAMS['calving_front_slope']
×
UNCOV
3019
            deepening = n_points * cl.dx * map_dx * cf_slope
×
3020

UNCOV
3021
            line = calving_glacier_downstream_line(line, n_points=n_points)
×
UNCOV
3022
            bed_shape = np.append(bed_shape, np.zeros(n_points))
×
UNCOV
3023
            lambdas = np.append(lambdas, np.zeros(n_points))
×
UNCOV
3024
            section = np.append(section, np.zeros(n_points))
×
3025
            # The bed slowly deepens
UNCOV
3026
            bed_down = np.linspace(bed_h[-1], bed_h[-1]-deepening, n_points)
×
UNCOV
3027
            bed_h = np.append(bed_h, bed_down)
×
UNCOV
3028
            surface_h = np.append(surface_h, bed_down)
×
UNCOV
3029
            widths_m = np.append(widths_m,
×
3030
                                 np.zeros(n_points) + np.mean(widths_m[-5:]))
3031

3032
        nfl = MixedBedFlowline(line=line, dx=cl.dx, map_dx=map_dx,
1✔
3033
                               surface_h=surface_h, bed_h=bed_h,
3034
                               section=section, bed_shape=bed_shape,
3035
                               is_trapezoid=np.isfinite(lambdas),
3036
                               lambdas=lambdas,
3037
                               widths_m=widths_m,
3038
                               rgi_id=cl.rgi_id)
3039

3040
        # Update attrs
3041
        nfl.mu_star = cl.mu_star
1✔
3042

3043
        if cl.flows_to:
1✔
3044
            flows_to_ids.append(cls.index(cl.flows_to))
1✔
3045
        else:
3046
            flows_to_ids.append(None)
1✔
3047

3048
        new_fls.append(nfl)
1✔
3049

3050
    # Finalize the linkages
3051
    for fl, fid in zip(new_fls, flows_to_ids):
1✔
3052
        if fid:
1✔
3053
            fl.set_flows_to(new_fls[fid])
1✔
3054

3055
    # Adds the line level
3056
    for fl in new_fls:
1✔
3057
        fl.order = line_order(fl)
1✔
3058

3059
    # Write the data
3060
    gdir.write_pickle(new_fls, 'model_flowlines', filesuffix=filesuffix)
1✔
3061

3062

3063
@entity_task(log, writes=['model_flowlines'])
9✔
3064
def init_present_time_glacier(gdir, filesuffix=''):
9✔
3065
    """Merges data from preprocessing tasks. First task after inversion!
3066

3067
    This updates the `model_flowlines` file and creates a stand-alone numerical
3068
    glacier ready to run.
3069

3070
    Parameters
3071
    ----------
3072
    gdir : :py:class:`oggm.GlacierDirectory`
3073
        the glacier directory to process
3074
    filesuffix : str
3075
        append a suffix to the model_flowlines filename (e.g. useful for
3076
        dynamic mu_star calibration including an inversion, so the original
3077
        model_flowlines are not changed).
3078
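
    A minimal usage sketch (assuming ``gdir`` has been through the inversion
    tasks already)::

        init_present_time_glacier(gdir)
        fls = gdir.read_pickle('model_flowlines')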
    """
3079

3080
    # Some vars
3081
    invs = gdir.read_pickle('inversion_output')
6✔
3082
    if invs[0].get('is_trapezoid', None) is None:
6✔
3083
        return old_init_present_time_glacier(gdir, filesuffix=filesuffix)
1✔
3084

3085
    map_dx = gdir.grid.dx
6✔
3086
    def_lambda = cfg.PARAMS['trapezoid_lambdas']
6✔
3087
    cls = gdir.read_pickle('inversion_flowlines')
6✔
3088

3089
    # Fill the tributaries
3090
    new_fls = []
6✔
3091
    flows_to_ids = []
6✔
3092
    for cl, inv in zip(cls, invs):
6✔
3093

3094
        # Get the data to make the model flowlines
3095
        line = cl.line
6✔
3096
        section = inv['volume'] / (cl.dx * map_dx)
6✔
3097
        surface_h = cl.surface_h
6✔
3098
        bed_h = surface_h - inv['thick']
6✔
3099
        widths_m = cl.widths * map_dx
6✔
3100

3101
        assert np.all(widths_m > 0)
6✔
3102
        bed_shape = 4 * inv['thick'] / (cl.widths * map_dx) ** 2
6✔
3103

3104
        lambdas = inv['thick'] * np.NaN
6✔
3105
        lambdas[inv['is_trapezoid']] = def_lambda
6✔
3106
        lambdas[inv['is_rectangular']] = 0.
6✔
3107

3108
        # Where the flux and the thickness are zero we just assume trapezoid:
3109
        lambdas[bed_shape == 0] = def_lambda
6✔
3110

3111
        if not gdir.is_tidewater and inv['is_last']:
6✔
3112
            # for valley glaciers, simply add the downstream line, depending on
3113
            # selected shape parabola or trapezoidal
3114
            dic_ds = gdir.read_pickle('downstream_line')
5✔
3115
            if cfg.PARAMS['downstream_line_shape'] == 'parabola':
5✔
3116
                bed_shape = np.append(bed_shape, dic_ds['bedshapes'])
5✔
3117
                lambdas = np.append(lambdas, dic_ds['bedshapes'] * np.NaN)
5✔
3118
                widths_m = np.append(widths_m, dic_ds['bedshapes'] * 0.)
5✔
3119
            elif cfg.PARAMS['downstream_line_shape'] == 'trapezoidal':
1✔
3120
                bed_shape = np.append(bed_shape, dic_ds['bedshapes'] * np.NaN)
1✔
3121
                lambdas = np.append(lambdas, np.ones(len(dic_ds['w0s'])) *
1✔
3122
                                    def_lambda)
3123
                widths_m = np.append(widths_m, dic_ds['w0s'])
1✔
3124
            else:
3125
                raise InvalidParamsError(
1✔
3126
                    f"Unknown cfg.PARAMS['downstream_line_shape'] = "
3127
                    f"{cfg.PARAMS['downstream_line_shape']} (options are "
3128
                    f"'parabola' and 'trapezoidal').")
3129
            section = np.append(section, dic_ds['bedshapes'] * 0.)
5✔
3130
            surface_h = np.append(surface_h, dic_ds['surface_h'])
5✔
3131
            bed_h = np.append(bed_h, dic_ds['surface_h'])
5✔
3132
            line = dic_ds['full_line']
5✔
3133

3134
        if gdir.is_tidewater and inv['is_last']:
6✔
3135
            # Continue the bed a little
3136
            n_points = cfg.PARAMS['calving_line_extension']
5✔
3137
            cf_slope = cfg.PARAMS['calving_front_slope']
5✔
3138
            deepening = n_points * cl.dx * map_dx * cf_slope
5✔
3139

3140
            line = calving_glacier_downstream_line(line, n_points=n_points)
5✔
3141
            bed_shape = np.append(bed_shape, np.zeros(n_points))
5✔
3142
            lambdas = np.append(lambdas, np.zeros(n_points))
5✔
3143
            section = np.append(section, np.zeros(n_points))
5✔
3144
            # The bed slowly deepens
3145
            bed_down = np.linspace(bed_h[-1], bed_h[-1]-deepening, n_points)
5✔
3146
            bed_h = np.append(bed_h, bed_down)
5✔
3147
            surface_h = np.append(surface_h, bed_down)
5✔
3148
            widths_m = np.append(widths_m,
5✔
3149
                                 np.zeros(n_points) + np.mean(widths_m[-5:]))
3150

3151
        nfl = MixedBedFlowline(line=line, dx=cl.dx, map_dx=map_dx,
6✔
3152
                               surface_h=surface_h, bed_h=bed_h,
3153
                               section=section, bed_shape=bed_shape,
3154
                               is_trapezoid=np.isfinite(lambdas),
3155
                               lambdas=lambdas,
3156
                               widths_m=widths_m,
3157
                               rgi_id=cl.rgi_id,
3158
                               gdir=gdir)
3159

3160
        # Update attrs
3161
        nfl.mu_star = cl.mu_star
6✔
3162

3163
        if cl.flows_to:
6✔
3164
            flows_to_ids.append(cls.index(cl.flows_to))
6✔
3165
        else:
3166
            flows_to_ids.append(None)
6✔
3167

3168
        new_fls.append(nfl)
6✔
3169

3170
    # Finalize the linkages
3171
    for fl, fid in zip(new_fls, flows_to_ids):
6✔
3172
        if fid:
6✔
3173
            fl.set_flows_to(new_fls[fid])
6✔
3174

3175
    # Adds the line level
3176
    for fl in new_fls:
6✔
3177
        fl.order = line_order(fl)
6✔
3178

3179
    # Write the data
3180
    gdir.write_pickle(new_fls, 'model_flowlines', filesuffix=filesuffix)
6✔
3181

3182

3183
@entity_task(log)
9✔
3184
def flowline_model_run(gdir, output_filesuffix=None, mb_model=None,
9✔
3185
                       ys=None, ye=None, zero_initial_glacier=False,
3186
                       init_model_fls=None, store_monthly_step=False,
3187
                       fixed_geometry_spinup_yr=None,
3188
                       store_model_geometry=None,
3189
                       store_fl_diagnostics=None,
3190
                       water_level=None,
3191
                       evolution_model=FluxBasedModel, stop_criterion=None,
3192
                       init_model_filesuffix=None, init_model_yr=None,
3193
                       **kwargs):
3194
    """Runs a model simulation with the default time stepping scheme.
3195

3196
    Parameters
3197
    ----------
3198
    gdir : :py:class:`oggm.GlacierDirectory`
3199
        the glacier directory to process
3200
    output_filesuffix : str
3201
        this adds a suffix to the output file (useful to avoid overwriting
3202
        previous experiments)
3203
    mb_model : :py:class:`core.MassBalanceModel`
3204
        a MassBalanceModel instance
3205
    ys : int
3206
        start year of the model run (default: from the config file)
3207
    ye : int
3208
        end year of the model run (default: from the config file)
3209
    zero_initial_glacier : bool
3210
        if true, the ice thickness is set to zero before the simulation
3211
    init_model_filesuffix : str
3212
        if you want to start from a previous model run state. Can be
3213
        combined with `init_model_yr`
3214
    init_model_yr : int
3215
        the year of the initial run you want to start from. The default
3216
        is to take the last year of the simulation.
3217
    init_model_fls : []
3218
        list of flowlines to use to initialise the model (the default is the
3219
        present_time_glacier file from the glacier directory)
3220
    store_monthly_step : bool
3221
        whether to store the diagnostic data at a monthly time step or not
3222
        (default is yearly)
3223
    store_model_geometry : bool
3224
        whether to store the full model geometry run file to disk or not.
3225
        (new in OGGM v1.4.1: default is to follow
3226
        cfg.PARAMS['store_model_geometry'])
3227
    store_fl_diagnostics : bool
3228
        whether to store the model flowline diagnostics to disk or not.
3229
        (default is to follow cfg.PARAMS['store_fl_diagnostics'])
3230
    evolution_model : :class:oggm.core.FlowlineModel
3231
        which evolution model to use. Default: FluxBasedModel
3232
    water_level : float
3233
        the water level. It should be 0 m a.s.l., but:
3234
        - sometimes the frontal elevation is unrealistically high (or low).
3235
        - lake terminating glaciers
3236
        - other uncertainties
3237
        The default is to take the water level obtained from the ice
3238
        thickness inversion.
3239
    stop_criterion : func
3240
        a function which decides on when to stop the simulation. See
3241
        `run_until_and_store` documentation for more information.
3242
    kwargs : dict
3243
        kwargs to pass to the FluxBasedModel instance
3244
    fixed_geometry_spinup_yr : int
3245
        if set to an integer, the model will artificially prolongate
3246
        all outputs of run_until_and_store to encompass all time stamps
3247
        starting from the chosen year. The only outputs affected are the
3248
        glacier wide diagnostic files - all other outputs are set
3249
        to constants during "spinup"
3250
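
    A minimal usage sketch (assuming a calibrated ``gdir`` and a mass balance
    model instance ``mb``; years and suffix are illustrative)::

        model = flowline_model_run(gdir, mb_model=mb, ys=2004, ye=2100,
                                   output_filesuffix='_example')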
     """
3251

3252
    if init_model_filesuffix is not None:
5✔
3253
        fp = gdir.get_filepath('model_geometry',
1✔
3254
                               filesuffix=init_model_filesuffix)
3255
        fmod = FileModel(fp)
1✔
3256
        if init_model_yr is None:
1✔
3257
            init_model_yr = fmod.last_yr
1✔
3258
        fmod.run_until(init_model_yr)
1✔
3259
        init_model_fls = fmod.fls
1✔
3260

3261
    mb_elev_feedback = kwargs.get('mb_elev_feedback', 'annual')
5✔
3262
    if store_monthly_step and (mb_elev_feedback == 'annual'):
5✔
3263
        warnings.warn("The mass balance used to drive the ice dynamics model "
1✔
3264
                      "is updated yearly. If you want the output to be stored "
3265
                      "monthly and also reflect monthly processes, "
3266
                      "set store_monthly_step=True and "
3267
                      "mb_elev_feedback='monthly'. This is not recommended "
3268
                      "though: for monthly MB applications, we recommend to "
3269
                      "use the `run_with_hydro` task.")
3270

3271
    if cfg.PARAMS['use_inversion_params_for_run']:
5✔
3272
        diag = gdir.get_diagnostics()
5✔
3273
        fs = diag.get('inversion_fs', cfg.PARAMS['fs'])
5✔
3274
        glen_a = diag.get('inversion_glen_a', cfg.PARAMS['glen_a'])
5✔
3275
    else:
3276
        fs = cfg.PARAMS['fs']
1✔
3277
        glen_a = cfg.PARAMS['glen_a']
1✔
3278

3279
    kwargs.setdefault('fs', fs)
5✔
3280
    kwargs.setdefault('glen_a', glen_a)
5✔
3281

3282
    if store_model_geometry is None:
5✔
3283
        store_model_geometry = cfg.PARAMS['store_model_geometry']
5✔
3284

3285
    if store_fl_diagnostics is None:
5✔
3286
        store_fl_diagnostics = cfg.PARAMS['store_fl_diagnostics']
5✔
3287

3288
    if store_model_geometry:
5✔
3289
        geom_path = gdir.get_filepath('model_geometry',
4✔
3290
                                      filesuffix=output_filesuffix,
3291
                                      delete=True)
3292
    else:
3293
        geom_path = False
4✔
3294

3295
    if store_fl_diagnostics:
5✔
3296
        fl_diag_path = gdir.get_filepath('fl_diagnostics',
1✔
3297
                                         filesuffix=output_filesuffix,
3298
                                         delete=True)
3299
    else:
3300
        fl_diag_path = False
5✔
3301

3302
    diag_path = gdir.get_filepath('model_diagnostics',
5✔
3303
                                  filesuffix=output_filesuffix,
3304
                                  delete=True)
3305

3306
    if init_model_fls is None:
5✔
3307
        fls = gdir.read_pickle('model_flowlines')
5✔
3308
    else:
3309
        fls = copy.deepcopy(init_model_fls)
2✔
3310
    if zero_initial_glacier:
5✔
3311
        for fl in fls:
1✔
3312
            fl.thick = fl.thick * 0.
1✔
3313

3314
    if (cfg.PARAMS['use_kcalving_for_run'] and gdir.is_tidewater and
5✔
3315
            water_level is None):
3316
        # check for water level
3317
        water_level = gdir.get_diagnostics().get('calving_water_level', None)
3✔
3318
        if water_level is None:
3✔
UNCOV
3319
            raise InvalidWorkflowError('This tidewater glacier seems to not '
×
3320
                                       'have been inverted with the '
3321
                                       '`find_inversion_calving` task. Set '
3322
                                       "PARAMS['use_kcalving_for_run'] to "
3323
                                       '`False` or set `water_level` '
3324
                                       'to prevent this error.')
3325

3326
    model = evolution_model(fls, mb_model=mb_model, y0=ys,
5✔
3327
                           inplace=True,
3328
                           is_tidewater=gdir.is_tidewater,
3329
                           is_lake_terminating=gdir.is_lake_terminating,
3330
                           water_level=water_level,
3331
                           **kwargs)
3332

3333
    with warnings.catch_warnings():
5✔
3334
        # For operational runs we ignore the warnings
3335
        warnings.filterwarnings('ignore', category=RuntimeWarning)
5✔
3336
        model.run_until_and_store(ye,
5✔
3337
                                  geom_path=geom_path,
3338
                                  diag_path=diag_path,
3339
                                  fl_diag_path=fl_diag_path,
3340
                                  store_monthly_step=store_monthly_step,
3341
                                  fixed_geometry_spinup_yr=fixed_geometry_spinup_yr,
3342
                                  stop_criterion=stop_criterion)
3343

3344
    return model
5✔
3345

3346

3347
@entity_task(log)
9✔
3348
def run_random_climate(gdir, nyears=1000, y0=None, halfsize=15,
9✔
3349
                       bias=None, seed=None, temperature_bias=None,
3350
                       precipitation_factor=None,
3351
                       store_monthly_step=False,
3352
                       store_model_geometry=None,
3353
                       store_fl_diagnostics=None,
3354
                       climate_filename='climate_historical',
3355
                       mb_model=None,
3356
                       climate_input_filesuffix='',
3357
                       output_filesuffix='', init_model_fls=None,
3358
                       init_model_filesuffix=None,
3359
                       init_model_yr=None,
3360
                       zero_initial_glacier=False,
3361
                       unique_samples=False, **kwargs):
3362
    """Runs the random mass balance model for a given number of years.
3363

3364
    This will initialize a
3365
    :py:class:`oggm.core.massbalance.MultipleFlowlineMassBalance`,
3366
    and run a :py:func:`oggm.core.flowline.flowline_model_run`.
3367

3368
    Parameters
3369
    ----------
3370
    gdir : :py:class:`oggm.GlacierDirectory`
3371
        the glacier directory to process
3372
    nyears : int
3373
        length of the simulation
3374
    y0 : int, optional
3375
        central year of the random climate period. The default is to be
3376
        centred on t*.
3377
    halfsize : int, optional
3378
        the half-size of the time window (window size = 2 * halfsize + 1)
3379
    bias : float
3380
        bias of the mb model. Default is to use the calibrated one, which
3381
        is often a better idea. For t* experiments it can be useful to set it
3382
        to zero
3383
    seed : int
3384
        seed for the random generator. If you ignore this, the runs will be
3385
        different each time. Setting it to a fixed seed across glaciers can
3386
        be useful if you want to have the same climate years for all of them
3387
    temperature_bias : float
3388
        add a bias to the temperature timeseries
3389
    precipitation_factor: float
3390
        factor with which to multiply the precipitation time series.
3391
        The default (None) applies the precipitation factor from the
3392
        calibration, i.e. cfg.PARAMS['prcp_scaling_factor']
3393
    store_monthly_step : bool
3394
        whether to store the diagnostic data at a monthly time step or not
3395
        (default is yearly)
3396
    store_model_geometry : bool
3397
        whether to store the full model geometry run file to disk or not.
3398
        (new in OGGM v1.4.1: default is to follow
3399
        cfg.PARAMS['store_model_geometry'])
3400
    store_fl_diagnostics : bool
3401
        whether to store the model flowline diagnostics to disk or not.
3402
        (default is to follow cfg.PARAMS['store_fl_diagnostics'])
3403
    climate_filename : str
3404
        name of the climate file, e.g. 'climate_historical' (default) or
3405
        'gcm_data'
3406
    mb_model : :py:class:`core.MassBalanceModel`
3407
        User-provided MassBalanceModel instance. Default is to use a
3408
        RandomMassBalance together with the provided parameters y0, halfsize,
3409
        bias, seed, climate_filename, climate_input_filesuffix and
3410
        unique_samples
3411
    climate_input_filesuffix: str
3412
        filesuffix for the input climate file
3413
    output_filesuffix : str
3414
        this add a suffix to the output file (useful to avoid overwriting
3415
        previous experiments)
3416
    init_model_filesuffix : str
3417
        if you want to start from a previous model run state. Can be
3418
        combined with `init_model_yr`
3419
    init_model_yr : int
3420
        the year of the initial run you want to start from. The default
3421
        is to take the last year of the simulation.
3422
    init_model_fls : []
3423
        list of flowlines to use to initialise the model (the default is the
3424
        present_time_glacier file from the glacier directory)
3425
    zero_initial_glacier : bool
3426
        if true, the ice thickness is set to zero before the simulation
3427
    unique_samples: bool
3428
        if true, chosen random mass balance years will only be available once
3429
        per random climate period-length
3430
        if false, every model year will be chosen from the random climate
3431
        period with the same probability
3432
    kwargs : dict
3433
        kwargs to pass to the FluxBasedModel instance
3434
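
    A minimal usage sketch (the climate period, seed and suffix are
    illustrative)::

        run_random_climate(gdir, nyears=300, y0=1999, halfsize=15,
                           seed=1, output_filesuffix='_random')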
    """
3435

3436
    if mb_model is None:
4✔
3437
        mb_model = MultipleFlowlineMassBalance(gdir,
4✔
3438
                                               mb_model_class=RandomMassBalance,
3439
                                               y0=y0, halfsize=halfsize,
3440
                                               bias=bias, seed=seed,
3441
                                               filename=climate_filename,
3442
                                               input_filesuffix=climate_input_filesuffix,
3443
                                               unique_samples=unique_samples)
3444

3445
    if temperature_bias is not None:
4✔
3446
        mb_model.temp_bias = temperature_bias
1✔
3447
    if precipitation_factor is not None:
4✔
UNCOV
3448
        mb_model.prcp_fac = precipitation_factor
×
3449

3450
    return flowline_model_run(gdir, output_filesuffix=output_filesuffix,
4✔
3451
                              mb_model=mb_model, ys=0, ye=nyears,
3452
                              store_monthly_step=store_monthly_step,
3453
                              store_model_geometry=store_model_geometry,
3454
                              store_fl_diagnostics=store_fl_diagnostics,
3455
                              init_model_filesuffix=init_model_filesuffix,
3456
                              init_model_yr=init_model_yr,
3457
                              init_model_fls=init_model_fls,
3458
                              zero_initial_glacier=zero_initial_glacier,
3459
                              **kwargs)
3460

3461

3462
@entity_task(log)
9✔
3463
def run_constant_climate(gdir, nyears=1000, y0=None, halfsize=15,
9✔
3464
                         bias=None, temperature_bias=None,
3465
                         precipitation_factor=None,
3466
                         store_monthly_step=False,
3467
                         store_model_geometry=None,
3468
                         store_fl_diagnostics=None,
3469
                         init_model_filesuffix=None,
3470
                         init_model_yr=None,
3471
                         output_filesuffix='',
3472
                         climate_filename='climate_historical',
3473
                         mb_model=None,
3474
                         climate_input_filesuffix='',
3475
                         init_model_fls=None,
3476
                         zero_initial_glacier=False,
3477
                         use_avg_climate=False,
3478
                         **kwargs):
3479
    """Runs the constant mass balance model for a given number of years.
3480

3481
    This will initialize a
3482
    :py:class:`oggm.core.massbalance.MultipleFlowlineMassBalance`,
3483
    and run a :py:func:`oggm.core.flowline.flowline_model_run`.
3484

3485
    Parameters
3486
    ----------
3487
    gdir : :py:class:`oggm.GlacierDirectory`
3488
        the glacier directory to process
3489
    nyears : int
3490
        length of the simulation in years (default: 1000, intended to be
3491
        long enough to approach equilibrium)
3492
    y0 : int
3493
        central year of the requested climate period. The default is to be
3494
        centred on t*.
3495
    halfsize : int, optional
3496
        the half-size of the time window (window size = 2 * halfsize + 1)
3497
    bias : float
3498
        bias of the mb model. Default is to use the calibrated one, which
3499
        is often a better idea. For t* experiments it can be useful to set it
3500
        to zero
3501
    temperature_bias : float
3502
        add a bias to the temperature timeseries
3503
    precipitation_factor: float
3504
        factor with which to multiply the precipitation time series.
3505
        The default (None) applies the precipitation factor from the
3506
        calibration, i.e. cfg.PARAMS['prcp_scaling_factor']
3507
    store_monthly_step : bool
3508
        whether to store the diagnostic data at a monthly time step or not
3509
        (default is yearly)
3510
    store_model_geometry : bool
3511
        whether to store the full model geometry run file to disk or not.
3512
        (new in OGGM v1.4.1: default is to follow
3513
        cfg.PARAMS['store_model_geometry'])
3514
    store_fl_diagnostics : bool
3515
        whether to store the model flowline diagnostics to disk or not.
3516
        (default is to follow cfg.PARAMS['store_fl_diagnostics'])
3517
    init_model_filesuffix : str
3518
        if you want to start from a previous model run state. Can be
3519
        combined with `init_model_yr`
3520
    init_model_yr : int
3521
        the year of the initial run you want to start from. The default
3522
        is to take the last year of the simulation.
3523
    climate_filename : str
3524
        name of the climate file, e.g. 'climate_historical' (default) or
3525
        'gcm_data'
3526
    mb_model : :py:class:`core.MassBalanceModel`
3527
        User-provided MassBalanceModel instance. Default is to use a
3528
        ConstantMassBalance together with the provided parameters y0, halfsize,
3529
        bias, climate_filename and climate_input_filesuffix.
3530
    climate_input_filesuffix: str
3531
        filesuffix for the input climate file
3532
    output_filesuffix : str
3533
        this add a suffix to the output file (useful to avoid overwriting
3534
        previous experiments)
3535
    zero_initial_glacier : bool
3536
        if true, the ice thickness is set to zero before the simulation
3537
    init_model_fls : []
3538
        list of flowlines to use to initialise the model (the default is the
3539
        present_time_glacier file from the glacier directory)
3540
    use_avg_climate : bool
3541
        use the average climate instead of the correct MB model. This is
3542
        for testing only!!!
3543
    kwargs : dict
3544
        kwargs to pass to the FluxBasedModel instance
3545
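
    A minimal usage sketch (the bias and suffix are illustrative)::

        run_constant_climate(gdir, nyears=500, y0=1999,
                             temperature_bias=-0.5,
                             output_filesuffix='_bias_m05')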
    """
3546

3547
    if use_avg_climate:
3✔
UNCOV
3548
        mb_model_class = AvgClimateMassBalance
×
3549
    else:
3550
        mb_model_class = ConstantMassBalance
3✔
3551

3552
    if mb_model is None:
3✔
3553
        mb_model = MultipleFlowlineMassBalance(gdir,
3✔
3554
                                               mb_model_class=mb_model_class,
3555
                                               y0=y0, halfsize=halfsize,
3556
                                               bias=bias,
3557
                                               filename=climate_filename,
3558
                                               input_filesuffix=climate_input_filesuffix)
3559

3560
    if temperature_bias is not None:
3✔
3561
        mb_model.temp_bias = temperature_bias
2✔
3562
    if precipitation_factor is not None:
3✔
UNCOV
3563
        mb_model.prcp_fac = precipitation_factor
×
3564

3565
    return flowline_model_run(gdir, output_filesuffix=output_filesuffix,
3✔
3566
                              mb_model=mb_model, ys=0, ye=nyears,
3567
                              store_monthly_step=store_monthly_step,
3568
                              store_model_geometry=store_model_geometry,
3569
                              store_fl_diagnostics=store_fl_diagnostics,
3570
                              init_model_filesuffix=init_model_filesuffix,
3571
                              init_model_yr=init_model_yr,
3572
                              init_model_fls=init_model_fls,
3573
                              zero_initial_glacier=zero_initial_glacier,
3574
                              **kwargs)
3575

3576

3577
@entity_task(log)
9✔
3578
def run_from_climate_data(gdir, ys=None, ye=None, min_ys=None, max_ys=None,
9✔
3579
                          fixed_geometry_spinup_yr=None,
3580
                          store_monthly_step=False,
3581
                          store_model_geometry=None,
3582
                          store_fl_diagnostics=None,
3583
                          climate_filename='climate_historical',
3584
                          mb_model=None,
3585
                          climate_input_filesuffix='', output_filesuffix='',
3586
                          init_model_filesuffix=None, init_model_yr=None,
3587
                          init_model_fls=None, zero_initial_glacier=False,
3588
                          bias=None, temperature_bias=None,
3589
                          precipitation_factor=None, **kwargs):
3590
    """Runs a glacier simulation with climate input from e.g. CRU or a GCM.
3591

3592
    This will initialize a
3593
    :py:class:`oggm.core.massbalance.MultipleFlowlineMassBalance`,
3594
    and run a :py:func:`oggm.core.flowline.flowline_model_run`.
3595

3596
    Parameters
3597
    ----------
3598
    gdir : :py:class:`oggm.GlacierDirectory`
3599
        the glacier directory to process
3600
    ys : int
3601
        start year of the model run (default: from the glacier geometry
3602
        date if init_model_filesuffix is None, else init_model_yr)
3603
    ye : int
3604
        end year of the model run (default: last year of the provided
3605
        climate file)
3606
    min_ys : int
3607
        if you want to impose a minimum start year, regardless of whether the
3608
        glacier inventory date is earlier (e.g. if the climate data starts later).
3609
    max_ys : int
3610
        if you want to impose a maximum start year, regardless of whether the
3611
        glacier inventory date is later (e.g. if the climate data ends earlier).
3612
    store_monthly_step : bool
3613
        whether to store the diagnostic data at a monthly time step or not
3614
        (default is yearly)
3615
    store_model_geometry : bool
3616
        whether to store the full model geometry run file to disk or not.
3617
        (new in OGGM v1.4.1: default is to follow
3618
        cfg.PARAMS['store_model_geometry'])
3619
    store_fl_diagnostics : bool
3620
        whether to store the model flowline diagnostics to disk or not.
3621
        (default is to follow cfg.PARAMS['store_fl_diagnostics'])
3622
    climate_filename : str
3623
        name of the climate file, e.g. 'climate_historical' (default) or
3624
        'gcm_data'
3625
    mb_model : :py:class:`core.MassBalanceModel`
3626
        User-provided MassBalanceModel instance. Default is to use a
3627
        PastMassBalance together with the provided parameters climate_filename,
3628
        bias and climate_input_filesuffix.
3629
    climate_input_filesuffix: str
3630
        filesuffix for the input climate file
3631
    output_filesuffix : str
3632
        for the output file
3633
    init_model_filesuffix : str
3634
        if you want to start from a previous model run state. Can be
3635
        combined with `init_model_yr`
3636
    init_model_yr : int
3637
        the year of the initial run you want to start from. The default
3638
        is to take the last year of the simulation.
3639
    init_model_fls : []
3640
        list of flowlines to use to initialise the model (the default is the
3641
        present_time_glacier file from the glacier directory).
3642
        Ignored if `init_model_filesuffix` is set
3643
    zero_initial_glacier : bool
3644
        if true, the ice thickness is set to zero before the simulation
3645
    bias : float
3646
        bias of the mb model. Default is to use the calibrated one, which
3647
        is often a better idea. For t* experiments it can be useful to set it
3648
        to zero
3649
    temperature_bias : float
3650
        add a bias to the temperature timeseries
3651
    precipitation_factor: float
3652
        multiply a factor to the precipitation time series
3653
        default is None and means that the precipitation factor from the
3654
        calibration is applied which is cfg.PARAMS['prcp_scaling_factor']
3655
    kwargs : dict
3656
        kwargs to pass to the FluxBasedModel instance
3657
    fixed_geometry_spinup_yr : int
3658
        if set to an integer, the model will artificially prolongate
3659
        all outputs of run_until_and_store to encompass all time stamps
3660
        starting from the chosen year. The only output affected are the
3661
        glacier wide diagnostic files - all other outputs are set
3662
        to constants during "spinup"
3663
    """

    if init_model_filesuffix is not None:
        fp = gdir.get_filepath('model_geometry',
                               filesuffix=init_model_filesuffix)
        fmod = FileModel(fp)
        if init_model_yr is None:
            init_model_yr = fmod.last_yr
        fmod.run_until(init_model_yr)
        init_model_fls = fmod.fls
        if ys is None:
            ys = init_model_yr

    try:
        rgi_year = gdir.rgi_date.year
    except AttributeError:
        rgi_year = gdir.rgi_date

    # Take from rgi date if not set yet
    if ys is None:
        # The RGI timestamp is in calendar date - we convert to hydro date,
        # i.e. 2003 becomes 2004 if hydro_month is not 1 (January)
        # (so that we don't count the MB year 2003 in the simulation)
        # See also: https://github.com/OGGM/oggm/issues/1020
        # even if hydro_month is 1, we prefer to start from Jan 2004
        # as in the alps the rgi is from Aug 2003
        ys = rgi_year + 1

    if ys <= rgi_year and init_model_filesuffix is None:
        log.warning('You are attempting to run_from_climate_data at dates '
                    'prior to the RGI inventory date. This may indicate some '
                    'problem in your workflow. Consider using '
                    '`fixed_geometry_spinup_yr` for example.')

    # Final crop
    if min_ys is not None:
        ys = ys if ys > min_ys else min_ys
    if max_ys is not None:
        ys = ys if ys < max_ys else max_ys

    if mb_model is None:
        mb_model = MultipleFlowlineMassBalance(gdir,
                                               mb_model_class=PastMassBalance,
                                               filename=climate_filename,
                                               bias=bias,
                                               input_filesuffix=climate_input_filesuffix)

    if temperature_bias is not None:
        mb_model.temp_bias = temperature_bias
    if precipitation_factor is not None:
        mb_model.prcp_fac = precipitation_factor

    if ye is None:
        # Decide from climate (we can run the last year with data as well)
        ye = mb_model.flowline_mb_models[0].ye + 1

    return flowline_model_run(gdir, output_filesuffix=output_filesuffix,
                              mb_model=mb_model, ys=ys, ye=ye,
                              store_monthly_step=store_monthly_step,
                              store_model_geometry=store_model_geometry,
                              store_fl_diagnostics=store_fl_diagnostics,
                              init_model_fls=init_model_fls,
                              zero_initial_glacier=zero_initial_glacier,
                              fixed_geometry_spinup_yr=fixed_geometry_spinup_yr,
                              **kwargs)


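# Editor's note: the sketch below is an illustrative usage example, not part
# of the OGGM API. It assumes a pre-processed GlacierDirectory `gdir` whose
# directory already contains a '_historical' model geometry file and a GCM
# climate file with the '_CCSM4' filesuffix (both filesuffixes are
# hypothetical example names).
def _example_run_from_climate_data(gdir):
    # Continue a projection from the end state of a previous historical run,
    # reading climate from a GCM file instead of the default historical one.
    return run_from_climate_data(gdir,
                                 climate_filename='gcm_data',
                                 climate_input_filesuffix='_CCSM4',
                                 init_model_filesuffix='_historical',
                                 output_filesuffix='_CCSM4_projection')

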
@entity_task(log)
def run_with_hydro(gdir, run_task=None, store_monthly_hydro=False,
                   fixed_geometry_spinup_yr=None, ref_area_from_y0=False,
                   ref_area_yr=None, ref_geometry_filesuffix=None,
                   **kwargs):
    """Run the flowline model and add hydro diagnostics.

    TODOs:
        - Add the possibility to record MB during run to improve performance
          (requires change in API)
        - ...

    Parameters
    ----------
    run_task : func
        any of the `run_*` tasks in the oggm.flowline module.
        The mass balance model used needs to have the `add_climate` output
        kwarg available though.
    store_monthly_hydro : bool
        also compute monthly hydrological diagnostics. The monthly outputs
        are stored in 2D fields (years, months)
    ref_area_yr : int
        the hydrological output is computed over a reference area, which
        by default is the largest area covered by the glacier in the
        simulation period. Use this kwarg to fix the reference area to the
        state of the glacier at the provided simulation year.
    ref_area_from_y0 : bool
        overwrite ref_area_yr to the first year of the timeseries
    ref_geometry_filesuffix : str
        this kwarg allows copying the reference area from a previous
        simulation (useful for projections with historical spinup for
        example). Set to a model_geometry file filesuffix that is present
        in the current directory (e.g. `_historical` for pre-processed
        gdirs). If set, ref_area_yr and ref_area_from_y0 refer to this
        file instead.
    fixed_geometry_spinup_yr : int
        if set to an integer, the model will artificially prolong
        all outputs of run_until_and_store to encompass all time stamps
        starting from the chosen year. The only outputs affected are the
        glacier-wide diagnostic files - all other outputs are held
        constant during the "spinup"
    **kwargs : all valid kwargs for ``run_task``
    """

    # Make sure it'll return something
    kwargs['return_value'] = True

    # Check that kwargs and params are compatible
    if kwargs.get('store_monthly_step', False):
        raise InvalidParamsError('run_with_hydro only compatible with '
                                 'store_monthly_step=False.')
    if kwargs.get('mb_elev_feedback', 'annual') != 'annual':
        raise InvalidParamsError('run_with_hydro only compatible with '
                                 "mb_elev_feedback='annual' (yes, even "
                                 "when asked for monthly hydro output).")
    if not cfg.PARAMS['store_model_geometry']:
        raise InvalidParamsError('run_with_hydro only works with '
                                 "PARAMS['store_model_geometry'] = True "
                                 "for now.")

    if fixed_geometry_spinup_yr is not None:
        kwargs['fixed_geometry_spinup_yr'] = fixed_geometry_spinup_yr

    out = run_task(gdir, **kwargs)

    if out is None:
        raise InvalidWorkflowError('The run task ({}) did not run '
                                   'successfully.'.format(run_task.__name__))

    do_spinup = fixed_geometry_spinup_yr is not None
    if do_spinup:
        start_dyna_model_yr = out.y0

    # Mass balance model used during the run
    mb_mod = out.mb_model

    # Glacier geometry during the run
    suffix = kwargs.get('output_filesuffix', '')

    # We start by fetching the reference model geometry
    # The one we just computed
    fmod = FileModel(gdir.get_filepath('model_geometry', filesuffix=suffix))
    # The last one is the final state - we can't compute MB for that
    years = fmod.years[:-1]

    if ref_geometry_filesuffix:
        if not ref_area_from_y0 and ref_area_yr is None:
            raise InvalidParamsError('If `ref_geometry_filesuffix` is set, '
                                     'users need to specify `ref_area_from_y0`'
                                     ' or `ref_area_yr`')
        # User provided
        fmod_ref = FileModel(gdir.get_filepath('model_geometry',
                                               filesuffix=ref_geometry_filesuffix))
    else:
        # ours as well
        fmod_ref = fmod

    # Check input
    if ref_area_from_y0:
        ref_area_yr = fmod_ref.years[0]

    # Geometry at year yr to start with + off-glacier snow bucket
    if ref_area_yr is not None:
        if ref_area_yr not in fmod_ref.years:
            raise InvalidParamsError('The chosen ref_area_yr is not '
                                     'available!')
        fmod_ref.run_until(ref_area_yr)

    bin_area_2ds = []
    bin_elev_2ds = []
    ref_areas = []
    snow_buckets = []
    for fl in fmod_ref.fls:
        # Glacier area on bins
        bin_area = fl.bin_area_m2
        ref_areas.append(bin_area)
        snow_buckets.append(bin_area * 0)

        # Output 2d data
        shape = len(years), len(bin_area)
        bin_area_2ds.append(np.empty(shape, np.float64))
        bin_elev_2ds.append(np.empty(shape, np.float64))

    # Ok now fetch all geometry data in a first loop
    # We do that because we might want to get the largest possible area (default)
    # and we want to minimize the number of calls to run_until
    for i, yr in enumerate(years):
        fmod.run_until(yr)
        for fl_id, (fl, bin_area_2d, bin_elev_2d) in \
                enumerate(zip(fmod.fls, bin_area_2ds, bin_elev_2ds)):
            # Time varying bins
            bin_area_2d[i, :] = fl.bin_area_m2
            bin_elev_2d[i, :] = fl.surface_h

    if ref_area_yr is None:
        # Ok we get the max area instead
        for ref_area, bin_area_2d in zip(ref_areas, bin_area_2ds):
            ref_area[:] = bin_area_2d.max(axis=0)

    # Ok now we have arrays, we can work with that
    # -> second time varying loop is for mass balance
    months = [1]
    seconds = cfg.SEC_IN_YEAR
    ntime = len(years) + 1
    oshape = (ntime, 1)
    if store_monthly_hydro:
        months = np.arange(1, 13)
        seconds = cfg.SEC_IN_MONTH
        oshape = (ntime, 12)

    out = {
        'off_area': {
            'description': 'Off-glacier area',
            'unit': 'm 2',
            'data': np.zeros(ntime),
        },
        'on_area': {
            'description': 'On-glacier area',
            'unit': 'm 2',
            'data': np.zeros(ntime),
        },
        'melt_off_glacier': {
            'description': 'Off-glacier melt',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'melt_on_glacier': {
            'description': 'On-glacier melt',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'melt_residual_off_glacier': {
            'description': 'Off-glacier melt due to MB model residual',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'melt_residual_on_glacier': {
            'description': 'On-glacier melt due to MB model residual',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'liq_prcp_off_glacier': {
            'description': 'Off-glacier liquid precipitation',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'liq_prcp_on_glacier': {
            'description': 'On-glacier liquid precipitation',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'snowfall_off_glacier': {
            'description': 'Off-glacier solid precipitation',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'snowfall_on_glacier': {
            'description': 'On-glacier solid precipitation',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
        'snow_bucket': {
            'description': 'Off-glacier snow reservoir (state variable)',
            'unit': 'kg',
            'data': np.zeros(oshape),
        },
        'model_mb': {
            'description': 'Annual mass balance from dynamical model',
            'unit': 'kg yr-1',
            'data': np.zeros(ntime),
        },
        'residual_mb': {
            'description': 'Difference (before correction) between mb model and dyn model melt',
            'unit': 'kg yr-1',
            'data': np.zeros(oshape),
        },
    }

    # Initialize
    fmod.run_until(years[0])
    prev_model_vol = fmod.volume_m3

    for i, yr in enumerate(years):

        # Now the loop over the months
        for m in months:

            # A bit silly but avoid double counting in monthly ts
            off_area_out = 0
            on_area_out = 0

            for fl_id, (ref_area, snow_bucket, bin_area_2d, bin_elev_2d) in \
                    enumerate(zip(ref_areas, snow_buckets, bin_area_2ds, bin_elev_2ds)):

                bin_area = bin_area_2d[i, :]
                bin_elev = bin_elev_2d[i, :]

                # Make sure we have no negative contribution when glaciers are out
                off_area = utils.clip_min(ref_area - bin_area, 0)

                try:
                    if store_monthly_hydro:
                        flt_yr = utils.date_to_floatyear(int(yr), m)
                        mb_out = mb_mod.get_monthly_mb(bin_elev, fl_id=fl_id,
                                                       year=flt_yr,
                                                       add_climate=True)
                        mb, _, _, prcp, prcpsol = mb_out
                    else:
                        mb_out = mb_mod.get_annual_mb(bin_elev, fl_id=fl_id,
                                                      year=yr, add_climate=True)
                        mb, _, _, prcp, prcpsol = mb_out
                except ValueError as e:
                    if 'too many values to unpack' in str(e):
                        raise InvalidWorkflowError('Run with hydro needs a MB '
                                                   'model able to add climate '
                                                   'info to `get_annual_mb`.')
                    raise

                # Here we use mass (kg yr-1) not ice volume
                mb *= seconds * cfg.PARAMS['ice_density']

                # Bias of the mb model is a fake melt term that we need to deal with
                # This is here for correction purposes later
                mb_bias = mb_mod.bias * seconds / cfg.SEC_IN_YEAR

                liq_prcp_on_g = (prcp - prcpsol) * bin_area
                liq_prcp_off_g = (prcp - prcpsol) * off_area

                prcpsol_on_g = prcpsol * bin_area
                prcpsol_off_g = prcpsol * off_area

                # IMPORTANT: this does not guarantee that melt cannot be negative
                # the reason is the MB residual that here can only be understood
                # as a fake melt process.
                # In particular at the monthly scale this can lead to negative
                # or winter positive melt - we try to mitigate this
                # issue at the end of the year
                melt_on_g = (prcpsol - mb) * bin_area
                melt_off_g = (prcpsol - mb) * off_area

                if mb_mod.bias == 0:
                    # Here we can add an additional sanity check
                    # These thresholds are arbitrary for now. TODO: remove
                    if np.any(melt_on_g < -1):
                        log.warning('WARNING: Melt on glacier is negative although it '
                                    'should not be. If you have time, check '
                                    "what's going on. Melt: {}".format(melt_on_g.min()))
                    if np.any(melt_off_g < -1):
                        log.warning('WARNING: Melt off glacier is negative although it '
                                    'should not be. If you have time, check '
                                    "what's going on. Melt: {}".format(melt_off_g.min()))

                    # We clip anyway
                    melt_on_g = utils.clip_min(melt_on_g, 0)
                    melt_off_g = utils.clip_min(melt_off_g, 0)

                # This is the bad boy
                bias_on_g = mb_bias * bin_area
                bias_off_g = mb_bias * off_area

                # Update bucket with accumulation and melt
                snow_bucket += prcpsol_off_g
                # It can only melt that much
                melt_off_g = np.where((snow_bucket - melt_off_g) >= 0, melt_off_g, snow_bucket)
                # Update bucket
                snow_bucket -= melt_off_g

                # This is recomputed each month but well
                off_area_out += np.sum(off_area)
                on_area_out += np.sum(bin_area)

                # Monthly out
                out['melt_off_glacier']['data'][i, m-1] += np.sum(melt_off_g)
                out['melt_on_glacier']['data'][i, m-1] += np.sum(melt_on_g)
                out['melt_residual_off_glacier']['data'][i, m-1] += np.sum(bias_off_g)
                out['melt_residual_on_glacier']['data'][i, m-1] += np.sum(bias_on_g)
                out['liq_prcp_off_glacier']['data'][i, m-1] += np.sum(liq_prcp_off_g)
                out['liq_prcp_on_glacier']['data'][i, m-1] += np.sum(liq_prcp_on_g)
                out['snowfall_off_glacier']['data'][i, m-1] += np.sum(prcpsol_off_g)
                out['snowfall_on_glacier']['data'][i, m-1] += np.sum(prcpsol_on_g)

                # Snow bucket is a state variable - stored at end of timestamp
                if store_monthly_hydro:
                    if m == 12:
                        out['snow_bucket']['data'][i+1, 0] += np.sum(snow_bucket)
                    else:
                        out['snow_bucket']['data'][i, m] += np.sum(snow_bucket)
                else:
                    out['snow_bucket']['data'][i+1, m-1] += np.sum(snow_bucket)

        # Update the annual data
        out['off_area']['data'][i] = off_area_out
        out['on_area']['data'][i] = on_area_out

        # If monthly, put the residual where we can
        if store_monthly_hydro and mb_mod.bias != 0:
            for melt, bias in zip(
                    [
                        out['melt_on_glacier']['data'][i, :],
                        out['melt_off_glacier']['data'][i, :],
                    ],
                    [
                        out['melt_residual_on_glacier']['data'][i, :],
                        out['melt_residual_off_glacier']['data'][i, :],
                    ],
            ):

                real_melt = melt - bias
                to_correct = utils.clip_min(real_melt, 0)
                to_correct_sum = np.sum(to_correct)
                if (to_correct_sum > 1e-7) and (np.sum(melt) > 0):
                    # Ok we correct the positive melt instead
                    fac = np.sum(melt) / to_correct_sum
                    melt[:] = to_correct * fac

        if do_spinup and yr < start_dyna_model_yr:
            residual_mb = 0
            model_mb = (out['snowfall_on_glacier']['data'][i, :].sum() -
                        out['melt_on_glacier']['data'][i, :].sum())
        else:
            # Correct for mass-conservation and match the ice-dynamics model
            fmod.run_until(yr + 1)
            model_mb = (fmod.volume_m3 - prev_model_vol) * cfg.PARAMS['ice_density']
            prev_model_vol = fmod.volume_m3

            reconstructed_mb = (out['snowfall_on_glacier']['data'][i, :].sum() -
                                out['melt_on_glacier']['data'][i, :].sum())
            residual_mb = model_mb - reconstructed_mb

        # Now correct
        if residual_mb == 0:
            pass
        elif store_monthly_hydro:
            # We try to correct the melt only where there is some
            asum = out['melt_on_glacier']['data'][i, :].sum()
            if asum > 1e-7 and (residual_mb / asum < 1):
                # try to find a fac
                fac = 1 - residual_mb / asum
                corr = out['melt_on_glacier']['data'][i, :] * fac
                residual_mb = out['melt_on_glacier']['data'][i, :] - corr
                out['melt_on_glacier']['data'][i, :] = corr
            else:
                # We simply spread over the months
                residual_mb /= 12
                out['melt_on_glacier']['data'][i, :] = (out['melt_on_glacier']['data'][i, :] -
                                                        residual_mb)
        else:
            # We simply apply the residual - no choice here
            out['melt_on_glacier']['data'][i, :] = (out['melt_on_glacier']['data'][i, :] -
                                                    residual_mb)

        out['model_mb']['data'][i] = model_mb
        out['residual_mb']['data'][i] = residual_mb

    # Convert to xarray
    out_vars = cfg.PARAMS['store_diagnostic_variables']
    ods = xr.Dataset()
    ods.coords['time'] = fmod.years
    if store_monthly_hydro:
        ods.coords['month_2d'] = ('month_2d', np.arange(1, 13))
        # For the user later
        sm = cfg.PARAMS['hydro_month_' + mb_mod.hemisphere]
        ods.coords['calendar_month_2d'] = ('month_2d', (np.arange(12) + sm - 1) % 12 + 1)
    for varname, d in out.items():
        data = d.pop('data')
        if varname not in out_vars:
            continue
        if len(data.shape) == 2:
            # First the annual agg
            if varname == 'snow_bucket':
                # Snowbucket is a state variable
                ods[varname] = ('time', data[:, 0])
            else:
                # Last year is never good
                data[-1, :] = np.NaN
                ods[varname] = ('time', np.sum(data, axis=1))
            # Then the monthly ones
            if store_monthly_hydro:
                ods[varname + '_monthly'] = (('time', 'month_2d'), data)
        else:
            assert varname != 'snow_bucket'
            data[-1] = np.NaN
            ods[varname] = ('time', data)
        for k, v in d.items():
            ods[varname].attrs[k] = v

    # Append the output to the existing diagnostics
    fpath = gdir.get_filepath('model_diagnostics', filesuffix=suffix)
    ods.to_netcdf(fpath, mode='a')


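# Editor's note: illustrative usage sketch only, not part of the OGGM API.
# It assumes a pre-processed GlacierDirectory `gdir`; the '_hydro' output
# filesuffix is an arbitrary example name, and the variables read at the end
# are only present if they are listed in
# cfg.PARAMS['store_diagnostic_variables'].
def _example_run_with_hydro(gdir):
    # Wrap a climate-driven run and compute monthly hydrological diagnostics.
    run_with_hydro(gdir, run_task=run_from_climate_data,
                   store_monthly_hydro=True,
                   output_filesuffix='_hydro')
    # The hydro variables are appended to the model diagnostics file written
    # by the wrapped run (see `ods.to_netcdf(fpath, mode='a')` above).
    fpath = gdir.get_filepath('model_diagnostics', filesuffix='_hydro')
    with xr.open_dataset(fpath) as ds:
        # e.g. annual on-glacier melt (kg yr-1) and its monthly breakdown
        return ds['melt_on_glacier'].load(), ds['melt_on_glacier_monthly'].load()

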
def zero_glacier_stop_criterion(model, state, n_zero=5, n_years=20):
    """Stop the simulation when the glacier volume is zero for a given period.

    To be passed as kwarg to `run_until_and_store`.

    Parameters
    ----------
    model : the model class
    state : a dict
    n_zero : number of zero-volume years
    n_years : number of years to consider

    Returns
    -------
    stop (True/False), new_state
    """

    if state is None:
        # OK first call
        state = {}

    if 'was_zero' not in state:
        # Maybe the state is from another criterion
        state['was_zero'] = []

    if model.yr != int(model.yr):
        # We consider only full model years
        return False, state

    if model.volume_m3 == 0:
        if len(state['was_zero']) < n_years:
            return False, state
        if np.sum(state['was_zero'][-n_years:]) >= n_zero - 1:
            return True, state
        else:
            state['was_zero'] = np.append(state['was_zero'], [True])
    else:
        state['was_zero'] = np.append(state['was_zero'], [False])

    return False, state


def spec_mb_stop_criterion(model, state, spec_mb_threshold=50, n_years=60):
    """Stop the simulation when the specific MB is close to zero for a given period.

    To be passed as kwarg to `run_until_and_store`.

    Parameters
    ----------
    model : the model class
    state : a dict
    spec_mb_threshold : the specific MB threshold (in mm w.e. per year)
    n_years : number of years to consider

    Returns
    -------
    stop (True/False), new_state
    """

    if state is None:
        # OK first call
        state = {}

    if 'spec_mb' not in state:
        # Maybe the state is from another criterion
        state['spec_mb'] = []
        state['volume_m3'] = []

    if model.yr != int(model.yr):
        # We consider only full model years
        return False, state

    area = model.area_m2
    volume = model.volume_m3
    if area < 1 or len(state['volume_m3']) == 0:
        spec_mb = np.NaN
    else:
        spec_mb = (volume - state['volume_m3'][-1]) / area * cfg.PARAMS['ice_density']

    state['spec_mb'] = np.append(state['spec_mb'], [spec_mb])
    state['volume_m3'] = np.append(state['volume_m3'], [volume])

    if len(state['spec_mb']) < n_years:
        return False, state

    mbavg = np.nanmean(state['spec_mb'][-n_years:])
    if abs(mbavg) <= spec_mb_threshold:
        return True, state
    else:
        return False, state


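# Editor's note: small worked example (illustrative only) of the specific
# mass balance computed by `spec_mb_stop_criterion` above: the ice-volume
# change between two full model years is converted to a glacier-wide mass
# balance in kg m-2 (~ mm w.e.) using the ice density. All numbers below are
# made up; 900 kg m-3 is an assumed value for cfg.PARAMS['ice_density'].
def _example_spec_mb_arithmetic():
    ice_density = 900.   # kg m-3 (assumed)
    volume_prev = 1.002e9  # m3 of ice at the previous check (made up)
    volume_now = 1.000e9   # m3 of ice now (made up)
    area = 1.0e7           # m2 of glacier area (made up)
    spec_mb = (volume_now - volume_prev) / area * ice_density
    # -> -180 kg m-2 yr-1 (~ -180 mm w.e.): well outside the default
    # spec_mb_threshold of 50, so the criterion would not stop the run yet.
    return spec_mb

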
def equilibrium_stop_criterion(model, state,
                               spec_mb_threshold=50, n_years_specmb=60,
                               n_zero=5, n_years_zero=20):
    """Stop the simulation when one of the spec_mb or zero_volume criteria is met.

    To be passed as kwarg to `run_until_and_store`.

    Parameters
    ----------
    model : the model class
    state : a dict
    spec_mb_threshold : the specific MB threshold (in mm w.e. per year)
    n_years_specmb : number of years to consider for the spec_mb criterion
    n_zero : number of zero-volume years
    n_years_zero : number of years to consider for the zero volume criterion.

    Returns
    -------
    stop (True/False), new_state
    """

    if state is None:
        # OK first call
        state = {}
    s1, state = zero_glacier_stop_criterion(model, state, n_years=n_years_zero,
                                            n_zero=n_zero)
    s2, state = spec_mb_stop_criterion(model, state, n_years=n_years_specmb,
                                       spec_mb_threshold=spec_mb_threshold)
    return s1 or s2, state


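# Editor's note: illustrative sketch (not part of the OGGM API) of the
# contract shared by the stop criteria above: the caller threads a `state`
# dict through repeated calls and stops the time loop once the first returned
# value is True. `model` is assumed to be a running FlowlineModel-like object
# exposing `yr`, `run_until`, `area_m2` and `volume_m3`; the 3000-yr cap is
# an arbitrary safety limit for the sketch.
def _example_stop_criterion_loop(model):
    state = None
    stop = False
    while not stop and model.yr < 3000:
        model.run_until(model.yr + 1)
        stop, state = equilibrium_stop_criterion(model, state)
    return model.yr

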
def merge_to_one_glacier(main, tribs, filename='climate_historical',
                         input_filesuffix=''):
    """Merge multiple tributary glacier flowlines to a main glacier.

    This function will merge multiple tributary glaciers to a main glacier
    and write modified `model_flowlines` to the main GlacierDirectory.
    The provided tributaries must have an intersecting downstream line.
    To be sure about this, use `intersect_downstream_lines` first.
    This function is mainly responsible for reprojecting the flowlines,
    setting flowline attributes and copying additional files, such as the
    necessary climate files.

    Parameters
    ----------
    main : oggm.GlacierDirectory
        The new GDir of the glacier of interest
    tribs : list or dictionary containing oggm.GlacierDirectories
        true tributary glaciers to the main glacier
    filename: str
        Baseline climate file
    input_filesuffix: str
        Filesuffix to the climate file
    """

    # read flowlines of the Main glacier
    fls = main.read_pickle('model_flowlines')
    mfl = fls.pop(-1)  # remove main line from list and treat separately

    for trib in tribs:

        # read tributary flowlines and append to list
        tfls = trib.read_pickle('model_flowlines')

        # copy climate file and local_mustar to new gdir
        # if we have a merge-merge situation we need to copy multiple files
        rgiids = set([fl.rgi_id for fl in tfls])

        for uid in rgiids:
            if len(rgiids) == 1:
                # we do not have a merge-merge situation
                in_id = ''
                out_id = trib.rgi_id
            else:
                in_id = '_' + uid
                out_id = uid

            climfile_in = filename + in_id + input_filesuffix + '.nc'
            climfile_out = filename + '_' + out_id + input_filesuffix + '.nc'
            shutil.copyfile(os.path.join(trib.dir, climfile_in),
                            os.path.join(main.dir, climfile_out))

            _m = os.path.basename(trib.get_filepath('local_mustar')).split('.')
            muin = _m[0] + in_id + '.' + _m[1]
            muout = _m[0] + '_' + out_id + '.' + _m[1]

            shutil.copyfile(os.path.join(trib.dir, muin),
                            os.path.join(main.dir, muout))

        # sort flowlines descending
        tfls.sort(key=lambda x: x.order, reverse=True)

        # loop over tributaries and reproject to main glacier
        for nr, tfl in enumerate(tfls):

            # 1. change projection to the main glacier's grid
            _line = salem.transform_geometry(tfl.line,
                                             crs=trib.grid, to_crs=main.grid)

            # 2. set new line
            tfl.set_line(_line)

            # 3. set map attributes
            dx = [shpg.Point(tfl.line.coords[i]).distance(
                shpg.Point(tfl.line.coords[i+1]))
                for i, pt in enumerate(tfl.line.coords[:-1])]  # get distance
            # and check if equally spaced
            if not np.allclose(dx, np.mean(dx), atol=1e-2):
                raise RuntimeError('Flowline is not evenly spaced.')

            tfl.dx = np.mean(dx).round(2)
            tfl.map_dx = mfl.map_dx
            tfl.dx_meter = tfl.map_dx * tfl.dx

            # 4. remove attributes, they will be set again later
            tfl.inflow_points = []
            tfl.inflows = []

            # 5. set flows_to, mainly to update flows_to_point coordinates
            if tfl.flows_to is not None:
                tfl.set_flows_to(tfl.flows_to)

        # append tributary flowlines to list
        fls += tfls

    # add main flowline to the end
    fls = fls + [mfl]

    # Finally write the flowlines
    main.write_pickle(fls, 'model_flowlines')


def clean_merged_flowlines(gdir, buffer=None):
    """Order and cut merged flowlines to size.

    After matching flowlines have been found and merged into one glacier
    directory, this function tidies them up: there should only be one
    flowline per bed, so overlapping lines have to be cut, attributed to
    another flowline and ordered.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        The GDir of the glacier of interest
    buffer: float
        Buffer around the flowlines to find overlaps
    """

    # No buffer does not work
    if buffer is None:
        buffer = cfg.PARAMS['kbuffer']

    # Number of pixels to arbitrarily remove at junctions
    lid = int(cfg.PARAMS['flowline_junction_pix'])

    fls = gdir.read_pickle('model_flowlines')

    # separate the main main flowline
    mainfl = fls.pop(-1)

    # split fls in main and tribs
    mfls = [fl for fl in fls if fl.flows_to is None]
    tfls = [fl for fl in fls if fl not in mfls]

    # --- first treat the main flowlines ---
    # sort by order and length as a second choice
    mfls.sort(key=lambda x: (x.order, len(x.inflows), x.length_m),
              reverse=False)

    merged = []

    # for fl1 in mfls:
    while len(mfls) > 0:
        fl1 = mfls.pop(0)

        ol_index = []  # list of index from first overlap

        # loop over other main lines and main main line
        for fl2 in mfls + [mainfl]:

            # calculate overlap, maybe use larger buffer here only to find it
            _overlap = fl1.line.intersection(fl2.line.buffer(buffer*2))

            # calculate index of first overlap if overlap length > 0
            oix = 9999
            if _overlap.length > 0 and fl1 != fl2 and fl2.flows_to != fl1:
                if isinstance(_overlap, shpg.MultiLineString):
                    if _overlap.geoms[0].coords[0] == fl1.line.coords[0]:
                        # if the head of the overlap is the same as the first
                        # line, best guess is that the heads are close together!
                        _ov1 = _overlap.geoms[1].coords[1]
                    else:
                        _ov1 = _overlap.geoms[0].coords[1]
                else:
                    _ov1 = _overlap.coords[1]
                for _i, _p in enumerate(fl1.line.coords):
                    if _p == _ov1:
                        oix = _i
                # low indices are more likely due to a wrong overlap
                if oix < 10:
                    oix = 9999
            ol_index.append(oix)

        ol_index = np.array(ol_index)
        if np.all(ol_index == 9999):
            log.warning('Glacier %s could not be merged, removed!' %
                        fl1.rgi_id)
            # remove possible tributary flowlines
            tfls = [fl for fl in tfls if fl.rgi_id != fl1.rgi_id]
            # skip rest of this while loop
            continue

        # make this based on first overlap, but consider order and/or length
        minx = ol_index[ol_index <= ol_index.min()+10][-1]
        i = np.where(ol_index == minx)[0][-1]
        _olline = (mfls + [mainfl])[i]

        # 1. cut line to size
        _line = fl1.line

        bufferuse = buffer
        while bufferuse > 0:
            _overlap = _line.intersection(_olline.line.buffer(bufferuse))
            _linediff = _line.difference(_overlap)  # cut to new line

            # if the tributary flowline is longer than the main line,
            # _line will contain multiple LineStrings: only keep the first
            if isinstance(_linediff, shpg.MultiLineString):
                _linediff = _linediff.geoms[0]

            if len(_linediff.coords) < 10:
                bufferuse -= 1
            else:
                break

        if bufferuse <= 0:
            log.warning('Glacier %s would be too short after merge, removed!' %
                        fl1.rgi_id)
            # remove possible tributary flowlines
            tfls = [fl for fl in tfls if fl.rgi_id != fl1.rgi_id]
            # skip rest of this while loop
            continue

        # remove cfg.PARAMS['flowline_junction_pix'] from the _line
        # gives a bigger gap at the junction and makes sure the last
        # point is not corrupted in terms of spacing
        _line = shpg.LineString(_linediff.coords[:-lid])

        # 2. set new line
        fl1.set_line(_line)

        # 3. set flows_to attributes. This also adds inflow values to other
        fl1.set_flows_to(_olline)

        # change the array size of tributary flowline attributes
        for atr, value in fl1.__dict__.items():
            if atr in ['_ptrap', '_prec']:
                # those are indices, remove those above nx
                fl1.__setattr__(atr, value[value < fl1.nx])
            elif isinstance(value, np.ndarray) and (len(value) > fl1.nx):
                # those are actual parameters on the grid
                fl1.__setattr__(atr, value[:fl1.nx])

        merged.append(fl1)
    allfls = merged + tfls

    # now check all lines for possible cut offs
    for fl in allfls:
        try:
            fl.flows_to_indice
        except AssertionError:
            mfl = fl.flows_to
            # remove it from original
            mfl.inflow_points.remove(fl.flows_to_point)
            mfl.inflows.remove(fl)

            prdis = mfl.line.project(fl.tail)
            mfl_keep = mfl

            while mfl.flows_to is not None:
                prdis2 = mfl.flows_to.line.project(fl.tail)
                if prdis2 < prdis:
                    mfl_keep = mfl
                    prdis = prdis2
                mfl = mfl.flows_to

            # we should be good to add this line here
            fl.set_flows_to(mfl_keep.flows_to)

    allfls = allfls + [mainfl]

    for fl in allfls:
        fl.inflows = []
        fl.inflow_points = []
        if hasattr(fl, '_lazy_flows_to_indice'):
            delattr(fl, '_lazy_flows_to_indice')
        if hasattr(fl, '_lazy_inflow_indices'):
            delattr(fl, '_lazy_inflow_indices')

    for fl in allfls:
        if fl.flows_to is not None:
            fl.set_flows_to(fl.flows_to)

    for fl in allfls:
        fl.order = line_order(fl)

    # sort flowlines in ascending order
    allfls.sort(key=lambda x: x.order, reverse=False)

    # assert last flowline is main flowline
    assert allfls[-1] == mainfl

    # Finally write the flowlines
    gdir.write_pickle(allfls, 'model_flowlines')
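

# Editor's note: illustrative sketch (not part of the OGGM API) of how the
# two merge helpers above are chained. `main_gdir` and `trib_gdirs` are
# assumed to be prepared GlacierDirectories whose tributaries were already
# screened (e.g. with `intersect_downstream_lines`, see the docstring of
# `merge_to_one_glacier`).
def _example_merge_workflow(main_gdir, trib_gdirs):
    # 1. reproject tributary flowlines onto the main glacier and copy the
    #    climate / local_mustar files they need
    merge_to_one_glacier(main_gdir, trib_gdirs)
    # 2. cut overlapping lines to size, re-link them and re-order everything
    clean_merged_flowlines(main_gdir)
    return main_gdir.read_pickle('model_flowlines')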