
OGGM / oggm / build 3836586642 (pending completion)

GitHub Pull Request #1510: Fix issue with TANDEM DEM and minor RGI7 issue
Merge 2a19b5c72 into 8ab12c504

2 of 4 new or added lines in 2 files covered (50.0%)
249 existing lines in 7 files now uncovered
12099 of 13681 relevant lines covered (88.44%)
3.61 hits per line
Source File: /oggm/core/gis.py (90.51% covered)
1
""" Handling of the local glacier map and masks. Defines the first tasks
2
to be realized by any OGGM pre-processing workflow.
3

4
"""
5
# Built ins
6
import os
9✔
7
import logging
9✔
8
import warnings
9✔
9
from packaging.version import Version
9✔
10
from functools import partial
9✔
11

12
# External libs
13
import numpy as np
9✔
14
import shapely.ops
9✔
15
import pandas as pd
9✔
16
import xarray as xr
9✔
17
import shapely.geometry as shpg
9✔
18
import scipy.signal
9✔
19
from scipy.ndimage import label, distance_transform_edt, binary_erosion
9✔
20
from scipy.interpolate import griddata
9✔
21
from scipy import optimize as optimization
9✔
22

23
# Optional libs
24
try:
9✔
25
    import salem
9✔
26
    from salem.gis import transform_proj
8✔
27
except ImportError:
28
    pass
29
try:
9✔
30
    import pyproj
9✔
31
except ImportError:
32
    pass
33
try:
9✔
34
    import geopandas as gpd
9✔
35
except ImportError:
36
    pass
37
try:
9✔
38
    import skimage.draw as skdraw
9✔
39
except ImportError:
40
    pass
41
try:
9✔
42
    import rasterio
9✔
43
    from rasterio.warp import reproject, Resampling
8✔
44
    from rasterio.mask import mask as riomask
8✔
45
    try:
8✔
46
        # rasterio V > 1.0
47
        from rasterio.merge import merge as merge_tool
8✔
48
    except ImportError:
49
        from rasterio.tools.merge import merge as merge_tool
50
except ImportError:
51
    pass
52

53
# Locals
54
from oggm import entity_task, utils
9✔
55
import oggm.cfg as cfg
9✔
56
from oggm.exceptions import (InvalidParamsError, InvalidGeometryError,
9✔
57
                             InvalidDEMError, GeometryError,
58
                             InvalidWorkflowError)
59
from oggm.utils import (tuple2int, get_topo_file, is_dem_source_available,
9✔
60
                        nicenumber, ncDataset, tolist)
61

62

63
# Module logger
64
log = logging.getLogger(__name__)
9✔
65

66
# Needed later
67
label_struct = np.ones((3, 3))
9✔
68

69

70
def _parse_source_text():
9✔
71
    fp = os.path.join(os.path.abspath(os.path.dirname(cfg.__file__)),
9✔
72
                      'data', 'dem_sources.txt')
73

74
    out = dict()
9✔
75
    cur_key = None
9✔
76
    with open(fp, 'r', encoding='utf-8') as fr:
9✔
77
        this_text = []
9✔
78
        for l in fr.readlines():
9✔
79
            l = l.strip()
9✔
80
            if l and (l[0] == '[' and l[-1] == ']'):
9✔
81
                if cur_key:
9✔
82
                    out[cur_key] = '\n'.join(this_text)
9✔
83
                this_text = []
9✔
84
                cur_key = l.strip('[]')
9✔
85
                continue
9✔
86
            this_text.append(l)
9✔
87
    out[cur_key] = '\n'.join(this_text)
9✔
88
    return out
9✔
89

90

91
DEM_SOURCE_INFO = _parse_source_text()
9✔
92

93

94
def gaussian_blur(in_array, size):
9✔
95
    """Applies a Gaussian filter to a 2d array.
96

97
    Parameters
98
    ----------
99
    in_array : numpy.array
100
        The array to smooth.
101
    size : int
102
        The half size of the smoothing window.
103

104
    Returns
105
    -------
106
    a smoothed numpy.array
107
    """
108

109
    # expand in_array to fit edge of kernel
110
    padded_array = np.pad(in_array, size, 'symmetric')
6✔
111

112
    # build kernel
113
    x, y = np.mgrid[-size:size + 1, -size:size + 1]
6✔
114
    g = np.exp(-(x**2 / float(size) + y**2 / float(size)))
6✔
115
    g = (g / g.sum()).astype(in_array.dtype)
6✔
116

117
    # do the Gaussian blur
118
    return scipy.signal.fftconvolve(padded_array, g, mode='valid')
6✔
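# A minimal usage sketch for the helper above (illustrative, not part of the
# module): the half-size is given in grid points, so a physical smoothing
# window is first converted with the grid resolution, as process_dem does
# further below.
#
#     import numpy as np
#     dem = np.random.RandomState(0).rand(50, 50) * 1000.  # fake 50x50 DEM
#     gsize = int(np.rint(251. / 100.))  # e.g. a 251 m window on a 100 m grid
#     smoothed = gaussian_blur(dem, gsize)
#     assert smoothed.shape == dem.shape  # padding + 'valid' mode keep shape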
119

120

121
def _interp_polygon(polygon, dx):
9✔
122
    """Interpolates an irregular polygon to a regular step dx.
123

124
    Interior geometries are also interpolated if they are longer than 3*dx,
125
    otherwise they are ignored.
126

127
    Parameters
128
    ----------
129
    polygon: The shapely.geometry.Polygon instance to interpolate
130
    dx : the step (float)
131

132
    Returns
133
    -------
134
    an interpolated shapely.geometry.Polygon class instance.
135
    """
136

137
    # remove last (duplex) point to build a LineString from the LinearRing
138
    line = shpg.LineString(np.asarray(polygon.exterior.xy).T)
6✔
139

140
    e_line = []
6✔
141
    for distance in np.arange(0.0, line.length, dx):
6✔
142
        e_line.append(*line.interpolate(distance).coords)
6✔
143
    e_line = shpg.LinearRing(e_line)
6✔
144

145
    i_lines = []
6✔
146
    for ipoly in polygon.interiors:
6✔
147
        line = shpg.LineString(np.asarray(ipoly.xy).T)
6✔
148
        if line.length < 3*dx:
6✔
149
            continue
6✔
150
        i_points = []
6✔
151
        for distance in np.arange(0.0, line.length, dx):
6✔
152
            i_points.append(*line.interpolate(distance).coords)
6✔
153
        i_lines.append(shpg.LinearRing(i_points))
6✔
154

155
    return shpg.Polygon(e_line, i_lines)
6✔
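# Illustrative sketch (not part of the module): densifying a 100 m square with
# a regular 10 m step adds vertices along each edge, which is what later makes
# the rasterisation of the outline on the glacier grid well behaved.
#
#     import shapely.geometry as shpg
#     square = shpg.Polygon([(0, 0), (100, 0), (100, 100), (0, 100)])
#     dense = _interp_polygon(square, dx=10.)
#     len(square.exterior.coords), len(dense.exterior.coords)  # (5, 41)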
156

157

158
def _polygon_to_pix(polygon):
9✔
159
    """Transforms polygon coordinates to integer pixel coordinates. It makes
160
    the geometry easier to handle and reduces the number of points.
161

162
    Parameters
163
    ----------
164
    polygon: the shapely.geometry.Polygon instance to transform.
165

166
    Returns
167
    -------
168
    a shapely.geometry.Polygon class instance.
169
    """
170

171
    def project(x, y):
6✔
172
        return np.rint(x).astype(np.int64), np.rint(y).astype(np.int64)
6✔
173

174
    def project_coarse(x, y, c=2):
6✔
175
        return ((np.rint(x/c)*c).astype(np.int64),
×
176
                (np.rint(y/c)*c).astype(np.int64))
177

178
    poly_pix = shapely.ops.transform(project, polygon)
6✔
179

180
    # simple trick to correct invalid polys:
181
    tmp = poly_pix.buffer(0)
6✔
182

183
    # try to deal with a bug in buffer where the corrected poly would be null
184
    c = 2
6✔
185
    while tmp.length == 0 and c < 7:
6✔
186
        project = partial(project_coarse, c=c)
×
187
        poly_pix = shapely.ops.transform(project_coarse, polygon)
×
188
        tmp = poly_pix.buffer(0)
×
189
        c += 1
×
190

191
    # We tried all we could
192
    if tmp.length == 0:
6✔
193
        raise InvalidGeometryError('This glacier geometry is not valid for '
×
194
                                   'OGGM.')
195

196
    # sometimes the glacier gets cut out in parts
197
    if tmp.type == 'MultiPolygon':
6✔
198
        # If only small arms are cut out, remove them
199
        area = np.array([_tmp.area for _tmp in tmp.geoms])
6✔
200
        _tokeep = np.argmax(area).item()
6✔
201
        tmp = tmp.geoms[_tokeep]
6✔
202

203
        # check that the other parts really are small,
204
        # otherwise replace tmp with something better
205
        area = area / area[_tokeep]
6✔
206
        for _a in area:
6✔
207
            if _a != 1 and _a > 0.05:
6✔
208
                # these are extremely thin glaciers
209
                # eg. RGI40-11.01381 RGI40-11.01697 params.d1 = 5. and d2 = 8.
210
                # make them bigger until its ok
211
                for b in np.arange(0., 1., 0.01):
×
212
                    tmp = shapely.ops.transform(project, polygon.buffer(b))
×
213
                    tmp = tmp.buffer(0)
×
214
                    if tmp.type == 'MultiPolygon':
×
215
                        continue
×
216
                    if tmp.is_valid:
×
217
                        break
×
218
                if b == 0.99:
×
219
                    raise InvalidGeometryError('This glacier geometry is not '
×
220
                                               'valid for OGGM.')
221

222
    if not tmp.is_valid:
6✔
223
        raise InvalidGeometryError('This glacier geometry is not valid '
×
224
                                   'for OGGM.')
225

226
    return tmp
6✔
227

228

229
def glacier_grid_params(gdir):
9✔
230
    """Define the glacier grid map based on the user params."""
231

232
    # Get the local map proj params and glacier extent
233
    gdf = gdir.read_shapefile('outlines')
6✔
234

235
    # Get the map proj
236
    utm_proj = salem.check_crs(gdf.crs)
6✔
237

238
    # Get glacier extent
239
    xx, yy = gdf.iloc[0]['geometry'].exterior.xy
6✔
240

241
    # Define glacier area to use
242
    area = gdir.rgi_area_km2
6✔
243

244
    # Choose a spatial resolution with respect to the glacier area
245
    dxmethod = cfg.PARAMS['grid_dx_method']
6✔
246
    if dxmethod == 'linear':
6✔
247
        dx = np.rint(cfg.PARAMS['d1'] * area + cfg.PARAMS['d2'])
1✔
248
    elif dxmethod == 'square':
6✔
249
        dx = np.rint(cfg.PARAMS['d1'] * np.sqrt(area) + cfg.PARAMS['d2'])
6✔
250
    elif dxmethod == 'fixed':
2✔
251
        dx = np.rint(cfg.PARAMS['fixed_dx'])
2✔
252
    else:
253
        raise InvalidParamsError('grid_dx_method not supported: {}'
×
254
                                 .format(dxmethod))
255
    # Additional trick for varying dx
256
    if dxmethod in ['linear', 'square']:
6✔
257
        dx = utils.clip_scalar(dx, cfg.PARAMS['d2'], cfg.PARAMS['dmax'])
6✔
258

259
    log.debug('(%s) area %.2f km2, dx=%.1f', gdir.rgi_id, area, dx)
6✔
260

261
    # Safety check
262
    border = cfg.PARAMS['border']
6✔
263
    if border > 1000:
6✔
264
        raise InvalidParamsError("You have set a cfg.PARAMS['border'] value "
×
265
                                 "of {}. ".format(cfg.PARAMS['border']) +
266
                                 'This a very large value, which is '
267
                                 'currently not supported in OGGM.')
268

269
    # For tidewater glaciers we force border to 10
270
    if gdir.is_tidewater and cfg.PARAMS['clip_tidewater_border']:
6✔
271
        border = 10
5✔
272

273
    # Corners, incl. a buffer of N pix
274
    ulx = np.min(xx) - border * dx
6✔
275
    lrx = np.max(xx) + border * dx
6✔
276
    uly = np.max(yy) + border * dx
6✔
277
    lry = np.min(yy) - border * dx
6✔
278
    # n pixels
279
    nx = int((lrx - ulx) / dx)
6✔
280
    ny = int((uly - lry) / dx)
6✔
281

282
    return utm_proj, nx, ny, ulx, uly, dx
6✔
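# Worked example of the resolution rule above (a sketch, assuming the default
# parameters d1=14, d2=10, dmax=100 and grid_dx_method='square'; see
# params.cfg for the actual values):
#
#     dx = np.rint(14 * np.sqrt(8.) + 10)  # ~8 km2 glacier (Hintereisferner)
#     # -> 50.0 m, while very large glaciers are clipped to dmax = 100 m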
283

284

285
@entity_task(log, writes=['glacier_grid', 'dem', 'outlines'])
9✔
286
def define_glacier_region(gdir, entity=None, source=None):
9✔
287
    """Very first task after initialization: define the glacier's local grid.
288

289
    Defines the local projection (Transverse Mercator), centered on the
290
    glacier. There are several options to set the resolution of the local grid.
291
    It can be adapted depending on the size of the glacier with::
292

293
        dx (m) = d1 * AREA (km2) + d2 ; clipped to dmax
294

295
    or be set to a fixed value. See ``params.cfg`` for setting these options.
296
    Default values of the adapted mode lead to a resolution of 50 m for
297
    Hintereisferner, which covers approx. 8 km2.
298
    After defining the grid, the topography and the outlines of the glacier
299
    are transformed into the local projection. The default interpolation for
300
    the topography is `cubic`.
301

302
    Parameters
303
    ----------
304
    gdir : :py:class:`oggm.GlacierDirectory`
305
        where to write the data
306
    entity : geopandas.GeoSeries
307
        the glacier geometry to process - DEPRECATED. It is now ignored
308
    source : str or list of str, optional
309
        If you want to force the use of a certain DEM source. Available are:
310
          - 'USER' : file set in cfg.PATHS['dem_file']
311
          - 'SRTM' : http://srtm.csi.cgiar.org/
312
          - 'GIMP' : https://bpcrc.osu.edu/gdg/data/gimpdem
313
          - 'RAMP' : https://nsidc.org/data/nsidc-0082/versions/2/documentation
314
          - 'REMA' : https://www.pgc.umn.edu/data/rema/
315
          - 'DEM3' : http://viewfinderpanoramas.org/
316
          - 'ASTER' : https://asterweb.jpl.nasa.gov/gdem.asp
317
          - 'TANDEM' : https://geoservice.dlr.de/web/dataguide/tdm90/
318
          - 'ARCTICDEM' : https://www.pgc.umn.edu/data/arcticdem/
319
          - 'AW3D30' : https://www.eorc.jaxa.jp
320
          - 'MAPZEN' : https://registry.opendata.aws/terrain-tiles/
321
          - 'ALASKA' : https://www.the-cryosphere.net/8/503/2014/
322
          - 'COPDEM30' : Copernicus DEM GLO30 https://spacedata.copernicus.eu/web/cscda/cop-dem-faq
323
          - 'COPDEM90' : Copernicus DEM GLO90 https://spacedata.copernicus.eu/web/cscda/cop-dem-faq
324
          - 'NASADEM':  https://doi.org/10.5069/G93T9FD9
325
    """
326

327
    utm_proj, nx, ny, ulx, uly, dx = glacier_grid_params(gdir)
6✔
328

329
    # Back to lon, lat for DEM download/preparation
330
    tmp_grid = salem.Grid(proj=utm_proj, nxny=(nx, ny), x0y0=(ulx, uly),
6✔
331
                          dxdy=(dx, -dx), pixel_ref='corner')
332
    minlon, maxlon, minlat, maxlat = tmp_grid.extent_in_crs(crs=salem.wgs84)
6✔
333

334
    # Open DEM
335
    # We test DEM availability for glacier only (maps can grow big)
336
    if isinstance(source, list):  # when multiple sources are provided, try them sequentially
6✔
337
        for src in source:
×
338
            source_exists = is_dem_source_available(src, *gdir.extent_ll)
×
339
            if source_exists:
×
340
                source = src  # pick the first source which exists
×
341
                break
×
342
    else:
343
        source_exists = is_dem_source_available(source, *gdir.extent_ll)
6✔
344
        
345
    if not source_exists:
6✔
346
        raise InvalidWorkflowError(f'Source: {source} is not available for '
×
347
                                   f'glacier {gdir.rgi_id} with border '
348
                                   f"{cfg.PARAMS['border']}")
349
    dem_list, dem_source = get_topo_file((minlon, maxlon), (minlat, maxlat),
6✔
350
                                         rgi_id=gdir.rgi_id,
351
                                         dx_meter=dx,
352
                                         source=source)
353
    log.debug('(%s) DEM source: %s', gdir.rgi_id, dem_source)
6✔
354
    log.debug('(%s) N DEM Files: %s', gdir.rgi_id, len(dem_list))
6✔
355

356
    # Decide how to tag nodata
357
    def _get_nodata(rio_ds):
6✔
358
        nodata = rio_ds[0].meta.get('nodata', None)
6✔
359
        if nodata is None:
6✔
360
            # badly tagged geotiffs, let's do it ourselves
361
            nodata = -32767 if source == 'TANDEM' else -9999
5✔
362
        return nodata
6✔
363

364
    # A glacier area can cover more than one tile:
365
    if len(dem_list) == 1:
6✔
366
        dem_dss = [rasterio.open(dem_list[0])]  # if one tile, just open it
6✔
367
        dem_data = rasterio.band(dem_dss[0], 1)
6✔
368
        if Version(rasterio.__version__) >= Version('1.0'):
6✔
369
            src_transform = dem_dss[0].transform
6✔
370
        else:
371
            src_transform = dem_dss[0].affine
×
372
        nodata = _get_nodata(dem_dss)
6✔
373
    else:
374
        dem_dss = [rasterio.open(s) for s in dem_list]  # list of rasters
×
375
        nodata = _get_nodata(dem_dss)
×
376
        dem_data, src_transform = merge_tool(dem_dss, nodata=nodata)  # merge
×
377

378
    # Use Grid properties to create a transform (see rasterio cookbook)
379
    dst_transform = rasterio.transform.from_origin(
6✔
380
        ulx, uly, dx, dx  # sign change (2nd dx) is done by rasterio.transform
381
    )
382

383
    # Set up profile for writing output
384
    profile = dem_dss[0].profile
6✔
385
    profile.update({
6✔
386
        'crs': utm_proj.srs,
387
        'transform': dst_transform,
388
        'nodata': nodata,
389
        'width': nx,
390
        'height': ny,
391
        'driver': 'GTiff'
392
    })
393

394
    # Could be extended so that the cfg file takes all Resampling.* methods
395
    if cfg.PARAMS['topo_interp'] == 'bilinear':
6✔
396
        resampling = Resampling.bilinear
×
397
    elif cfg.PARAMS['topo_interp'] == 'cubic':
6✔
398
        resampling = Resampling.cubic
6✔
399
    else:
400
        raise InvalidParamsError('{} interpolation not understood'
×
401
                                 .format(cfg.PARAMS['topo_interp']))
402

403
    dem_reproj = gdir.get_filepath('dem')
6✔
404
    profile.pop('blockxsize', None)
6✔
405
    profile.pop('blockysize', None)
6✔
406
    profile.pop('compress', None)
6✔
407
    with rasterio.open(dem_reproj, 'w', **profile) as dest:
6✔
408
        dst_array = np.empty((ny, nx), dtype=dem_dss[0].dtypes[0])
6✔
409
        reproject(
6✔
410
            # Source parameters
411
            source=dem_data,
412
            src_crs=dem_dss[0].crs,
413
            src_transform=src_transform,
414
            src_nodata=nodata,
415
            # Destination parameters
416
            destination=dst_array,
417
            dst_transform=dst_transform,
418
            dst_crs=utm_proj.srs,
419
            dst_nodata=nodata,
420
            # Configuration
421
            resampling=resampling)
422
        dest.write(dst_array, 1)
6✔
423

424
    for dem_ds in dem_dss:
6✔
425
        dem_ds.close()
6✔
426

427
    # Glacier grid
428
    x0y0 = (ulx+dx/2, uly-dx/2)  # To pixel center coordinates
6✔
429
    glacier_grid = salem.Grid(proj=utm_proj, nxny=(nx, ny), dxdy=(dx, -dx),
6✔
430
                              x0y0=x0y0)
431
    glacier_grid.to_json(gdir.get_filepath('glacier_grid'))
6✔
432

433
    # Write DEM source info
434
    gdir.add_to_diagnostics('dem_source', dem_source)
6✔
435
    source_txt = DEM_SOURCE_INFO.get(dem_source, dem_source)
6✔
436
    with open(gdir.get_filepath('dem_source'), 'w') as fw:
6✔
437
        fw.write(source_txt)
6✔
438
        fw.write('\n\n')
6✔
439
        fw.write('# Data files\n\n')
6✔
440
        for fname in dem_list:
6✔
441
            fw.write('{}\n'.format(os.path.basename(fname)))
6✔
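# Usage sketch (illustrative): the task is normally invoked through the OGGM
# workflow machinery, but `source` can also be passed explicitly. A list of
# sources is tried in order and the first one available for the glacier
# extent is used.
#
#     define_glacier_region(gdir, source='COPDEM90')          # force one DEM
#     define_glacier_region(gdir, source=['TANDEM', 'SRTM'])  # try in order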
442

443

444
def rasterio_to_gdir(gdir, input_file, output_file_name,
9✔
445
                     resampling='cubic'):
446
    """Reprojects a file that rasterio can read into the glacier directory.
447

448
    Parameters
449
    ----------
450
    gdir : :py:class:`oggm.GlacierDirectory`
451
        the glacier directory
452
    input_file : str
453
        path to the file to reproject
454
    output_file_name : str
455
        name of the output file (must be in cfg.BASENAMES)
456
    resampling : str
457
        'nearest', 'bilinear', 'cubic', 'cubic_spline', or one of
458
        https://rasterio.readthedocs.io/en/latest/topics/resampling.html
459
    """
460

461
    output_file = gdir.get_filepath(output_file_name)
2✔
462
    assert '.tif' in output_file, 'output_file should end with .tif'
2✔
463

464
    if not gdir.has_file('dem'):
2✔
465
        raise InvalidWorkflowError('Need a dem.tif file to reproject to')
×
466

467
    with rasterio.open(input_file) as src:
2✔
468

469
        kwargs = src.meta.copy()
2✔
470
        data = src.read(1)
2✔
471

472
        with rasterio.open(gdir.get_filepath('dem')) as tpl:
2✔
473

474
            kwargs.update({
2✔
475
                'crs': tpl.crs,
476
                'transform': tpl.transform,
477
                'width': tpl.width,
478
                'height': tpl.height
479
            })
480

481
            with rasterio.open(output_file, 'w', **kwargs) as dst:
2✔
482
                for i in range(1, src.count + 1):
2✔
483

484
                    dest = np.zeros(shape=(tpl.height, tpl.width),
2✔
485
                                    dtype=data.dtype)
486

487
                    reproject(
2✔
488
                        source=rasterio.band(src, i),
489
                        destination=dest,
490
                        src_transform=src.transform,
491
                        src_crs=src.crs,
492
                        dst_transform=tpl.transform,
493
                        dst_crs=tpl.crs,
494
                        resampling=getattr(Resampling, resampling)
495
                    )
496

497
                    dst.write(dest, indexes=i)
2✔
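# Usage sketch (illustrative, the file path and basename are made up): any
# raster readable by rasterio is reprojected onto the grid defined by the
# glacier's dem.tif. The output basename must be registered in cfg.BASENAMES
# and end with .tif.
#
#     rasterio_to_gdir(gdir, '/path/to/some_raster.tif', 'my_raster',
#                      resampling='bilinear')
#     # -> writes gdir.get_filepath('my_raster'), aligned with dem.tif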
498

499

500
def read_geotiff_dem(gdir):
9✔
501
    """Reads (and masks out) the DEM out of the gdir's geotiff file.
502

503
    Parameters
504
    ----------
505
    gdir : :py:class:`oggm.GlacierDirectory`
506
        the glacier directory
507

508
    Returns
509
    -------
510
    2D np.float32 array
511
    """
512
    with rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff') as ds:
6✔
513
        topo = ds.read(1).astype(rasterio.float32)
6✔
514
        topo[topo <= -999.] = np.NaN
6✔
515
        topo[ds.read_masks(1) == 0] = np.NaN
6✔
516
    return topo
6✔
517

518

519
class GriddedNcdfFile(object):
9✔
520
    """Creates or opens a gridded netcdf file template.
521

522
    The other variables have to be created and filled by the calling
523
    routine.
524
    """
525
    def __init__(self, gdir, basename='gridded_data', reset=False):
9✔
526
        self.fpath = gdir.get_filepath(basename)
6✔
527
        self.grid = gdir.grid
6✔
528
        if reset and os.path.exists(self.fpath):
6✔
529
            os.remove(self.fpath)
1✔
530

531
    def __enter__(self):
9✔
532

533
        if os.path.exists(self.fpath):
6✔
534
            # Already there - just append
535
            self.nc = ncDataset(self.fpath, 'a', format='NETCDF4')
6✔
536
            return self.nc
6✔
537

538
        # Create and fill
539
        nc = ncDataset(self.fpath, 'w', format='NETCDF4')
6✔
540

541
        nc.createDimension('x', self.grid.nx)
6✔
542
        nc.createDimension('y', self.grid.ny)
6✔
543

544
        nc.author = 'OGGM'
6✔
545
        nc.author_info = 'Open Global Glacier Model'
6✔
546
        nc.pyproj_srs = self.grid.proj.srs
6✔
547

548
        x = self.grid.x0 + np.arange(self.grid.nx) * self.grid.dx
6✔
549
        y = self.grid.y0 + np.arange(self.grid.ny) * self.grid.dy
6✔
550

551
        v = nc.createVariable('x', 'f4', ('x',), zlib=True)
6✔
552
        v.units = 'm'
6✔
553
        v.long_name = 'x coordinate of projection'
6✔
554
        v.standard_name = 'projection_x_coordinate'
6✔
555
        v[:] = x
6✔
556

557
        v = nc.createVariable('y', 'f4', ('y',), zlib=True)
6✔
558
        v.units = 'm'
6✔
559
        v.long_name = 'y coordinate of projection'
6✔
560
        v.standard_name = 'projection_y_coordinate'
6✔
561
        v[:] = y
6✔
562

563
        self.nc = nc
6✔
564
        return nc
6✔
565

566
    def __exit__(self, exc_type, exc_value, exc_traceback):
9✔
567
        self.nc.close()
6✔
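# Typical usage sketch (mirrors the tasks further below; 'my_var' and data_2d
# are made-up names): open or create the template file, then add variables on
# the ('y', 'x') dimensions it defines.
#
#     with GriddedNcdfFile(gdir) as nc:
#         v = nc.createVariable('my_var', 'f4', ('y', 'x',), zlib=True)
#         v.units = 'm'
#         v.long_name = 'An example gridded field'
#         v[:] = data_2d  # any array of shape (grid.ny, grid.nx)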
568

569

570
@entity_task(log, writes=['gridded_data'])
9✔
571
def process_dem(gdir):
9✔
572
    """Reads the DEM from the tiff, attempts to fill voids and apply smooth.
573

574
    The data is then written to `gridded_data.nc`.
575

576
    Parameters
577
    ----------
578
    gdir : :py:class:`oggm.GlacierDirectory`
579
        where to write the data
580
    """
581

582
    # open srtm tif-file:
583
    dem = read_geotiff_dem(gdir)
6✔
584

585
    # Grid
586
    nx = gdir.grid.nx
6✔
587
    ny = gdir.grid.ny
6✔
588

589
    # Correct the DEM
590
    valid_mask = np.isfinite(dem)
6✔
591
    if np.all(~valid_mask):
6✔
592
        raise InvalidDEMError('Not a single valid grid point in DEM')
×
593

594
    if np.any(~valid_mask):
6✔
595
        # We interpolate
596
        if np.sum(~valid_mask) > (0.25 * nx * ny):
1✔
597
            log.info('({}) more than 25% NaNs in DEM'.format(gdir.rgi_id))
×
598
        xx, yy = gdir.grid.ij_coordinates
1✔
599
        pnan = np.nonzero(~valid_mask)
1✔
600
        pok = np.nonzero(valid_mask)
1✔
601
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
1✔
602
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
1✔
603
        try:
1✔
604
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter,
1✔
605
                                 method='linear')
606
        except ValueError:
×
607
            raise InvalidDEMError('DEM interpolation not possible.')
×
608
        log.info(gdir.rgi_id + ': DEM needed interpolation.')
1✔
609
        gdir.add_to_diagnostics('dem_needed_interpolation', True)
1✔
610
        gdir.add_to_diagnostics('dem_invalid_perc', len(pnan[0]) / (nx * ny))
1✔
611

612
    isfinite = np.isfinite(dem)
6✔
613
    if np.any(~isfinite):
6✔
614
        # interpolation will still leave NaNs in DEM:
615
        # extrapolate with NN if needed (e.g. coastal areas)
616
        xx, yy = gdir.grid.ij_coordinates
1✔
617
        pnan = np.nonzero(~isfinite)
1✔
618
        pok = np.nonzero(isfinite)
1✔
619
        points = np.array((np.ravel(yy[pok]), np.ravel(xx[pok]))).T
1✔
620
        inter = np.array((np.ravel(yy[pnan]), np.ravel(xx[pnan]))).T
1✔
621
        try:
1✔
622
            dem[pnan] = griddata(points, np.ravel(dem[pok]), inter,
1✔
623
                                 method='nearest')
624
        except ValueError:
×
625
            raise InvalidDEMError('DEM extrapolation not possible.')
×
626
        log.info(gdir.rgi_id + ': DEM needed extrapolation.')
1✔
627
        gdir.add_to_diagnostics('dem_needed_extrapolation', True)
1✔
628
        gdir.add_to_diagnostics('dem_extrapol_perc', len(pnan[0]) / (nx * ny))
1✔
629

630
    if np.min(dem) == np.max(dem):
6✔
631
        raise InvalidDEMError('({}) min equal max in the DEM.'
×
632
                              .format(gdir.rgi_id))
633

634
    # Clip topography to 0 m a.s.l.
635
    utils.clip_min(dem, 0, out=dem)
6✔
636

637
    # Smooth DEM?
638
    if cfg.PARAMS['smooth_window'] > 0.:
6✔
639
        gsize = np.rint(cfg.PARAMS['smooth_window'] / gdir.grid.dx)
6✔
640
        smoothed_dem = gaussian_blur(dem, int(gsize))
6✔
641
    else:
642
        smoothed_dem = dem.copy()
×
643

644
    # Clip topography to 0 m a.s.l.
645
    utils.clip_min(smoothed_dem, 0, out=smoothed_dem)
6✔
646

647
    # Write to file
648
    with GriddedNcdfFile(gdir, reset=True) as nc:
6✔
649

650
        v = nc.createVariable('topo', 'f4', ('y', 'x',), zlib=True)
6✔
651
        v.units = 'm'
6✔
652
        v.long_name = 'DEM topography'
6✔
653
        v[:] = dem
6✔
654

655
        v = nc.createVariable('topo_smoothed', 'f4', ('y', 'x',), zlib=True)
6✔
656
        v.units = 'm'
6✔
657
        v.long_name = ('DEM topography smoothed with radius: '
6✔
658
                       '{:.1f} m'.format(cfg.PARAMS['smooth_window']))
659
        v[:] = smoothed_dem
6✔
660

661
        # If there was some invalid data store this as well
662
        v = nc.createVariable('topo_valid_mask', 'i1', ('y', 'x',), zlib=True)
6✔
663
        v.units = '-'
6✔
664
        v.long_name = 'DEM validity mask according to geotiff input (1-0)'
6✔
665
        v[:] = valid_mask.astype(int)
6✔
666

667
        # add some meta stats and close
668
        nc.max_h_dem = np.nanmax(dem)
6✔
669
        nc.min_h_dem = np.nanmin(dem)
6✔
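# Void-filling sketch (illustrative, not part of the module): the same
# two-step strategy as above on a tiny array - linear interpolation for
# interior holes, nearest-neighbour extrapolation for whatever is left.
#
#     import numpy as np
#     from scipy.interpolate import griddata
#     dem = np.arange(25, dtype=float).reshape(5, 5)
#     dem[2, 2] = np.NaN                       # one interior void
#     yy, xx = np.indices(dem.shape)
#     ok, bad = np.isfinite(dem), ~np.isfinite(dem)
#     dem[bad] = griddata(np.c_[yy[ok], xx[ok]], dem[ok],
#                         np.c_[yy[bad], xx[bad]], method='linear')
#     # dem[2, 2] -> 12.0 (the surface is planar, so the fill is exact)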
670

671

672
@entity_task(log, writes=['gridded_data', 'geometries'])
9✔
673
def glacier_masks(gdir):
9✔
674
    """Makes a gridded mask of the glacier outlines that can be used by OGGM.
675

676
    For a more robust solution (not OGGM compatible) see simple_glacier_masks.
677

678
    Parameters
679
    ----------
680
    gdir : :py:class:`oggm.GlacierDirectory`
681
        where to write the data
682
    """
683

684
    # In case nominal, just raise
685
    if gdir.is_nominal:
6✔
686
        raise GeometryError('{} is a nominal glacier.'.format(gdir.rgi_id))
1✔
687

688
    if not os.path.exists(gdir.get_filepath('gridded_data')):
6✔
689
        # In a possible future, we might actually want to raise a
690
        # deprecation warning here
691
        process_dem(gdir)
6✔
692

693
    # Geometries
694
    geometry = gdir.read_shapefile('outlines').geometry[0]
6✔
695

696
    # Interpolate shape to a regular path
697
    glacier_poly_hr = _interp_polygon(geometry, gdir.grid.dx)
6✔
698

699
    # Transform geometry into grid coordinates
700
    # It has to be in pix center coordinates because of how skimage works
701
    def proj(x, y):
6✔
702
        grid = gdir.grid.center_grid
6✔
703
        return grid.transform(x, y, crs=grid.proj)
6✔
704
    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)
6✔
705

706
    # simple trick to correct invalid polys:
707
    # http://stackoverflow.com/questions/20833344/
708
    # fix-invalid-polygon-python-shapely
709
    glacier_poly_hr = glacier_poly_hr.buffer(0)
6✔
710
    if not glacier_poly_hr.is_valid:
6✔
711
        raise InvalidGeometryError('This glacier geometry is not valid.')
×
712

713
    # Rounded nearest pix
714
    glacier_poly_pix = _polygon_to_pix(glacier_poly_hr)
6✔
715
    if glacier_poly_pix.exterior is None:
6✔
716
        raise InvalidGeometryError('Problem in converting glacier geometry '
×
717
                                   'to grid resolution.')
718

719
    # Compute the glacier mask (currently: center pixels + touched)
720
    nx, ny = gdir.grid.nx, gdir.grid.ny
6✔
721
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
6✔
722
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)
6✔
723
    (x, y) = glacier_poly_pix.exterior.xy
6✔
724
    glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
6✔
725
    for gint in glacier_poly_pix.interiors:
6✔
726
        x, y = tuple2int(gint.xy)
6✔
727
        glacier_mask[skdraw.polygon(y, x)] = 0
6✔
728
        glacier_mask[y, x] = 0  # on the nunataks, no
6✔
729
    x, y = tuple2int(glacier_poly_pix.exterior.xy)
6✔
730
    glacier_mask[y, x] = 1
6✔
731
    glacier_ext[y, x] = 1
6✔
732

733
    # Because of the 0 values at nunataks boundaries, some "Ice Islands"
734
    # can happen within nunataks (e.g.: RGI40-11.00062)
735
    # See if we can filter them out easily
736
    regions, nregions = label(glacier_mask, structure=label_struct)
6✔
737
    if nregions > 1:
6✔
738
        log.debug('(%s) we had to cut an island in the mask', gdir.rgi_id)
3✔
739
        # Check the size of those
740
        region_sizes = [np.sum(regions == r) for r in np.arange(1, nregions+1)]
3✔
741
        am = np.argmax(region_sizes)
3✔
742
        # Check not a strange glacier
743
        sr = region_sizes.pop(am)
3✔
744
        for ss in region_sizes:
3✔
745
            assert (ss / sr) < 0.1
3✔
746
        glacier_mask[:] = 0
3✔
747
        glacier_mask[np.where(regions == (am+1))] = 1
3✔
748

749
    # Write geometries
750
    geometries = dict()
6✔
751
    geometries['polygon_hr'] = glacier_poly_hr
6✔
752
    geometries['polygon_pix'] = glacier_poly_pix
6✔
753
    geometries['polygon_area'] = geometry.area
6✔
754
    gdir.write_pickle(geometries, 'geometries')
6✔
755

756
    # write out the grids in the netcdf file
757
    with GriddedNcdfFile(gdir) as nc:
6✔
758

759
        if 'glacier_mask' not in nc.variables:
6✔
760
            v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ),
6✔
761
                                  zlib=True)
762
            v.units = '-'
6✔
763
            v.long_name = 'Glacier mask'
6✔
764
        else:
765
            v = nc.variables['glacier_mask']
1✔
766
        v[:] = glacier_mask
6✔
767

768
        if 'glacier_ext' not in nc.variables:
6✔
769
            v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ),
6✔
770
                                  zlib=True)
771
            v.units = '-'
6✔
772
            v.long_name = 'Glacier external boundaries'
6✔
773
        else:
774
            v = nc.variables['glacier_ext']
1✔
775
        v[:] = glacier_ext
6✔
776

777
        dem = nc.variables['topo'][:]
6✔
778

779
        if 'topo_valid_mask' not in nc.variables:
6✔
780
            msg = ('You seem to be running from old preprocessed directories. '
×
781
                   'See https://github.com/OGGM/oggm/issues/1095 for a fix.')
782
            raise InvalidWorkflowError(msg)
×
783
        valid_mask = nc.variables['topo_valid_mask'][:]
6✔
784

785
        # Last sanity check based on the masked dem
786
        tmp_max = np.max(dem[np.where(glacier_mask == 1)])
6✔
787
        tmp_min = np.min(dem[np.where(glacier_mask == 1)])
6✔
788
        if tmp_max < (tmp_min + 1):
6✔
789
            raise InvalidDEMError('({}) min equal max in the masked DEM.'
×
790
                                  .format(gdir.rgi_id))
791

792
        # Log DEM that needed processing within the glacier mask
793
        if gdir.get_diagnostics().get('dem_needed_interpolation', False):
6✔
794
            pnan = (valid_mask == 0) & glacier_mask
1✔
795
            gdir.add_to_diagnostics('dem_invalid_perc_in_mask',
1✔
796
                                    np.sum(pnan) / np.sum(glacier_mask))
797

798
        # add some meta stats and close
799
        dem_on_g = dem[np.where(glacier_mask)]
6✔
800
        nc.max_h_glacier = np.nanmax(dem_on_g)
6✔
801
        nc.min_h_glacier = np.nanmin(dem_on_g)
6✔
802

803

804
@entity_task(log, writes=['gridded_data', 'hypsometry'])
9✔
805
def simple_glacier_masks(gdir, write_hypsometry=False):
9✔
806
    """Compute glacier masks based on much simpler rules than OGGM's default.
807

808
    This is therefore more robust: we use this function to compute glacier
809
    hypsometries.
810

811
    Parameters
812
    ----------
813
    gdir : :py:class:`oggm.GlacierDirectory`
814
        where to write the data
815
    write_hypsometry : bool
816
        whether to write out the hypsometry file or not - it is used by, e.g.,
817
        rgitools
818
    """
819

820
    # In case nominal, just raise
821
    if gdir.is_nominal:
4✔
822
        raise GeometryError('{} is a nominal glacier.'.format(gdir.rgi_id))
×
823

824
    if not os.path.exists(gdir.get_filepath('gridded_data')):
4✔
825
        # In a possible future, we might actually want to raise a
826
        # deprecation warning here
827
        process_dem(gdir)
4✔
828

829
    # Geometries
830
    geometry = gdir.read_shapefile('outlines').geometry[0]
4✔
831

832
    # rio metadata
833
    with rasterio.open(gdir.get_filepath('dem'), 'r', driver='GTiff') as ds:
4✔
834
        data = ds.read(1).astype(rasterio.float32)
4✔
835
        profile = ds.profile
4✔
836

837
    # simple trick to correct invalid polys:
838
    # http://stackoverflow.com/questions/20833344/
839
    # fix-invalid-polygon-python-shapely
840
    geometry = geometry.buffer(0)
4✔
841
    if not geometry.is_valid:
4✔
842
        raise InvalidDEMError('This glacier geometry is not valid.')
×
843

844
    # Compute the glacier mask using rasterio
845
    # Small detour as mask only accepts DataReader objects
846
    profile['dtype'] = 'int16'
4✔
847
    profile.pop('nodata', None)
4✔
848
    with rasterio.io.MemoryFile() as memfile:
4✔
849
        with memfile.open(**profile) as dataset:
4✔
850
            dataset.write(data.astype(np.int16)[np.newaxis, ...])
4✔
851
        dem_data = rasterio.open(memfile.name)
4✔
852
        masked_dem, _ = riomask(dem_data, [shpg.mapping(geometry)],
4✔
853
                                filled=False)
854
    glacier_mask = ~masked_dem[0, ...].mask
4✔
855

856
    # Same without nunataks
857
    with rasterio.io.MemoryFile() as memfile:
4✔
858
        with memfile.open(**profile) as dataset:
4✔
859
            dataset.write(data.astype(np.int16)[np.newaxis, ...])
4✔
860
        dem_data = rasterio.open(memfile.name)
4✔
861
        poly = shpg.mapping(shpg.Polygon(geometry.exterior))
4✔
862
        masked_dem, _ = riomask(dem_data, [poly],
4✔
863
                                filled=False)
864
    glacier_mask_nonuna = ~masked_dem[0, ...].mask
4✔
865

866
    # Glacier exterior excluding nunataks
867
    erode = binary_erosion(glacier_mask_nonuna)
4✔
868
    glacier_ext = glacier_mask_nonuna ^ erode
4✔
869
    glacier_ext = np.where(glacier_mask_nonuna, glacier_ext, 0)
4✔
870

871
    dem = read_geotiff_dem(gdir)
4✔
872

873
    # Last sanity check based on the masked dem
874
    tmp_max = np.nanmax(dem[glacier_mask])
4✔
875
    tmp_min = np.nanmin(dem[glacier_mask])
4✔
876
    if tmp_max < (tmp_min + 1):
4✔
877
        raise InvalidDEMError('({}) min equal max in the masked DEM.'
×
878
                              .format(gdir.rgi_id))
879

880
    # write out the grids in the netcdf file
881
    with GriddedNcdfFile(gdir) as nc:
4✔
882

883
        if 'glacier_mask' not in nc.variables:
4✔
884
            v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ),
4✔
885
                                  zlib=True)
886
            v.units = '-'
4✔
887
            v.long_name = 'Glacier mask'
4✔
888
        else:
889
            v = nc.variables['glacier_mask']
1✔
890
        v[:] = glacier_mask
4✔
891

892
        if 'glacier_ext' not in nc.variables:
4✔
893
            v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ),
4✔
894
                                  zlib=True)
895
            v.units = '-'
4✔
896
            v.long_name = 'Glacier external boundaries'
4✔
897
        else:
898
            v = nc.variables['glacier_ext']
1✔
899
        v[:] = glacier_ext
4✔
900

901
        # Log DEM that needed processing within the glacier mask
902
        if 'topo_valid_mask' not in nc.variables:
4✔
903
            msg = ('You seem to be running from old preprocessed directories. '
×
904
                   'See https://github.com/OGGM/oggm/issues/1095 for a fix.')
905
            raise InvalidWorkflowError(msg)
×
906
        valid_mask = nc.variables['topo_valid_mask'][:]
4✔
907
        if gdir.get_diagnostics().get('dem_needed_interpolation', False):
4✔
908
            pnan = (valid_mask == 0) & glacier_mask
×
909
            gdir.add_to_diagnostics('dem_invalid_perc_in_mask',
×
910
                                    np.sum(pnan) / np.sum(glacier_mask))
911

912
        # add some meta stats and close
913
        nc.max_h_dem = np.nanmax(dem)
4✔
914
        nc.min_h_dem = np.nanmin(dem)
4✔
915
        dem_on_g = dem[np.where(glacier_mask)]
4✔
916
        nc.max_h_glacier = np.nanmax(dem_on_g)
4✔
917
        nc.min_h_glacier = np.nanmin(dem_on_g)
4✔
918

919
        # Last sanity check
920
        if nc.max_h_glacier < (nc.min_h_glacier + 1):
4✔
921
            raise InvalidDEMError('({}) min equal max in the masked DEM.'
×
922
                                  .format(gdir.rgi_id))
923

924
    # hypsometry if asked for
925
    if not write_hypsometry:
4✔
926
        return
4✔
927

928
    bsize = 50.
1✔
929
    dem_on_ice = dem[glacier_mask]
1✔
930
    bins = np.arange(nicenumber(dem_on_ice.min(), bsize, lower=True),
1✔
931
                     nicenumber(dem_on_ice.max(), bsize) + 0.01, bsize)
932

933
    h, _ = np.histogram(dem_on_ice, bins)
1✔
934
    h = h / np.sum(h) * 1000  # in permil
1✔
935

936
    # We want to convert the bins to ints but preserve their sum to 1000
937
    # Start with everything rounded down, then round up the numbers with the
938
    # highest fractional parts until the desired sum is reached.
939
    hi = np.floor(h).astype(int)
1✔
940
    hup = np.ceil(h).astype(int)
1✔
941
    aso = np.argsort(hup - h)
1✔
942
    for i in aso:
1✔
943
        hi[i] = hup[i]
1✔
944
        if np.sum(hi) == 1000:
1✔
945
            break
1✔
946

947
    # slope
948
    sy, sx = np.gradient(dem, gdir.grid.dx)
1✔
949
    aspect = np.arctan2(np.mean(-sx[glacier_mask]), np.mean(sy[glacier_mask]))
1✔
950
    aspect = np.rad2deg(aspect)
1✔
951
    if aspect < 0:
1✔
952
        aspect += 360
×
953
    slope = np.arctan(np.sqrt(sx ** 2 + sy ** 2))
1✔
954
    avg_slope = np.rad2deg(np.mean(slope[glacier_mask]))
1✔
955

956
    # write
957
    df = pd.DataFrame()
1✔
958
    df['RGIId'] = [gdir.rgi_id]
1✔
959
    df['GLIMSId'] = [gdir.glims_id]
1✔
960
    df['Zmin'] = [dem_on_ice.min()]
1✔
961
    df['Zmax'] = [dem_on_ice.max()]
1✔
962
    df['Zmed'] = [np.median(dem_on_ice)]
1✔
963
    df['Area'] = [gdir.rgi_area_km2]
1✔
964
    df['Slope'] = [avg_slope]
1✔
965
    df['Aspect'] = [aspect]
1✔
966
    for b, bs in zip(hi, (bins[1:] + bins[:-1])/2):
1✔
967
        df['{}'.format(np.round(bs).astype(int))] = [b]
1✔
968
    df.to_csv(gdir.get_filepath('hypsometry'), index=False)
1✔
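# Illustrative sketch of the bin rounding used above (largest-remainder
# rounding): round everything down, then round up the entries with the
# largest fractional parts until the sum of 1000 permil is restored.
#
#     import numpy as np
#     h = np.array([333.4, 333.3, 333.3])   # sums to 1000
#     hi, hup = np.floor(h).astype(int), np.ceil(h).astype(int)
#     for i in np.argsort(hup - h):          # largest fractional part first
#         hi[i] = hup[i]
#         if np.sum(hi) == 1000:
#             break
#     # hi -> [334, 333, 333]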
969

970

971
@entity_task(log, writes=['glacier_mask'])
9✔
972
def rasterio_glacier_mask(gdir, source=None):
9✔
973
    """Writes a 1-0 glacier mask GeoTiff with the same dimensions as dem.tif
974

975

976
    Parameters
977
    ----------
978
    gdir : :py:class:`oggm.GlacierDirectory`
979
        the glacier in question
980
    source : str
981

982
        - None (default): the task reads `dem.tif` from the GDir root
983
        - 'ALL': try to open any folder from `utils.DEM_SOURCES` and use first
984
        - any of `utils.DEM_SOURCES`: try only that one
985
    """
986

987
    if source is None:
1✔
988
        dempath = gdir.get_filepath('dem')
1✔
989
    elif source in utils.DEM_SOURCES:
1✔
990
        dempath = os.path.join(gdir.dir, source, 'dem.tif')
1✔
991
    else:
UNCOV
992
        for src in utils.DEM_SOURCES:
×
UNCOV
993
            dempath = os.path.join(gdir.dir, src, 'dem.tif')
×
UNCOV
994
            if os.path.isfile(dempath):
×
UNCOV
995
                break
×
996

997
    if not os.path.isfile(dempath):
1✔
998
        raise ValueError('The specified source does not give a valid DEM file')
1✔
999

1000
    # read dem profile
1001
    with rasterio.open(dempath, 'r', driver='GTiff') as ds:
1✔
1002
        profile = ds.profile
1✔
1003

1004
    # don't even bother reading the actual DEM, just mimic it
1005
    data = np.zeros((ds.height, ds.width))
1✔
1006

1007
    # Read RGI outlines
1008
    geometry = gdir.read_shapefile('outlines').geometry[0]
1✔
1009

1010
    # simple trick to correct invalid polys:
1011
    # http://stackoverflow.com/questions/20833344/
1012
    # fix-invalid-polygon-python-shapely
1013
    geometry = geometry.buffer(0)
1✔
1014
    if not geometry.is_valid:
1✔
1015
        raise InvalidDEMError('This glacier geometry is not valid.')
×
1016

1017
    # Compute the glacier mask using rasterio
1018
    # Small detour as mask only accepts DataReader objects
1019
    with rasterio.io.MemoryFile() as memfile:
1✔
1020
        with memfile.open(**profile) as dataset:
1✔
1021
            dataset.write(data.astype(profile['dtype'])[np.newaxis, ...])
1✔
1022
        dem_data = rasterio.open(memfile.name)
1✔
1023
        masked_dem, _ = riomask(dem_data, [shpg.mapping(geometry)],
1✔
1024
                                filled=False)
1025
    glacier_mask = ~masked_dem[0, ...].mask
1✔
1026

1027
    # parameters for the new tif
1028
    nodata = -32767
1✔
1029
    dtype = rasterio.int16
1✔
1030

1031
    # let's use integer
1032
    out = glacier_mask.astype(dtype)
1✔
1033

1034
    # and check for sanity
1035
    if not np.all(np.unique(out) == np.array([0, 1])):
1✔
1036
        raise InvalidDEMError('({}) masked DEM does not consist of 0/1 only.'
×
1037
                              .format(gdir.rgi_id))
1038

1039
    # Update existing profile for output
1040
    profile.update({
1✔
1041
        'dtype': dtype,
1042
        'nodata': nodata,
1043
    })
1044

1045
    with rasterio.open(gdir.get_filepath('glacier_mask'), 'w', **profile) as r:
1✔
1046
        r.write(out.astype(dtype), 1)
1✔
1047

1048

1049
@entity_task(log, writes=['gridded_data'])
9✔
1050
def gridded_attributes(gdir):
9✔
1051
    """Adds attributes to the gridded file, useful for thickness interpolation.
1052

1053
    This could be useful for distributed ice thickness models.
1054
    The raster data are added to the gridded_data file.
1055

1056
    Parameters
1057
    ----------
1058
    gdir : :py:class:`oggm.GlacierDirectory`
1059
        where to write the data
1060
    """
1061

1062
    # Variables
1063
    grids_file = gdir.get_filepath('gridded_data')
5✔
1064
    with ncDataset(grids_file) as nc:
5✔
1065
        topo_smoothed = nc.variables['topo_smoothed'][:]
5✔
1066
        glacier_mask = nc.variables['glacier_mask'][:]
5✔
1067

1068
    # Glacier exterior including nunataks
1069
    erode = binary_erosion(glacier_mask)
5✔
1070
    glacier_ext = glacier_mask ^ erode
5✔
1071
    glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0)
5✔
1072

1073
    # Intersects between glaciers
1074
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
5✔
1075
    if gdir.has_file('intersects'):
5✔
1076
        # read and transform to grid
1077
        gdf = gdir.read_shapefile('intersects')
4✔
1078
        salem.transform_geopandas(gdf, to_crs=gdir.grid, inplace=True)
4✔
1079
        gdfi = pd.concat([gdfi, gdf[['geometry']]])
4✔
1080

1081
    # Ice divide mask
1082
    # Probably not the fastest way to do this, but it works
1083
    dist = np.array([])
5✔
1084
    jj, ii = np.where(glacier_ext)
5✔
1085
    for j, i in zip(jj, ii):
5✔
1086
        dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j))))
5✔
1087

1088
    with warnings.catch_warnings():
5✔
1089
        warnings.filterwarnings("ignore", category=RuntimeWarning)
5✔
1090
        pok = np.where(dist <= 1)
5✔
1091
    glacier_ext_intersect = glacier_ext * 0
5✔
1092
    glacier_ext_intersect[jj[pok], ii[pok]] = 1
5✔
1093

1094
    # Distance from border mask - Scipy does the job
1095
    dx = gdir.grid.dx
5✔
1096
    dis_from_border = 1 + glacier_ext_intersect - glacier_ext
5✔
1097
    dis_from_border = distance_transform_edt(dis_from_border) * dx
5✔
1098

1099
    # Slope
1100
    glen_n = cfg.PARAMS['glen_n']
5✔
1101
    sy, sx = np.gradient(topo_smoothed, dx, dx)
5✔
1102
    slope = np.arctan(np.sqrt(sy**2 + sx**2))
5✔
1103
    min_slope = np.deg2rad(cfg.PARAMS['distributed_inversion_min_slope'])
5✔
1104
    slope_factor = utils.clip_array(slope, min_slope, np.pi/2)
5✔
1105
    slope_factor = 1 / slope_factor**(glen_n / (glen_n+2))
5✔
1106

1107
    aspect = np.arctan2(-sx, sy)
5✔
1108
    aspect[aspect < 0] += 2 * np.pi
5✔
1109

1110
    with ncDataset(grids_file, 'a') as nc:
5✔
1111

1112
        vn = 'glacier_ext_erosion'
5✔
1113
        if vn in nc.variables:
5✔
1114
            v = nc.variables[vn]
1✔
1115
        else:
1116
            v = nc.createVariable(vn, 'i1', ('y', 'x', ))
5✔
1117
        v.units = '-'
5✔
1118
        v.long_name = 'Glacier exterior with binary erosion method'
5✔
1119
        v[:] = glacier_ext
5✔
1120

1121
        vn = 'ice_divides'
5✔
1122
        if vn in nc.variables:
5✔
1123
            v = nc.variables[vn]
1✔
1124
        else:
1125
            v = nc.createVariable(vn, 'i1', ('y', 'x', ))
5✔
1126
        v.units = '-'
5✔
1127
        v.long_name = 'Glacier ice divides'
5✔
1128
        v[:] = glacier_ext_intersect
5✔
1129

1130
        vn = 'slope'
5✔
1131
        if vn in nc.variables:
5✔
1132
            v = nc.variables[vn]
1✔
1133
        else:
1134
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
5✔
1135
        v.units = 'rad'
5✔
1136
        v.long_name = 'Local slope based on smoothed topography'
5✔
1137
        v[:] = slope
5✔
1138

1139
        vn = 'aspect'
5✔
1140
        if vn in nc.variables:
5✔
1141
            v = nc.variables[vn]
1✔
1142
        else:
1143
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
5✔
1144
        v.units = 'rad'
5✔
1145
        v.long_name = 'Local aspect based on smoothed topography'
5✔
1146
        v[:] = aspect
5✔
1147

1148
        vn = 'slope_factor'
5✔
1149
        if vn in nc.variables:
5✔
1150
            v = nc.variables[vn]
1✔
1151
        else:
1152
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
5✔
1153
        v.units = '-'
5✔
1154
        v.long_name = 'Slope factor as defined in Farinotti et al 2009'
5✔
1155
        v[:] = slope_factor
5✔
1156

1157
        vn = 'dis_from_border'
5✔
1158
        if vn in nc.variables:
5✔
1159
            v = nc.variables[vn]
1✔
1160
        else:
1161
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
5✔
1162
        v.units = 'm'
5✔
1163
        v.long_name = 'Distance from glacier boundaries'
5✔
1164
        v[:] = dis_from_border
5✔
1165

1166

1167
def _all_inflows(cls, cl):
9✔
1168
    """Find all centerlines flowing into the centerline examined.
1169

1170
    Parameters
1171
    ----------
1172
    cls : list
1173
        all centerlines of the examined glacier
1174
    cl : Centerline
1175
        centerline to control
1176

1177
    Returns
1178
    -------
1179
    list of indices (as strings) of all inflowing centerlines
1180
    """
1181

1182
    ixs = [str(cls.index(cl.inflows[i])) for i in range(len(cl.inflows))]
×
1183
    for cl in cl.inflows:
×
1184
        ixs.extend(_all_inflows(cls, cl))
×
1185
    return ixs
×
1186

1187

1188
@entity_task(log)
9✔
1189
def gridded_mb_attributes(gdir):
9✔
1190
    """Adds mass balance related attributes to the gridded data file.
1191

1192
    This could be useful for distributed ice thickness models.
1193
    The raster data are added to the gridded_data file.
1194

1195
    Parameters
1196
    ----------
1197
    gdir : :py:class:`oggm.GlacierDirectory`
1198
        where to write the data
1199
    """
1200
    from oggm.core.massbalance import LinearMassBalance, ConstantMassBalance
1✔
1201
    from oggm.core.centerlines import line_inflows
1✔
1202

1203
    # Get the input data
1204
    with ncDataset(gdir.get_filepath('gridded_data')) as nc:
1✔
1205
        topo_2d = nc.variables['topo_smoothed'][:]
1✔
1206
        glacier_mask_2d = nc.variables['glacier_mask'][:]
1✔
1207
        glacier_mask_2d = glacier_mask_2d == 1
1✔
1208
        catchment_mask_2d = glacier_mask_2d * np.NaN
1✔
1209

1210
    cls = gdir.read_pickle('centerlines')
1✔
1211

1212
    # Catchment areas
1213
    cis = gdir.read_pickle('geometries')['catchment_indices']
1✔
1214
    for j, ci in enumerate(cis):
1✔
1215
        catchment_mask_2d[tuple(ci.T)] = j
1✔
1216

1217
    # Make everything we need flat
1218
    catchment_mask = catchment_mask_2d[glacier_mask_2d].astype(int)
1✔
1219
    topo = topo_2d[glacier_mask_2d]
1✔
1220

1221
    # Prepare the distributed mass balance data
1222
    rho = cfg.PARAMS['ice_density']
1✔
1223
    dx2 = gdir.grid.dx ** 2
1✔
1224

1225
    # Linear
1226
    def to_minimize(ela_h):
1✔
1227
        mbmod = LinearMassBalance(ela_h[0])
1✔
1228
        smb = mbmod.get_annual_mb(heights=topo)
1✔
1229
        return np.sum(smb)**2
1✔
1230
    ela_h = optimization.minimize(to_minimize, [0.], method='Powell')
1✔
1231
    mbmod = LinearMassBalance(float(ela_h['x']))
1✔
1232
    lin_mb_on_z = mbmod.get_annual_mb(heights=topo) * cfg.SEC_IN_YEAR * rho
1✔
1233
    if not np.isclose(np.sum(lin_mb_on_z), 0, atol=10):
1✔
1234
        raise RuntimeError('Spec mass balance should be zero but is: {}'
×
1235
                           .format(np.sum(lin_mb_on_z)))
1236

1237
    # Normal OGGM (a bit tweaked)
1238
    df = gdir.read_json('local_mustar')
1✔
1239

1240
    def to_minimize(mu_star):
1✔
1241
        mbmod = ConstantMassBalance(gdir, mu_star=mu_star, bias=0,
1✔
1242
                                    check_calib_params=False,
1243
                                    y0=df['t_star'])
1244
        smb = mbmod.get_annual_mb(heights=topo)
1✔
1245
        return np.sum(smb)**2
1✔
1246
    mu_star = optimization.minimize(to_minimize, [0.], method='Powell')
1✔
1247
    mbmod = ConstantMassBalance(gdir, mu_star=float(mu_star['x']), bias=0,
1✔
1248
                                check_calib_params=False,
1249
                                y0=df['t_star'])
1250
    oggm_mb_on_z = mbmod.get_annual_mb(heights=topo) * cfg.SEC_IN_YEAR * rho
1✔
1251
    if not np.isclose(np.sum(oggm_mb_on_z), 0, atol=10):
1✔
1252
        raise RuntimeError('Spec mass balance should be zero but is: {}'
×
1253
                           .format(np.sum(oggm_mb_on_z)))
1254

1255
    # Altitude based mass balance
1256
    catch_area_above_z = topo * np.NaN
1✔
1257
    lin_mb_above_z = topo * np.NaN
1✔
1258
    oggm_mb_above_z = topo * np.NaN
1✔
1259
    for i, h in enumerate(topo):
1✔
1260
        catch_area_above_z[i] = np.sum(topo >= h) * dx2
1✔
1261
        lin_mb_above_z[i] = np.sum(lin_mb_on_z[topo >= h]) * dx2
1✔
1262
        oggm_mb_above_z[i] = np.sum(oggm_mb_on_z[topo >= h]) * dx2
1✔
1263

1264
    # Hardest part - MB per catchment
1265
    catchment_area = topo * np.NaN
1✔
1266
    lin_mb_above_z_on_catch = topo * np.NaN
1✔
1267
    oggm_mb_above_z_on_catch = topo * np.NaN
1✔
1268

1269
    # First, find all inflows indices and min altitude per catchment
1270
    inflows = []
1✔
1271
    lowest_h = []
1✔
1272
    for i, cl in enumerate(cls):
1✔
1273
        lowest_h.append(np.min(topo[catchment_mask == i]))
1✔
1274
        inflows.append([cls.index(l) for l in line_inflows(cl, keep=False)])
1✔
1275

1276
    for i, (catch_id, h) in enumerate(zip(catchment_mask, topo)):
1✔
1277

1278
        if h == np.min(topo):
1✔
1279
            t = 1
1✔
1280

1281
        # Find the catchment area of the point itself by eliminating points
1282
        # below the point altitude. We assume we keep all of them first,
1283
        # then remove those we don't want
1284
        sel_catchs = inflows[catch_id].copy()
1✔
1285
        for catch in inflows[catch_id]:
1✔
1286
            if h >= lowest_h[catch]:
1✔
1287
                for cc in np.append(inflows[catch], catch):
1✔
1288
                    try:
1✔
1289
                        sel_catchs.remove(cc)
1✔
1290
                    except ValueError:
×
1291
                        pass
×
1292

1293
        # At the very least we need our own catchment
1294
        sel_catchs.append(catch_id)
1✔
1295

1296
        # Then select all the catchment points
1297
        sel_points = np.isin(catchment_mask, sel_catchs)
1✔
1298

1299
        # And keep the ones above our altitude
1300
        sel_points = sel_points & (topo >= h)
1✔
1301

1302
        # Compute
1303
        lin_mb_above_z_on_catch[i] = np.sum(lin_mb_on_z[sel_points]) * dx2
1✔
1304
        oggm_mb_above_z_on_catch[i] = np.sum(oggm_mb_on_z[sel_points]) * dx2
1✔
1305
        catchment_area[i] = np.sum(sel_points) * dx2
1✔
1306

1307
    # Make 2D again
1308
    def _fill_2d_like(data):
1✔
1309
        out = topo_2d * np.NaN
1✔
1310
        out[glacier_mask_2d] = data
1✔
1311
        return out
1✔
1312

1313
    catchment_area = _fill_2d_like(catchment_area)
1✔
1314
    catch_area_above_z = _fill_2d_like(catch_area_above_z)
1✔
1315
    lin_mb_above_z = _fill_2d_like(lin_mb_above_z)
1✔
1316
    oggm_mb_above_z = _fill_2d_like(oggm_mb_above_z)
1✔
1317
    lin_mb_above_z_on_catch = _fill_2d_like(lin_mb_above_z_on_catch)
1✔
1318
    oggm_mb_above_z_on_catch = _fill_2d_like(oggm_mb_above_z_on_catch)
1✔
1319

1320
    # Save to file
1321
    with ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
1✔
1322

1323
        vn = 'catchment_area'
1✔
1324
        if vn in nc.variables:
1✔
1325
            v = nc.variables[vn]
×
1326
        else:
1327
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
1✔
1328
        v.units = 'm^2'
1✔
1329
        v.long_name = 'Catchment area above point'
1✔
1330
        v.description = ('This is a very crude method: just the area above '
1✔
1331
                         'the elevation of the point on the glacier.')
1332
        v[:] = catch_area_above_z
1✔
1333

1334
        vn = 'catchment_area_on_catch'
1✔
1335
        if vn in nc.variables:
1✔
1336
            v = nc.variables[vn]
×
1337
        else:
1338
            v = nc.createVariable(vn, 'f4', ('y', 'x',))
1✔
1339
        v.units = 'm^2'
1✔
1340
        v.long_name = 'Catchment area above point on flowline catchments'
1✔
1341
        v.description = ('Uses the catchments masks of the flowlines to '
1✔
1342
                         'compute the area above the altitude of the given '
1343
                         'point.')
1344
        v[:] = catchment_area
1✔
1345

1346
        vn = 'lin_mb_above_z'
1✔
1347
        if vn in nc.variables:
1✔
1348
            v = nc.variables[vn]
×
1349
        else:
1350
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
1✔
1351
        v.units = 'kg/year'
1✔
1352
        v.long_name = 'MB above point from linear MB model, without catchments'
1✔
1353
        v.description = ('Mass balance cumulated above the altitude of the '
1✔
1354
                         'point, hence in unit of flux. Note that it is '
1355
                         'a coarse approximation of the real flux. '
1356
                         'The mass balance model is a simple linear function '
1357
                         'of altitude.')
1358
        v[:] = lin_mb_above_z
1✔
1359

1360
        vn = 'lin_mb_above_z_on_catch'
1✔
1361
        if vn in nc.variables:
1✔
1362
            v = nc.variables[vn]
×
1363
        else:
1364
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
1✔
1365
        v.units = 'kg/year'
1✔
1366
        v.long_name = 'MB above point from linear MB model, with catchments'
1✔
1367
        v.description = ('Mass balance cumulated above the altitude of the '
1✔
1368
                         'point in a flowline catchment, hence in unit of '
1369
                         'flux. Note that it is a coarse approximation of the '
1370
                         'real flux. The mass balance model is a simple '
1371
                         'linear function of altitude.')
1372
        v[:] = lin_mb_above_z_on_catch
1✔
1373

1374
        vn = 'oggm_mb_above_z'
1✔
1375
        if vn in nc.variables:
1✔
1376
            v = nc.variables[vn]
×
1377
        else:
1378
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
1✔
1379
        v.units = 'kg/year'
1✔
1380
        v.long_name = 'MB above point from OGGM MB model, without catchments'
1✔
1381
        v.description = ('Mass balance cumulated above the altitude of the'
1✔
1382
                         'point, hence in unit of flux. Note that it is '
1383
                         'a coarse approximation of the real flux. '
1384
                         'The mass balance model is a calibrated temperature '
1385
                         'index model like OGGM.')
1386
        v[:] = oggm_mb_above_z
1✔
1387

1388
        vn = 'oggm_mb_above_z_on_catch'
1✔
1389
        if vn in nc.variables:
1✔
1390
            v = nc.variables[vn]
×
1391
        else:
1392
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
1✔
1393
        v.units = 'kg/year'
1✔
1394
        v.long_name = 'MB above point from OGGM MB model, with catchments'
1✔
1395
        v.description = ('Mass balance cumulated above the altitude of the'
1✔
1396
                         'point in a flowline catchment, hence in unit of '
1397
                         'flux. Note that it is a coarse approximation of the '
1398
                         'real flux. The mass balance model is a calibrated '
1399
                         'temperature index model like OGGM.')
1400
        v[:] = oggm_mb_above_z_on_catch
1✔
1401

1402

1403
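# A minimal, illustrative sketch (not executed here) of how the gridded
# mass balance diagnostics written above could be inspected once the task
# has run; ``gdir`` is assumed to be a processed GlacierDirectory:
#
#     with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
#         mb = ds['oggm_mb_above_z_on_catch']   # kg per year above each pixel
#         area = ds['catchment_area_on_catch']  # m^2 above each pixel
#         print(float(mb.max()), float(area.max()))
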
def merged_glacier_masks(gdir, geometry):
    """Makes a gridded mask of merged glacier outlines.

    This is a simplified version of glacier_masks. We don't need fancy
    corrections or smoothing here: the flowlines for the actual model run are
    based on a proper call to glacier_masks.

    This task only provides outlines etc. for visualization!

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    geometry: shapely.geometry.multipolygon.MultiPolygon
        united outlines of the merged glaciers
    """

    # Open the DEM geotiff file
    dem = read_geotiff_dem(gdir)

    if np.min(dem) == np.max(dem):
        raise RuntimeError('({}) min equal max in the DEM.'
                           .format(gdir.rgi_id))

    # Clip topography to 0 m a.s.l.
    utils.clip_min(dem, 0, out=dem)

    # Interpolate shape to a regular path
    glacier_poly_hr = tolist(geometry)

    for nr, poly in enumerate(glacier_poly_hr):
        # transform geometry to map
        _geometry = salem.transform_geometry(poly, to_crs=gdir.grid.proj)
        glacier_poly_hr[nr] = _interp_polygon(_geometry, gdir.grid.dx)

    glacier_poly_hr = shpg.MultiPolygon(glacier_poly_hr)

    # Transform geometry into grid coordinates
    # It has to be in pix center coordinates because of how skimage works
    def proj(x, y):
        grid = gdir.grid.center_grid
        return grid.transform(x, y, crs=grid.proj)

    glacier_poly_hr = shapely.ops.transform(proj, glacier_poly_hr)

    # Simple trick to correct invalid polys:
    # http://stackoverflow.com/questions/20833344/
    # fix-invalid-polygon-python-shapely
    glacier_poly_hr = glacier_poly_hr.buffer(0)
    if not glacier_poly_hr.is_valid:
        raise RuntimeError('This glacier geometry is not valid.')

    # Round the geometry to the nearest pixel
    # (we cannot use _polygon_to_pix here:
    #  glacier_poly_pix = _polygon_to_pix(glacier_poly_hr))
    def project(x, y):
        return np.rint(x).astype(np.int64), np.rint(y).astype(np.int64)

    glacier_poly_pix = shapely.ops.transform(project, glacier_poly_hr)
    glacier_poly_pix_iter = tolist(glacier_poly_pix)

    # Compute the glacier mask (currently: center pixels + touched)
    nx, ny = gdir.grid.nx, gdir.grid.ny
    glacier_mask = np.zeros((ny, nx), dtype=np.uint8)
    glacier_ext = np.zeros((ny, nx), dtype=np.uint8)

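    # Rasterize each polygon: fill the interior with skimage.draw.polygon,
    # unset interior rings (nunataks), then burn the rounded exterior
    # coordinates into both the mask and the outline array.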
    for poly in glacier_poly_pix_iter:
        (x, y) = poly.exterior.xy
        glacier_mask[skdraw.polygon(np.array(y), np.array(x))] = 1
        for gint in poly.interiors:
            x, y = tuple2int(gint.xy)
            glacier_mask[skdraw.polygon(y, x)] = 0
            glacier_mask[y, x] = 0  # on the nunataks, no glacier
        x, y = tuple2int(poly.exterior.xy)
        glacier_mask[y, x] = 1
        glacier_ext[y, x] = 1

    # Last sanity check based on the masked DEM
    tmp_max = np.max(dem[np.where(glacier_mask == 1)])
    tmp_min = np.min(dem[np.where(glacier_mask == 1)])
    if tmp_max < (tmp_min + 1):
        raise RuntimeError('({}) min equal max in the masked DEM.'
                           .format(gdir.rgi_id))

    # Write out the grids in the netcdf file
    with GriddedNcdfFile(gdir, reset=True) as nc:

        v = nc.createVariable('topo', 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm'
        v.long_name = 'DEM topography'
        v[:] = dem

        v = nc.createVariable('glacier_mask', 'i1', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Glacier mask'
        v[:] = glacier_mask

        v = nc.createVariable('glacier_ext', 'i1', ('y', 'x', ), zlib=True)
        v.units = '-'
        v.long_name = 'Glacier external boundaries'
        v[:] = glacier_ext

        # Add some meta stats and close
        nc.max_h_dem = np.nanmax(dem)
        nc.min_h_dem = np.nanmin(dem)
        dem_on_g = dem[np.where(glacier_mask)]
        nc.max_h_glacier = np.max(dem_on_g)
        nc.min_h_glacier = np.min(dem_on_g)

    geometries = dict()
    geometries['polygon_hr'] = glacier_poly_hr
    geometries['polygon_pix'] = glacier_poly_pix
    geometries['polygon_area'] = geometry.area
    gdir.write_pickle(geometries, 'geometries')

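# A minimal, illustrative sketch of how the task above could be used and its
# products inspected. ``merged_outlines`` stands for a hypothetical shapely
# MultiPolygon of the united outlines; ``gdir`` is a processed
# GlacierDirectory:
#
#     merged_glacier_masks(gdir, geometry=merged_outlines)
#     geoms = gdir.read_pickle('geometries')
#     print(geoms['polygon_area'])  # area of the merged outline (map units)
#     with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
#         ds['glacier_mask'].plot()  # requires matplotlib
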
@entity_task(log)
def gridded_data_var_to_geotiff(gdir, varname, fname=None):
    """Writes a NetCDF variable to a georeferenced geotiff file.

    The geotiff file will be written in the gdir directory.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    varname : str
        variable name in gridded_data.nc
    fname : str
        output file name (should end with `.tif`), default is `varname.tif`
    """

    # Assign the output path
    if fname is None:
        fname = varname + '.tif'
    outpath = os.path.join(gdir.dir, fname)

    # Locate gridded_data.nc file and read it
    nc_path = gdir.get_filepath('gridded_data')
    with xr.open_dataset(nc_path) as ds:

        # Prepare the profile dict
        crs = ds.pyproj_srs
        var = ds[varname]
        grid = ds.salem.grid
        data = var.data
        data_type = data.dtype.name
        height, width = var.data.shape
        dx, dy = grid.dx, grid.dy
        x0, y0 = grid.x0, grid.y0

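        # rasterio.Affine(a, b, c, d, e, f) maps pixel (col, row) to
        # x = a*col + b*row + c and y = d*col + e*row + f; with
        # (dx, 0, x0, 0, dy, y0) below this is a plain scale-and-offset
        # from the salem grid origin.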
        profile = {'driver': 'GTiff', 'dtype': data_type, 'nodata': None,
                   'width': width, 'height': height, 'count': 1,
                   'crs': crs,
                   'transform': rasterio.Affine(dx, 0.0, x0,
                                                0.0, dy, y0),
                   'tiled': True,
                   'interleave': 'band'}

        # Write GeoTiff file
        with rasterio.open(outpath, 'w', **profile) as dst:
            dst.write(data, 1)
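
# A minimal, illustrative usage sketch (assumptions: ``gdir`` is a processed
# GlacierDirectory and 'topo' is available in its gridded_data.nc):
#
#     gridded_data_var_to_geotiff(gdir, varname='topo')
#     with rasterio.open(os.path.join(gdir.dir, 'topo.tif')) as src:
#         topo = src.read(1)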