• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

lutraconsulting / MDAL / 18500207831

14 Oct 2025 02:28PM UTC coverage: 89.585%. First build
18500207831

push

github

uclaros
remove whitespace (astyle)

0 of 1 new or added line in 1 file covered. (0.0%)

9711 of 10840 relevant lines covered (89.58%)

73325.64 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

80.72
/mdal/frmts/mdal_hdf5.cpp
1
/*
2
 MDAL - Mesh Data Abstraction Library (MIT License)
3
 Copyright (C) 2018 Lutra Consulting Limited
4
*/
5

6
#include "mdal_hdf5.hpp"
7
#include <cstring>
8
#include <algorithm>
9

10
//! Opens (ReadOnly/ReadWrite) or creates (Create) an HDF5 file at \a path.
//! On open failure the internal handle stays null; callers must check isValid().
HdfFile::HdfFile( const std::string &path, HdfFile::Mode mode )
  : mPath( path )
{
  if ( mode == HdfFile::Create )
  {
    // H5F_ACC_EXCL: creation fails if the file already exists
    d = std::make_shared< Handle >( H5Fcreate( path.c_str(), H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT ) );
    return;
  }

  // ReadOnly / ReadWrite: only attempt to open files that really are HDF5
  if ( H5Fis_hdf5( mPath.c_str() ) > 0 )
  {
    const unsigned accessFlags = ( mode == HdfFile::ReadWrite ) ? H5F_ACC_RDWR : H5F_ACC_RDONLY;
    d = std::make_shared< Handle >( H5Fopen( path.c_str(), accessFlags, H5P_DEFAULT ) );
  }
}
28

29
HdfFile::~HdfFile() = default;
327✔
30

31
bool HdfFile::isValid() const { return d && ( d->id >= 0 ); }
518✔
32

33
hid_t HdfFile::id() const { return d->id; }
×
34

35
std::string HdfFile::filePath() const
×
36
{
37
  return mPath;
×
38
}
39

40
//! Opens the group at \a path inside the given file; the file handle is
//! retained so the file outlives the group.
HdfGroup::HdfGroup( HdfFile::SharedHandle file, const std::string &path )
{
  const hid_t groupId = H5Gopen( file->id, path.c_str() );
  d = std::make_shared< Handle >( groupId );
  mFile = std::move( file );
}

//! Wraps an already-opened group handle (adopts ownership).
HdfGroup::HdfGroup( std::shared_ptr<Handle> handle, HdfFile::SharedHandle file )
  : mFile( std::move( file ) )
  , d( std::move( handle ) )
{
}
51

52
bool HdfGroup::isValid() const { return d->id >= 0; }
423✔
53

54
hid_t HdfGroup::id() const { return d->id; }
21✔
55

56
hid_t HdfGroup::file_id() const { return H5Iget_file_id( d->id ); }
×
57

58
std::string HdfGroup::name() const
1,206✔
59
{
60
  char name[HDF_MAX_NAME];
61
  H5Iget_name( d->id, name, HDF_MAX_NAME );
1,206✔
62
  return std::string( name );
1,206✔
63
}
64

65
std::vector<std::string> HdfGroup::groups() const { return objects( H5G_GROUP ); }
376✔
66

67
std::vector<std::string> HdfGroup::datasets() const { return objects( H5G_DATASET ); }
85✔
68

69
std::vector<std::string> HdfGroup::objects() const { return objects( H5G_UNKNOWN ); }
×
70

71
std::string HdfGroup::childPath( const std::string &childName ) const { return name() + "/" + childName; }
2,322✔
72

73
//! Lists child objects of the given type; H5G_UNKNOWN lists everything.
std::vector<std::string> HdfGroup::objects( H5G_obj_t type ) const
{
  std::vector<std::string> result;

  hsize_t childCount;
  H5Gget_num_objs( d->id, &childCount );
  for ( hsize_t i = 0; i < childCount; ++i )
  {
    // skip children that do not match the requested type
    // (H5G_UNKNOWN acts as a wildcard and matches everything)
    if ( type != H5G_UNKNOWN && H5Gget_objtype_by_idx( d->id, i ) != type )
      continue;

    char childName[HDF_MAX_NAME];
    H5Gget_objname_by_idx( d->id, i, childName, ( size_t )HDF_MAX_NAME );
    result.push_back( std::string( childName ) );
  }
  return result;
}
90

91
//! Creates a new scalar (single-element) attribute of the given type
//! on the object identified by \a obj_id.
HdfAttribute::HdfAttribute( hid_t obj_id, const std::string &attr_name, HdfDataType type )
  : m_objId( obj_id )
  , mType( type )
{
  const std::vector<hsize_t> scalarDims = {1};
  HdfDataspace space( scalarDims );
  const hid_t attrId = H5Acreate2( obj_id, attr_name.c_str(), type.id(), space.id(), H5P_DEFAULT, H5P_DEFAULT );
  d = std::make_shared< Handle >( attrId );
}

//! Opens an existing attribute by name on the object identified by \a obj_id.
HdfAttribute::HdfAttribute( hid_t obj_id, const std::string &attr_name )
  : m_objId( obj_id ), m_name( attr_name )
{
  const hid_t attrId = H5Aopen( obj_id, attr_name.c_str(), H5P_DEFAULT );
  d = std::make_shared< Handle >( attrId );
}
105

106
HdfAttribute::~HdfAttribute() = default;
288✔
107

108
bool HdfAttribute::isValid() const { return d->id >= 0; }
163✔
109

110
hid_t HdfAttribute::id() const { return d->id; }
174✔
111

112
std::string HdfAttribute::readString() const
154✔
113
{
114
  HdfDataType datatype( H5Aget_type( id() ) );
154✔
115
  char name[HDF_MAX_NAME + 1];
116
  std::memset( name, '\0', HDF_MAX_NAME + 1 );
154✔
117
  herr_t status = H5Aread( d->id, datatype.id(), name );
154✔
118
  if ( status < 0 )
154✔
119
  {
120
    return std::string();
2✔
121
  }
122
  std::string res( name );
152✔
123
  res = MDAL::trim( res );
152✔
124
  return res;
152✔
125
}
154✔
126

127
double HdfAttribute::readDouble() const
20✔
128
{
129
  HdfDataType datatype( H5Aget_type( id() ) );
20✔
130
  double value;
131
  herr_t status = H5Aread( d->id, H5T_NATIVE_DOUBLE, &value );
20✔
132
  if ( status < 0 )
20✔
133
  {
134
    return std::numeric_limits<double>::quiet_NaN();
×
135
  }
136
  return value;
20✔
137
}
20✔
138

139
void HdfAttribute::write( const std::string &value )
9✔
140
{
141
  if ( !isValid() || !mType.isValid() )
9✔
142
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Write failed due to invalid data" );
×
143

144
  // make sure you do not store more than it is possible
145
  std::vector<char> buf( HDF_MAX_NAME + 1, '\0' );
9✔
146
  size_t size = value.size() < HDF_MAX_NAME  ? value.size() : HDF_MAX_NAME;
9✔
147
  memcpy( buf.data(), value.c_str(), size );
9✔
148

149
  if ( H5Awrite( d->id, mType.id(), buf.data() ) < 0 )
9✔
150
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Could not write data" );
×
151
}
9✔
152

153
void HdfAttribute::write( int value )
8✔
154
{
155
  if ( !isValid() || !mType.isValid() )
8✔
156
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Write failed due to invalid data" );
×
157

158
  if ( H5Awrite( d->id, mType.id(), &value ) < 0 )
8✔
159
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Could not write data" );
×
160
}
8✔
161

162
//! Creates a new 1D dataset with nItems elements of the given type at \a path.
HdfDataset::HdfDataset( HdfFile::SharedHandle file, const std::string &path, HdfDataType dtype, size_t nItems )
  : mFile( file ),
    mType( dtype )
{
  // Create a simple 1D dataspace with nItems elements for the new dataset
  std::vector<hsize_t> dimsSingle = {nItems};
  HdfDataspace dsc( dimsSingle );

  d = std::make_shared< Handle >( H5Dcreate2( file->id, path.c_str(), dtype.id(), dsc.id(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT ) );
}
172

173

174
//! Creates a new dataset at \a path with the given type and caller-supplied dataspace.
HdfDataset::HdfDataset( HdfFile::SharedHandle file, const std::string &path, HdfDataType dtype, HdfDataspace dataspace )
  : mFile( file ),
    mType( dtype )
{
  d = std::make_shared< Handle >( H5Dcreate2( file->id, path.c_str(), dtype.id(), dataspace.id(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT ) );
}

//! Opens an existing dataset at \a path for reading.
HdfDataset::HdfDataset( HdfFile::SharedHandle file, const std::string &path )
  : mFile( file ),
    d( std::make_shared< Handle >( H5Dopen2( file->id, path.c_str(), H5P_DEFAULT ) ) )
{
}
186

187
HdfDataset::~HdfDataset() = default;
2,591✔
188

189
bool HdfDataset::isValid() const { return  d && d->id >= 0; }
246✔
190

191
hid_t HdfDataset::id() const { return d->id; }
×
192

193
std::vector<hsize_t> HdfDataset::dims() const
850✔
194
{
195
  hid_t sid = H5Dget_space( d->id );
850✔
196
  if ( sid == H5I_INVALID_HID )
850✔
197
    return std::vector<hsize_t>();
×
198

199
  int ndims = H5Sget_simple_extent_ndims( sid );
850✔
200
  if ( ndims < 0 )
850✔
201
  {
202
    H5Sclose( sid );
×
NEW
203
    return std::vector<hsize_t>();
×
204
  }
205

206
  std::vector<hsize_t> ret( static_cast<size_t>( ndims ) );
850✔
207
  H5Sget_simple_extent_dims( sid, ret.data(), nullptr );
850✔
208
  H5Sclose( sid );
850✔
209
  return ret;
850✔
210
}
850✔
211

212
//! Total number of elements: the product of all dimension extents.
hsize_t HdfDataset::elementCount() const
{
  const std::vector<hsize_t> extents = dims();
  hsize_t total = 1;
  for ( size_t i = 0; i < extents.size(); ++i )
    total *= extents[i];
  return total;
}
219

220
//! Type class of the dataset: taken from mType when set (datasets we created),
//! otherwise queried from the dataset itself.
H5T_class_t HdfDataset::type() const
{
  if ( !mType.isValid() )
  {
    // RAII wrapper releases the type obtained from H5Dget_type
    HdfDataType dt( H5Dget_type( d->id ) );
    return H5Tget_class( dt.id() );
  }
  return H5Tget_class( mType.id() );
}
230

231
//! Reads a hyperslab (per-dimension offsets/counts) as unsigned bytes.
std::vector<uchar> HdfDataset::readArrayUint8( const std::vector<hsize_t> &offsets, const std::vector<hsize_t> &counts ) const { return readArray<uchar>( H5T_NATIVE_UINT8, offsets, counts ); }

//! Reads a hyperslab (per-dimension offsets/counts) as floats.
std::vector<float> HdfDataset::readArray( const std::vector<hsize_t> &offsets, const std::vector<hsize_t> &counts ) const { return readArray<float>( H5T_NATIVE_FLOAT, offsets, counts ); }

//! Reads a hyperslab (per-dimension offsets/counts) as doubles.
std::vector<double> HdfDataset::readArrayDouble( const std::vector<hsize_t> &offsets, const std::vector<hsize_t> &counts ) const { return readArray<double>( H5T_NATIVE_DOUBLE, offsets, counts ); }

//! Reads a hyperslab (per-dimension offsets/counts) as ints.
std::vector<int> HdfDataset::readArrayInt( const std::vector<hsize_t> &offsets, const std::vector<hsize_t> &counts ) const { return readArray<int>( H5T_NATIVE_INT, offsets, counts ); }

//! Reads the whole dataset as unsigned bytes.
std::vector<uchar> HdfDataset::readArrayUint8() const { return readArray<uchar>( H5T_NATIVE_UINT8 ); }

//! Reads the whole dataset as floats.
std::vector<float> HdfDataset::readArray() const { return readArray<float>( H5T_NATIVE_FLOAT ); }

//! Reads the whole dataset as doubles.
std::vector<double> HdfDataset::readArrayDouble() const { return readArray<double>( H5T_NATIVE_DOUBLE ); }

//! Reads the whole dataset as ints.
std::vector<int> HdfDataset::readArrayInt() const { return readArray<int>( H5T_NATIVE_INT ); }
246

247
std::vector<std::string> HdfDataset::readArrayString() const
12✔
248
{
249
  std::vector<std::string> ret;
12✔
250

251
  HdfDataType datatype = HdfDataType::createString();
12✔
252
  std::vector<HdfString> arr = readArray<HdfString>( datatype.id() );
12✔
253

254
  for ( const HdfString &str : arr )
1,053✔
255
  {
256
    std::string dat = std::string( str.data );
1,041✔
257
    ret.push_back( MDAL::trim( dat ) );
1,041✔
258
  }
1,041✔
259

260
  return ret;
24✔
261
}
12✔
262

263
float HdfDataset::readFloat() const
×
264
{
265
  if ( elementCount() != 1 )
×
266
  {
267
    MDAL::Log::debug( "Not scalar!" );
×
268
    return 0;
×
269
  }
270

271
  float value;
272
  herr_t status = H5Dread( d->id, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &value );
×
273
  if ( status < 0 )
×
274
  {
275
    MDAL::Log::debug( "Failed to read data!" );
×
276
    return 0;
×
277
  }
278
  return value;
×
279
}
280

281
void HdfDataset::write( std::vector<float> &value )
12✔
282
{
283
  if ( !isValid() || !mType.isValid() )
12✔
284
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Write failed due to invalid data" );
×
285

286
  // Write float array to dataset
287
  if ( H5Dwrite( d->id, mType.id(), H5S_ALL, H5S_ALL, H5P_DEFAULT, value.data() ) < 0 )
12✔
288
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Could not write float array to dataset" );
×
289
}
12✔
290

291
void HdfDataset::write( float value )
1✔
292
{
293
  if ( !isValid() || !mType.isValid() )
1✔
294
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Write failed due to invalid data" );
×
295

296
  // Write float array to dataset
297
  if ( H5Dwrite( d->id, mType.id(), H5S_ALL, H5S_ALL, H5P_DEFAULT, &value ) < 0 )
1✔
298
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Could not write float to dataset" );
×
299
}
1✔
300

301
void HdfDataset::write( std::vector<double> &value )
4✔
302
{
303
  if ( !isValid() || !mType.isValid() )
4✔
304
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Write failed due to invalid data" );
×
305

306
  // Write double array to dataset.
307
  if ( H5Dwrite( d->id, mType.id(), H5S_ALL, H5S_ALL, H5P_DEFAULT, value.data() ) < 0 )
4✔
308
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Could not write double array to dataset" );
×
309
}
4✔
310

311
void HdfDataset::write( const std::string &value )
1✔
312
{
313
  if ( !isValid() || !mType.isValid() )
1✔
314
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Write failed due to invalid data" );
×
315

316
  // make sure you do not store more than it is possible
317
  std::vector<char> buf( HDF_MAX_NAME + 1, '\0' );
1✔
318
  size_t size = value.size() < HDF_MAX_NAME  ? value.size() : HDF_MAX_NAME;
1✔
319
  memcpy( buf.data(), value.c_str(), size );
1✔
320

321
  // Write string to dataset.
322
  if ( H5Dwrite( d->id, mType.id(), H5S_ALL, H5S_ALL, H5P_DEFAULT, buf.data() ) < 0 )
1✔
323
    throw MDAL::Error( MDAL_Status::Err_FailToWriteToDisk, "Could not write string to dataset" );
×
324
}
1✔
325

326
std::string HdfDataset::readString() const
36✔
327
{
328
  if ( elementCount() != 1 )
36✔
329
  {
330
    MDAL::Log::debug( "Not scalar!" );
×
331
    return std::string();
×
332
  }
333

334
  char name[HDF_MAX_NAME];
335
  HdfDataType datatype = HdfDataType::createString();
36✔
336
  herr_t status = H5Dread( d->id, datatype.id(), H5S_ALL, H5S_ALL, H5P_DEFAULT, name );
36✔
337
  if ( status < 0 )
36✔
338
  {
339
    MDAL::Log::debug( "Failed to read data!" );
×
340
    return std::string();
×
341
  }
342
  return std::string( name );
36✔
343
}
36✔
344

345
//! Creates a simple dataspace with the given extents.
HdfDataspace::HdfDataspace( const std::vector<hsize_t> &dims )
{
  const int rank = static_cast<int>( dims.size() );
  // current size == maximum size: the dataspace is not extendable
  d = std::make_shared< Handle >( H5Screate_simple( rank, dims.data(), dims.data() ) );
}

//! Wraps the dataspace of an existing dataset; leaves the handle null
//! when the dataset id is invalid.
HdfDataspace::HdfDataspace( hid_t dataset )
{
  if ( dataset >= 0 )
    d = std::make_shared< Handle >( H5Dget_space( dataset ) );
}

HdfDataspace::~HdfDataspace() = default;
362

363
//! Selects a contiguous [start, start+count) range; 1D dataspaces only.
void HdfDataspace::selectHyperslab( hsize_t start, hsize_t count )
{
  // this overload only supports one-dimensional dataspaces
  assert( H5Sget_simple_extent_ndims( d->id ) == 1 );

  if ( H5Sselect_hyperslab( d->id, H5S_SELECT_SET, &start, NULL, &count, NULL ) < 0 )
  {
    MDAL::Log::debug( "Failed to select 1D hyperslab!" );
  }
}
374

375
void HdfDataspace::selectHyperslab( const std::vector<hsize_t> offsets,
1,009✔
376
                                    const std::vector<hsize_t> counts )
377
{
378
  assert( H5Sget_simple_extent_ndims( d->id ) == static_cast<int>( offsets.size() ) );
1,009✔
379
  assert( offsets.size() == counts.size() );
1,009✔
380

381
  herr_t status = H5Sselect_hyperslab( d->id,
1,009✔
382
                                       H5S_SELECT_SET,
383
                                       offsets.data(),
384
                                       NULL,
385
                                       counts.data(),
386
                                       NULL );
387
  if ( status < 0 )
1,009✔
388
  {
389
    MDAL::Log::debug( "Failed to select 1D hyperslab!" );
×
390
  }
391
}
1,009✔
392

393
bool HdfDataspace::isValid() const { return d->id >= 0; }
×
394

395
hid_t HdfDataspace::id() const { return d->id; }
2,053✔
396

397

398
HdfDataType::HdfDataType() = default;

//! Wraps an HDF5 type id. Native (predefined) types are stored as a plain id
//! and never closed; non-native types get an owning handle closed on release.
HdfDataType::HdfDataType( hid_t type, bool isNativeType )
{
  if ( !isNativeType )
    d = std::make_shared< Handle >( type );
  else
    mNativeId = type;
}
407

408
//! Builds a fixed-size, NUL-terminated C-string type of at most HDF_MAX_NAME chars.
HdfDataType HdfDataType::createString( int size )
{
  assert( size > 0 );
  // clamp to the maximum string length this wrapper supports
  if ( size > HDF_MAX_NAME )
    size = HDF_MAX_NAME;

  const hid_t strType = H5Tcopy( H5T_C_S1 );
  H5Tset_size( strType, static_cast<size_t>( size ) );
  H5Tset_strpad( strType, H5T_STR_NULLTERM );
  // not a native type: the returned wrapper owns (and will close) strType
  return HdfDataType( strType, false );
}
419

420
HdfDataType::~HdfDataType() = default;
3,158✔
421

422
bool HdfDataType::isValid() const
35✔
423
{
424
  if ( d )
35✔
425
    return d->id >= 0;
10✔
426
  else
427
    return mNativeId >= 0;
25✔
428
}
429

430
hid_t HdfDataType::id() const
272✔
431
{
432
  if ( d )
272✔
433
    return d->id;
68✔
434
  else
435
    return mNativeId;
204✔
436
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc