geo-engine / geoengine / 5869096654

15 Aug 2023 02:59PM UTC coverage: 89.79% (+0.3%) from 89.481%

Trigger: push (github, web-flow)
Merge pull request #851 from geo-engine/pg-dataset-metadata-mapping ("Pg dataset metadata mapping")

1982 of 1982 new or added lines in 9 files covered (100.0%)
106148 of 118218 relevant lines covered (89.79%)
61246.75 hits per line

Source File: /services/src/datasets/postgres.rs (67.12% covered)
use super::listing::Provenance;
use crate::api::model::datatypes::{DatasetId, DatasetName, LayerId};
use crate::api::model::responses::datasets::DatasetIdAndName;
use crate::api::model::services::AddDataset;
use crate::contexts::PostgresDb;
use crate::datasets::listing::ProvenanceOutput;
use crate::datasets::listing::{DatasetListOptions, DatasetListing, DatasetProvider};
use crate::datasets::storage::DATASET_DB_LAYER_PROVIDER_ID;
use crate::datasets::storage::DATASET_DB_ROOT_COLLECTION_ID;
use crate::datasets::storage::{
    Dataset, DatasetDb, DatasetStore, DatasetStorer, MetaDataDefinition,
};
use crate::datasets::upload::FileId;
use crate::datasets::upload::{Upload, UploadDb, UploadId};
use crate::error::{self, Result};
use crate::layers::layer::CollectionItem;
use crate::layers::layer::Layer;
use crate::layers::layer::LayerCollection;
use crate::layers::layer::LayerCollectionListOptions;
use crate::layers::layer::LayerListing;
use crate::layers::layer::ProviderLayerCollectionId;
use crate::layers::layer::ProviderLayerId;
use crate::layers::listing::{DatasetLayerCollectionProvider, LayerCollectionId};
use crate::layers::storage::INTERNAL_PROVIDER_ID;
use crate::projects::Symbology;
use crate::util::operators::source_operator_from_dataset;
use crate::workflows::workflow::Workflow;
use async_trait::async_trait;
use bb8_postgres::tokio_postgres::tls::{MakeTlsConnect, TlsConnect};
use bb8_postgres::tokio_postgres::Socket;
use geoengine_datatypes::dataset::DataId;
use geoengine_datatypes::primitives::RasterQueryRectangle;
use geoengine_datatypes::primitives::VectorQueryRectangle;
use geoengine_datatypes::util::Identifier;
use geoengine_operators::engine::{
    MetaData, MetaDataProvider, RasterResultDescriptor, TypedResultDescriptor,
    VectorResultDescriptor,
};
use geoengine_operators::mock::MockDatasetDataSourceLoadingInfo;
use geoengine_operators::source::{GdalLoadingInfo, OgrSourceDataset};
use postgres_types::{FromSql, ToSql};
use std::collections::HashMap;
use std::str::FromStr;
use uuid::Uuid;

impl<Tls> DatasetDb for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}

#[async_trait]
impl<Tls> DatasetProvider for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn list_datasets(&self, _options: DatasetListOptions) -> Result<Vec<DatasetListing>> {
        // TODO: use options

        let conn = self.conn_pool.get().await?;
        let stmt = conn
            .prepare(
                "
            SELECT
                id,
                name,
                display_name,
                description,
                tags,
                source_operator,
                result_descriptor,
                symbology
            FROM
                datasets;",
            )
            .await?;

        let rows = conn.query(&stmt, &[]).await?;

        Ok(rows
            .iter()
            .map(|row| {
                Result::<DatasetListing>::Ok(DatasetListing {
                    id: row.get(0),
                    name: row.get(1),
                    display_name: row.get(2),
                    description: row.get(3),
                    tags: row.get::<_, Option<_>>(4).unwrap_or_default(),
                    source_operator: row.get(5),
                    result_descriptor: row.get(6),
                    symbology: row.get(7),
                })
            })
            .filter_map(Result::ok)
            .collect())
    }

    async fn load_dataset(&self, dataset: &DatasetId) -> Result<Dataset> {
        let conn = self.conn_pool.get().await?;
        let stmt = conn
            .prepare(
                "
            SELECT
                id,
                name,
                display_name,
                description,
                result_descriptor,
                source_operator,
                symbology,
                provenance
            FROM
                datasets
            WHERE
                id = $1
            LIMIT
                1",
            )
            .await?;

        // TODO: throw proper dataset does not exist
        let row = conn.query_one(&stmt, &[dataset]).await?;

        Ok(Dataset {
            id: row.get(0),
            name: row.get(1),
            display_name: row.get(2),
            description: row.get(3),
            result_descriptor: row.get(4),
            source_operator: row.get(5),
            symbology: row.get(6),
            provenance: row.get(7),
        })
    }

    async fn load_provenance(&self, dataset: &DatasetId) -> Result<ProvenanceOutput> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "
            SELECT
                provenance
            FROM
                datasets
            WHERE
                id = $1;",
            )
            .await?;

        let row = conn.query_one(&stmt, &[dataset]).await?;

        let provenances: Vec<Provenance> = row.get(0);

        Ok(ProvenanceOutput {
            data: (*dataset).into(),
            provenance: Some(provenances),
        })
    }

    async fn resolve_dataset_name_to_id(&self, dataset_name: &DatasetName) -> Result<DatasetId> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "SELECT id
                FROM datasets
                WHERE name = $1::\"DatasetName\"",
            )
            .await?;

        let row = conn.query_one(&stmt, &[&dataset_name]).await?;

        Ok(row.get(0))
    }
}

#[async_trait]
impl<Tls>
    MetaDataProvider<MockDatasetDataSourceLoadingInfo, VectorResultDescriptor, VectorQueryRectangle>
    for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn meta_data(
        &self,
        _id: &DataId,
    ) -> geoengine_operators::util::Result<
        Box<
            dyn MetaData<
                MockDatasetDataSourceLoadingInfo,
                VectorResultDescriptor,
                VectorQueryRectangle,
            >,
        >,
    > {
        Err(geoengine_operators::error::Error::NotYetImplemented)
    }
}

#[async_trait]
impl<Tls> MetaDataProvider<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>
    for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn meta_data(
        &self,
        id: &DataId,
    ) -> geoengine_operators::util::Result<
        Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
    > {
        let id = id
            .internal()
            .ok_or(geoengine_operators::error::Error::DataIdTypeMissMatch)?;

        let conn = self.conn_pool.get().await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;
        let stmt = conn
            .prepare(
                "
        SELECT
            meta_data
        FROM
            datasets
        WHERE
            id = $1",
            )
            .await
            .map_err(|e| geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            })?;

        let row = conn.query_one(&stmt, &[&id]).await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;

        let meta_data: MetaDataDefinition = row.get("meta_data");

        let MetaDataDefinition::OgrMetaData(meta_data) = meta_data else {
            return Err(geoengine_operators::error::Error::MetaData {
                source: Box::new(geoengine_operators::error::Error::InvalidType {
                    expected: "OgrMetaData".to_string(),
                    found: meta_data.type_name().to_string(),
                }),
            });
        };

        Ok(Box::new(meta_data))
    }
}

#[async_trait]
impl<Tls> MetaDataProvider<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>
    for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn meta_data(
        &self,
        id: &DataId,
    ) -> geoengine_operators::util::Result<
        Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
    > {
        let id = id
            .internal()
            .ok_or(geoengine_operators::error::Error::DataIdTypeMissMatch)?;

        let conn = self.conn_pool.get().await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;
        let stmt = conn
            .prepare(
                "
            SELECT
                meta_data
            FROM
                datasets
            WHERE
                id = $1;",
            )
            .await
            .map_err(|e| geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            })?;

        let row = conn.query_one(&stmt, &[&id]).await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;

        let meta_data: MetaDataDefinition = row.get(0);

        Ok(match meta_data {
            MetaDataDefinition::GdalMetaDataRegular(m) => Box::new(m),
            MetaDataDefinition::GdalStatic(m) => Box::new(m),
            MetaDataDefinition::GdalMetaDataList(m) => Box::new(m),
            MetaDataDefinition::GdalMetadataNetCdfCf(m) => Box::new(m),
            _ => return Err(geoengine_operators::error::Error::DataIdTypeMissMatch),
        })
    }
}

#[async_trait]
pub trait PostgresStorable<Tls>: Send + Sync
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    fn to_typed_metadata(&self) -> Result<DatasetMetaData>;
}

pub struct DatasetMetaData<'m> {
    meta_data: &'m MetaDataDefinition,
    result_descriptor: crate::api::model::operators::TypedResultDescriptor,
}

impl<Tls> DatasetStorer for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    type StorageType = Box<dyn PostgresStorable<Tls>>;
}

impl<Tls> PostgresStorable<Tls> for MetaDataDefinition
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    fn to_typed_metadata(&self) -> Result<DatasetMetaData> {
        match self {
            MetaDataDefinition::MockMetaData(d) => Ok(DatasetMetaData {
                meta_data: self,
                result_descriptor: TypedResultDescriptor::from(d.result_descriptor.clone()).into(),
            }),
            MetaDataDefinition::OgrMetaData(d) => Ok(DatasetMetaData {
                meta_data: self,
                result_descriptor: TypedResultDescriptor::from(d.result_descriptor.clone()).into(),
            }),
            MetaDataDefinition::GdalMetaDataRegular(d) => Ok(DatasetMetaData {
                meta_data: self,
                result_descriptor: TypedResultDescriptor::from(d.result_descriptor.clone()).into(),
            }),
            MetaDataDefinition::GdalStatic(d) => Ok(DatasetMetaData {
                meta_data: self,
                result_descriptor: TypedResultDescriptor::from(d.result_descriptor.clone()).into(),
            }),
            MetaDataDefinition::GdalMetadataNetCdfCf(d) => Ok(DatasetMetaData {
                meta_data: self,
                result_descriptor: TypedResultDescriptor::from(d.result_descriptor.clone()).into(),
            }),
            MetaDataDefinition::GdalMetaDataList(d) => Ok(DatasetMetaData {
                meta_data: self,
                result_descriptor: TypedResultDescriptor::from(d.result_descriptor.clone()).into(),
            }),
        }
    }
}

#[async_trait]
impl<Tls> DatasetStore for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn add_dataset(
        &self,
        dataset: AddDataset,
        meta_data: Box<dyn PostgresStorable<Tls>>,
    ) -> Result<DatasetIdAndName> {
        let id = DatasetId::new();
        let name = dataset.name.unwrap_or_else(|| DatasetName {
            namespace: None,
            name: id.to_string(),
        });

        Self::check_namespace(&name)?;

        let typed_meta_data = meta_data.to_typed_metadata()?;

        let conn = self.conn_pool.get().await?;

        // unique constraint on `id` checks if dataset with same id exists

        let stmt = conn
            .prepare(
                "
                INSERT INTO datasets (
                    id,
                    name,
                    display_name,
                    description,
                    source_operator,
                    result_descriptor,
                    meta_data,
                    symbology,
                    provenance
                )
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)",
            )
            .await?;

        conn.execute(
            &stmt,
            &[
                &id,
                &name,
                &dataset.display_name,
                &dataset.description,
                &dataset.source_operator,
                &typed_meta_data.result_descriptor,
                typed_meta_data.meta_data,
                &dataset.symbology,
                &dataset.provenance,
            ],
        )
        .await?;

        Ok(DatasetIdAndName { id, name })
    }

    async fn delete_dataset(&self, dataset_id: DatasetId) -> Result<()> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn.prepare("DELETE FROM datasets WHERE id = $1;").await?;

        conn.execute(&stmt, &[&dataset_id]).await?;

        Ok(())
    }

    fn wrap_meta_data(&self, meta: MetaDataDefinition) -> Self::StorageType {
        Box::new(meta)
    }
}

#[async_trait]
impl<Tls> UploadDb for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn load_upload(&self, upload: UploadId) -> Result<Upload> {
        // TODO: check permissions

        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare("SELECT id, files FROM uploads WHERE id = $1;")
            .await?;

        let row = conn.query_one(&stmt, &[&upload]).await?;

        Ok(Upload {
            id: row.get(0),
            files: row
                .get::<_, Vec<FileUpload>>(1)
                .into_iter()
                .map(Into::into)
                .collect(),
        })
    }

    async fn create_upload(&self, upload: Upload) -> Result<()> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare("INSERT INTO uploads (id, files) VALUES ($1, $2)")
            .await?;

        conn.execute(
            &stmt,
            &[
                &upload.id,
                &upload
                    .files
                    .iter()
                    .map(FileUpload::from)
                    .collect::<Vec<_>>(),
            ],
        )
        .await?;
        Ok(())
    }
}

#[async_trait]
impl<Tls> DatasetLayerCollectionProvider for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    async fn load_dataset_layer_collection(
        &self,
        collection: &LayerCollectionId,
        options: LayerCollectionListOptions,
    ) -> Result<LayerCollection> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "
                SELECT
                    concat(id, '') AS id,
                    display_name,
                    description
                FROM
                    datasets
                ORDER BY name ASC
                LIMIT $1
                OFFSET $2;",
            )
            .await?;

        let rows = conn
            .query(
                &stmt,
                &[&i64::from(options.limit), &i64::from(options.offset)],
            )
            .await?;

        let items = rows
            .iter()
            .map(|row| {
                Result::<CollectionItem>::Ok(CollectionItem::Layer(LayerListing {
                    id: ProviderLayerId {
                        provider_id: DATASET_DB_LAYER_PROVIDER_ID,
                        layer_id: LayerId(row.get(0)),
                    },
                    name: row.get(1),
                    description: row.get(2),
                    properties: vec![],
                }))
            })
            .filter_map(Result::ok)
            .collect();

        Ok(LayerCollection {
            id: ProviderLayerCollectionId {
                provider_id: INTERNAL_PROVIDER_ID,
                collection_id: collection.clone(),
            },
            name: "Datasets".to_string(),
            description: "Basic Layers for all Datasets".to_string(),
            items,
            entry_label: None,
            properties: vec![],
        })
    }

    async fn get_dataset_root_layer_collection_id(&self) -> Result<LayerCollectionId> {
        Ok(LayerCollectionId(DATASET_DB_ROOT_COLLECTION_ID.to_string()))
    }

    async fn load_dataset_layer(&self, id: &LayerId) -> Result<Layer> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "
                SELECT
                    name,
                    display_name,
                    description,
                    source_operator,
                    symbology
                FROM
                    datasets
                WHERE id = $1;",
            )
            .await?;

        let row = conn
            .query_one(
                &stmt,
                &[
                    &Uuid::from_str(&id.0).map_err(|_| error::Error::IdStringMustBeUuid {
                        found: id.0.clone(),
                    })?,
                ],
            )
            .await?;

        let name: DatasetName = row.get(0);
        let display_name: String = row.get(1);
        let description: String = row.get(2);
        let source_operator: String = row.get(3);
        let symbology: Option<Symbology> = row.get(4);

        let operator = source_operator_from_dataset(&source_operator, &name.into())?;

        Ok(Layer {
            id: ProviderLayerId {
                provider_id: DATASET_DB_LAYER_PROVIDER_ID,
                layer_id: id.clone(),
            },
            name: display_name,
            description,
            workflow: Workflow { operator },
            symbology,
            properties: vec![],
            metadata: HashMap::new(),
        })
    }
}

#[derive(Debug, Clone, ToSql, FromSql)]
pub struct FileUpload {
    pub id: FileId,
    pub name: String,
    pub byte_size: i64,
}

impl From<crate::datasets::upload::FileUpload> for FileUpload {
    fn from(upload: crate::datasets::upload::FileUpload) -> Self {
        Self {
            id: upload.id,
            name: upload.name,
            byte_size: upload.byte_size as i64,
        }
    }
}

impl From<&crate::datasets::upload::FileUpload> for FileUpload {
    fn from(upload: &crate::datasets::upload::FileUpload) -> Self {
        Self {
            id: upload.id,
            name: upload.name.clone(),
            byte_size: upload.byte_size as i64,
        }
    }
}

impl From<FileUpload> for crate::datasets::upload::FileUpload {
    fn from(upload: FileUpload) -> Self {
        Self {
            id: upload.id,
            name: upload.name,
            byte_size: upload.byte_size as u64,
        }
    }
}