• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

geo-engine / geoengine / 5740825225

02 Aug 2023 03:32PM UTC coverage: 88.958% (+0.05%) from 88.913%
5740825225

push

github

web-flow
Merge pull request #844 from geo-engine/pg-symbology-mapping

Pg symbology mapping

610 of 610 new or added lines in 10 files covered. (100.0%)

106476 of 119693 relevant lines covered (88.96%)

60487.94 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

68.18
/services/src/datasets/postgres.rs
1
use std::collections::HashMap;
2
use std::str::FromStr;
3

4
use crate::api::model::datatypes::{DatasetId, DatasetName, LayerId};
5
use crate::api::model::responses::datasets::DatasetIdAndName;
6
use crate::api::model::services::AddDataset;
7
use crate::contexts::PostgresDb;
8
use crate::datasets::listing::ProvenanceOutput;
9
use crate::datasets::storage::DATASET_DB_LAYER_PROVIDER_ID;
10
use crate::datasets::storage::DATASET_DB_ROOT_COLLECTION_ID;
11
use crate::datasets::storage::{
12
    Dataset, DatasetDb, DatasetStore, DatasetStorer, MetaDataDefinition,
13
};
14
use crate::datasets::upload::FileId;
15
use crate::datasets::upload::{Upload, UploadDb, UploadId};
16
use crate::error::{self, Result};
17
use crate::layers::layer::CollectionItem;
18
use crate::layers::layer::Layer;
19
use crate::layers::layer::LayerCollection;
20
use crate::layers::layer::LayerCollectionListOptions;
21
use crate::layers::layer::LayerListing;
22
use crate::layers::layer::ProviderLayerCollectionId;
23
use crate::layers::layer::ProviderLayerId;
24

25
use crate::datasets::listing::{DatasetListOptions, DatasetListing, DatasetProvider};
26
use crate::layers::listing::{DatasetLayerCollectionProvider, LayerCollectionId};
27
use crate::layers::storage::INTERNAL_PROVIDER_ID;
28
use crate::projects::Symbology;
29
use crate::util::operators::source_operator_from_dataset;
30
use crate::workflows::workflow::Workflow;
31
use async_trait::async_trait;
32

33
use bb8_postgres::tokio_postgres::tls::{MakeTlsConnect, TlsConnect};
34
use bb8_postgres::tokio_postgres::Socket;
35

36
use geoengine_datatypes::dataset::DataId;
37
use geoengine_datatypes::primitives::RasterQueryRectangle;
38
use geoengine_datatypes::primitives::VectorQueryRectangle;
39
use geoengine_datatypes::util::Identifier;
40
use geoengine_operators::engine::{
41
    MetaData, MetaDataProvider, RasterResultDescriptor, StaticMetaData, TypedResultDescriptor,
42
    VectorResultDescriptor,
43
};
44

45
use geoengine_operators::mock::MockDatasetDataSourceLoadingInfo;
46

47
use geoengine_operators::source::{GdalLoadingInfo, OgrSourceDataset};
48

49
use postgres_types::{FromSql, ToSql};
50
use uuid::Uuid;
51

52
use super::listing::Provenance;
53

54
/// Marker impl: `DatasetDb` requires no additional members here (the empty
/// impl compiles), so the actual behavior lives in the provider/store/upload
/// trait impls below.
impl<Tls> DatasetDb for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
62

63
#[async_trait]
impl<Tls> DatasetProvider for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Lists all datasets from the `datasets` table.
    ///
    /// NOTE(review): `_options` is currently unused (see TODO) — no filtering,
    /// ordering, or paging is applied; the query returns every row.
    async fn list_datasets(&self, _options: DatasetListOptions) -> Result<Vec<DatasetListing>> {
        // TODO: use options

        let conn = self.conn_pool.get().await?;
        let stmt = conn
            .prepare(
                "
            SELECT 
                id,
                name,
                display_name,
                description,
                tags,
                source_operator,
                result_descriptor,
                symbology
            FROM 
                datasets;",
            )
            .await?;

        let rows = conn.query(&stmt, &[]).await?;

        Ok(rows
            .iter()
            .map(|row| {
                Result::<DatasetListing>::Ok(DatasetListing {
                    id: row.get(0),
                    name: row.get(1),
                    display_name: row.get(2),
                    description: row.get(3),
                    // `tags` may be NULL in the database; fall back to the default.
                    tags: row.get::<_, Option<_>>(4).unwrap_or_default(),
                    source_operator: row.get(5),
                    result_descriptor: serde_json::from_value(row.get(6))?,
                    symbology: row.get(7),
                })
            })
            // NOTE(review): rows whose `result_descriptor` fails to deserialize
            // are silently dropped here — TODO confirm this is intended.
            .filter_map(Result::ok)
            .collect())
    }

    /// Loads a single dataset by id.
    ///
    /// Propagates the raw database error when the row does not exist
    /// (`query_one` fails on zero rows); see the TODO below.
    async fn load_dataset(&self, dataset: &DatasetId) -> Result<Dataset> {
        let conn = self.conn_pool.get().await?;
        let stmt = conn
            .prepare(
                "
            SELECT
                id,
                name,
                display_name,
                description,
                result_descriptor,
                source_operator,
                symbology,
                provenance
            FROM 
                datasets
            WHERE 
                id = $1
            LIMIT 
                1",
            )
            .await?;

        // TODO: throw proper dataset does not exist
        let row = conn.query_one(&stmt, &[dataset]).await?;

        Ok(Dataset {
            id: row.get(0),
            name: row.get(1),
            display_name: row.get(2),
            description: row.get(3),
            // The descriptor is stored as JSON; deserialization errors propagate.
            result_descriptor: serde_json::from_value(row.get(4))?,
            source_operator: row.get(5),
            symbology: row.get(6),
            provenance: row.get(7),
        })
    }

    /// Loads the provenance entries recorded for a dataset.
    ///
    /// The list is always wrapped in `Some`, even when the stored array is empty.
    async fn load_provenance(&self, dataset: &DatasetId) -> Result<ProvenanceOutput> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "
            SELECT 
                provenance 
            FROM 
                datasets
            WHERE
                id = $1;",
            )
            .await?;

        let row = conn.query_one(&stmt, &[dataset]).await?;

        let provenances: Vec<Provenance> = row.get(0);

        Ok(ProvenanceOutput {
            data: (*dataset).into(),
            provenance: Some(provenances),
        })
    }

    /// Resolves a dataset name to its id.
    ///
    /// The parameter is cast to the Postgres composite type `"DatasetName"`;
    /// `query_one` fails if no dataset with that name exists.
    async fn resolve_dataset_name_to_id(&self, dataset_name: &DatasetName) -> Result<DatasetId> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "SELECT id
                FROM datasets
                WHERE name = $1::\"DatasetName\"",
            )
            .await?;

        let row = conn.query_one(&stmt, &[&dataset_name]).await?;

        Ok(row.get(0))
    }
}
191

192
#[async_trait]
impl<Tls>
    MetaDataProvider<MockDatasetDataSourceLoadingInfo, VectorResultDescriptor, VectorQueryRectangle>
    for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Stub: meta data for mock datasets is not served from the Postgres
    /// backend; this always fails with `NotYetImplemented`.
    async fn meta_data(
        &self,
        _id: &DataId,
    ) -> geoengine_operators::util::Result<
        Box<
            dyn MetaData<
                MockDatasetDataSourceLoadingInfo,
                VectorResultDescriptor,
                VectorQueryRectangle,
            >,
        >,
    > {
        Err(geoengine_operators::error::Error::NotYetImplemented)
    }
}
217

218
#[async_trait]
impl<Tls> MetaDataProvider<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>
    for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Loads the stored OGR (vector) meta data for an internal dataset id.
    ///
    /// External data ids are rejected with `DataIdTypeMissMatch`; pool and
    /// query failures are wrapped into the operator `MetaData` error variant.
    async fn meta_data(
        &self,
        id: &DataId,
    ) -> geoengine_operators::util::Result<
        Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
    > {
        // Only internal (database-backed) ids can be resolved here.
        let id = id
            .internal()
            .ok_or(geoengine_operators::error::Error::DataIdTypeMissMatch)?;

        let conn = self.conn_pool.get().await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;
        let stmt = conn
            .prepare(
                "
        SELECT
            meta_data
        FROM
            datasets
        WHERE
            id = $1",
            )
            .await
            .map_err(|e| geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            })?;

        // `query_one` also fails (wrapped below) when the dataset does not exist.
        let row = conn.query_one(&stmt, &[&id]).await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;

        // The `meta_data` column holds the serialized static vector meta data.
        let meta_data: StaticMetaData<
            OgrSourceDataset,
            VectorResultDescriptor,
            VectorQueryRectangle,
        > = serde_json::from_value(row.get(0))?;

        Ok(Box::new(meta_data))
    }
}
272

273
#[async_trait]
impl<Tls> MetaDataProvider<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>
    for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Loads the stored GDAL (raster) meta data for an internal dataset id.
    ///
    /// The JSON column is deserialized into a `MetaDataDefinition` and only
    /// the GDAL raster variants are accepted.
    async fn meta_data(
        &self,
        id: &DataId,
    ) -> geoengine_operators::util::Result<
        Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
    > {
        // Only internal (database-backed) ids can be resolved here.
        let id = id
            .internal()
            .ok_or(geoengine_operators::error::Error::DataIdTypeMissMatch)?;

        let conn = self.conn_pool.get().await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;
        let stmt = conn
            .prepare(
                "
            SELECT
                meta_data
            FROM
               datasets
            WHERE
                id = $1;",
            )
            .await
            .map_err(|e| geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            })?;

        let row = conn.query_one(&stmt, &[&id]).await.map_err(|e| {
            geoengine_operators::error::Error::MetaData {
                source: Box::new(e),
            }
        })?;

        let meta_data: MetaDataDefinition = serde_json::from_value(row.get(0))?;

        Ok(match meta_data {
            MetaDataDefinition::GdalMetaDataRegular(m) => Box::new(m),
            MetaDataDefinition::GdalStatic(m) => Box::new(m),
            MetaDataDefinition::GdalMetaDataList(m) => Box::new(m),
            MetaDataDefinition::GdalMetadataNetCdfCf(m) => Box::new(m),
            // NOTE(review): non-raster meta data reuses `DataIdTypeMissMatch`
            // here, which is about id kinds, not meta-data kinds — consider a
            // more specific error.
            _ => return Err(geoengine_operators::error::Error::DataIdTypeMissMatch),
        })
    }
}
329

330
/// A value that can be stored as dataset meta data in the Postgres database.
#[async_trait]
pub trait PostgresStorable<Tls>: Send + Sync
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Serializes `self` into the JSON column values (`meta_data` and
    /// `result_descriptor`) written into the `datasets` table.
    fn to_json(&self) -> Result<DatasetMetaDataJson>;
}
340

341
/// JSON payloads for the `meta_data` and `result_descriptor` columns of the
/// `datasets` table, produced by `PostgresStorable::to_json`.
pub struct DatasetMetaDataJson {
    // Full serialized meta data definition.
    meta_data: serde_json::Value,
    // Serialized `TypedResultDescriptor` extracted from the meta data.
    result_descriptor: serde_json::Value,
}
345

346
impl<Tls> DatasetStorer for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Datasets are stored via any boxed type that can serialize itself into
    /// the JSON columns of the `datasets` table (see [`PostgresStorable`]).
    type StorageType = Box<dyn PostgresStorable<Tls>>;
}
355

356
impl<Tls> PostgresStorable<Tls> for MetaDataDefinition
357
where
358
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
359
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
360
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
361
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
362
{
363
    fn to_json(&self) -> Result<DatasetMetaDataJson> {
10✔
364
        match self {
10✔
365
            MetaDataDefinition::MockMetaData(d) => Ok(DatasetMetaDataJson {
×
366
                meta_data: serde_json::to_value(self)?,
×
367
                result_descriptor: serde_json::to_value(TypedResultDescriptor::from(
×
368
                    d.result_descriptor.clone(),
×
369
                ))?,
×
370
            }),
371
            MetaDataDefinition::OgrMetaData(d) => Ok(DatasetMetaDataJson {
5✔
372
                meta_data: serde_json::to_value(self)?,
5✔
373
                result_descriptor: serde_json::to_value(TypedResultDescriptor::from(
5✔
374
                    d.result_descriptor.clone(),
5✔
375
                ))?,
5✔
376
            }),
377
            MetaDataDefinition::GdalMetaDataRegular(d) => Ok(DatasetMetaDataJson {
2✔
378
                meta_data: serde_json::to_value(self)?,
2✔
379
                result_descriptor: serde_json::to_value(TypedResultDescriptor::from(
2✔
380
                    d.result_descriptor.clone(),
2✔
381
                ))?,
2✔
382
            }),
383
            MetaDataDefinition::GdalStatic(d) => Ok(DatasetMetaDataJson {
1✔
384
                meta_data: serde_json::to_value(self)?,
1✔
385
                result_descriptor: serde_json::to_value(TypedResultDescriptor::from(
1✔
386
                    d.result_descriptor.clone(),
1✔
387
                ))?,
1✔
388
            }),
389
            MetaDataDefinition::GdalMetadataNetCdfCf(d) => Ok(DatasetMetaDataJson {
1✔
390
                meta_data: serde_json::to_value(self)?,
1✔
391
                result_descriptor: serde_json::to_value(TypedResultDescriptor::from(
1✔
392
                    d.result_descriptor.clone(),
1✔
393
                ))?,
1✔
394
            }),
395
            MetaDataDefinition::GdalMetaDataList(d) => Ok(DatasetMetaDataJson {
1✔
396
                meta_data: serde_json::to_value(self)?,
1✔
397
                result_descriptor: serde_json::to_value(TypedResultDescriptor::from(
1✔
398
                    d.result_descriptor.clone(),
1✔
399
                ))?,
1✔
400
            }),
401
        }
402
    }
10✔
403
}
404

405
#[async_trait]
impl<Tls> DatasetStore for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Inserts a new dataset row and returns the generated id together with
    /// the (possibly auto-generated) name.
    ///
    /// When no name is supplied, an unqualified name equal to the new id's
    /// string form is used.
    async fn add_dataset(
        &self,
        dataset: AddDataset,
        meta_data: Box<dyn PostgresStorable<Tls>>,
    ) -> Result<DatasetIdAndName> {
        let id = DatasetId::new();
        let name = dataset.name.unwrap_or_else(|| DatasetName {
            namespace: None,
            name: id.to_string(),
        });

        // Validates the name's namespace (helper defined elsewhere in this type).
        Self::check_namespace(&name)?;

        // Serialize meta data before touching the database, so serialization
        // errors never leave a partial row behind.
        let meta_data_json = meta_data.to_json()?;

        let conn = self.conn_pool.get().await?;

        // unique constraint on `id` checks if dataset with same id exists

        let stmt = conn
            .prepare(
                "
                INSERT INTO datasets (
                    id,
                    name,
                    display_name,
                    description,
                    source_operator,
                    result_descriptor,
                    meta_data,
                    symbology,
                    provenance
                )
                VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)",
            )
            .await?;

        // Parameter order must match the column list above:
        // $6 = result_descriptor, $7 = meta_data.
        conn.execute(
            &stmt,
            &[
                &id,
                &name,
                &dataset.display_name,
                &dataset.description,
                &dataset.source_operator,
                &meta_data_json.result_descriptor,
                &meta_data_json.meta_data,
                &dataset.symbology,
                &dataset.provenance,
            ],
        )
        .await?;

        Ok(DatasetIdAndName { id, name })
    }

    /// Deletes a dataset row by id.
    ///
    /// The affected-row count is ignored, so deleting a non-existent id
    /// succeeds silently.
    async fn delete_dataset(&self, dataset_id: DatasetId) -> Result<()> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn.prepare("DELETE FROM datasets WHERE id = $1;").await?;

        conn.execute(&stmt, &[&dataset_id]).await?;

        Ok(())
    }

    /// Boxes a meta data definition as the storage trait object accepted by
    /// [`DatasetStore::add_dataset`].
    fn wrap_meta_data(&self, meta: MetaDataDefinition) -> Self::StorageType {
        Box::new(meta)
    }
}
483

484
#[async_trait]
485
impl<Tls> UploadDb for PostgresDb<Tls>
486
where
487
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
488
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
489
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
490
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
491
{
492
    async fn load_upload(&self, upload: UploadId) -> Result<Upload> {
1✔
493
        // TODO: check permissions
494

495
        let conn = self.conn_pool.get().await?;
1✔
496

497
        let stmt = conn
1✔
498
            .prepare("SELECT id, files FROM uploads WHERE id = $1;")
1✔
499
            .await?;
1✔
500

501
        let row = conn.query_one(&stmt, &[&upload]).await?;
1✔
502

503
        Ok(Upload {
1✔
504
            id: row.get(0),
1✔
505
            files: row
1✔
506
                .get::<_, Vec<FileUpload>>(1)
1✔
507
                .into_iter()
1✔
508
                .map(Into::into)
1✔
509
                .collect(),
1✔
510
        })
1✔
511
    }
2✔
512

513
    async fn create_upload(&self, upload: Upload) -> Result<()> {
1✔
514
        let conn = self.conn_pool.get().await?;
1✔
515

516
        let stmt = conn
1✔
517
            .prepare("INSERT INTO uploads (id, files) VALUES ($1, $2)")
1✔
518
            .await?;
4✔
519

520
        conn.execute(
1✔
521
            &stmt,
1✔
522
            &[
1✔
523
                &upload.id,
1✔
524
                &upload
1✔
525
                    .files
1✔
526
                    .iter()
1✔
527
                    .map(FileUpload::from)
1✔
528
                    .collect::<Vec<_>>(),
1✔
529
            ],
1✔
530
        )
1✔
531
        .await?;
1✔
532
        Ok(())
1✔
533
    }
2✔
534
}
535

536
#[async_trait]
impl<Tls> DatasetLayerCollectionProvider for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Lists all datasets as a flat, paged layer collection.
    ///
    /// The requested `collection` id is echoed back unchanged; there is only
    /// one (flat) collection of dataset layers.
    async fn load_dataset_layer_collection(
        &self,
        collection: &LayerCollectionId,
        options: LayerCollectionListOptions,
    ) -> Result<LayerCollection> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "
                SELECT 
                    concat(id, '') AS id, 
                    display_name, 
                    description
                FROM 
                   datasets
                ORDER BY name ASC
                LIMIT $1
                OFFSET $2;",
            )
            .await?;

        let rows = conn
            .query(
                &stmt,
                &[&i64::from(options.limit), &i64::from(options.offset)],
            )
            .await?;

        let items = rows
            .iter()
            .map(|row| {
                Result::<CollectionItem>::Ok(CollectionItem::Layer(LayerListing {
                    id: ProviderLayerId {
                        provider_id: DATASET_DB_LAYER_PROVIDER_ID,
                        // `concat(id, '')` casts the uuid to text, so the
                        // column can be read directly as a `LayerId` string.
                        layer_id: LayerId(row.get(0)),
                    },
                    name: row.get(1),
                    description: row.get(2),
                    properties: vec![],
                }))
            })
            .filter_map(Result::ok)
            .collect();

        Ok(LayerCollection {
            id: ProviderLayerCollectionId {
                // NOTE(review): the collection advertises INTERNAL_PROVIDER_ID
                // while its items use DATASET_DB_LAYER_PROVIDER_ID — confirm
                // this asymmetry is intended.
                provider_id: INTERNAL_PROVIDER_ID,
                collection_id: collection.clone(),
            },
            name: "Datasets".to_string(),
            description: "Basic Layers for all Datasets".to_string(),
            items,
            entry_label: None,
            properties: vec![],
        })
    }

    /// Returns the fixed id of the root collection for dataset layers.
    async fn get_dataset_root_layer_collection_id(&self) -> Result<LayerCollectionId> {
        Ok(LayerCollectionId(DATASET_DB_ROOT_COLLECTION_ID.to_string()))
    }

    /// Builds a `Layer` (with a source-operator workflow) for a dataset.
    ///
    /// The layer id string must be a UUID; otherwise `IdStringMustBeUuid` is
    /// returned before the database is queried.
    async fn load_dataset_layer(&self, id: &LayerId) -> Result<Layer> {
        let conn = self.conn_pool.get().await?;

        let stmt = conn
            .prepare(
                "
                SELECT 
                    name, 
                    display_name,
                    description,
                    source_operator,
                    symbology
                FROM 
                    datasets
                WHERE id = $1;",
            )
            .await?;

        let row = conn
            .query_one(
                &stmt,
                &[
                    &Uuid::from_str(&id.0).map_err(|_| error::Error::IdStringMustBeUuid {
                        found: id.0.clone(),
                    })?,
                ],
            )
            .await?;

        let name: DatasetName = row.get(0);
        let display_name: String = row.get(1);
        let description: String = row.get(2);
        let source_operator: String = row.get(3);
        let symbology: Option<Symbology> = row.get(4);

        // Wraps the dataset in its registered source operator (e.g. an OGR or
        // GDAL source) so the layer carries an executable workflow.
        let operator = source_operator_from_dataset(&source_operator, &name.into())?;

        Ok(Layer {
            id: ProviderLayerId {
                provider_id: DATASET_DB_LAYER_PROVIDER_ID,
                layer_id: id.clone(),
            },
            name: display_name,
            description,
            workflow: Workflow { operator },
            symbology,
            properties: vec![],
            metadata: HashMap::new(),
        })
    }
}
657

658
/// Postgres (`ToSql`/`FromSql`) representation of an uploaded file.
#[derive(Debug, Clone, ToSql, FromSql)]
pub struct FileUpload {
    pub id: FileId,
    pub name: String,
    // Stored signed because Postgres integer columns are signed; the `From`
    // impls below convert to/from the internal `u64`.
    pub byte_size: i64,
}
664

665
impl From<crate::datasets::upload::FileUpload> for FileUpload {
    /// Converts the internal upload description into its database
    /// representation, narrowing the byte size to the signed column type.
    fn from(upload: crate::datasets::upload::FileUpload) -> Self {
        let crate::datasets::upload::FileUpload { id, name, byte_size } = upload;

        Self {
            id,
            name,
            byte_size: byte_size as i64,
        }
    }
}
674

675
impl From<&crate::datasets::upload::FileUpload> for FileUpload {
    /// Borrowing conversion into the database representation; only the file
    /// name needs an owned copy.
    fn from(upload: &crate::datasets::upload::FileUpload) -> Self {
        let id = upload.id;
        let name = upload.name.clone();
        let byte_size = upload.byte_size as i64;

        Self { id, name, byte_size }
    }
}
684

685
impl From<FileUpload> for crate::datasets::upload::FileUpload {
686
    fn from(upload: FileUpload) -> Self {
1✔
687
        Self {
1✔
688
            id: upload.id,
1✔
689
            name: upload.name,
1✔
690
            byte_size: upload.byte_size as u64,
1✔
691
        }
1✔
692
    }
1✔
693
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc