• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

geo-engine / geoengine / 6709775090

31 Oct 2023 03:43PM UTC coverage: 89.483% (-0.02%) from 89.498%
6709775090

push

github

web-flow
Merge pull request #882 from geo-engine/API-docs

Api docs

493 of 493 new or added lines in 15 files covered. (100.0%)

109476 of 122343 relevant lines covered (89.48%)

59359.13 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

96.31
/services/src/pro/contexts/postgres.rs
1
use super::migrations::pro_migrations;
2
use super::{ExecutionContextImpl, ProApplicationContext, ProGeoEngineDb, QuotaCheckerImpl};
3
use crate::api::cli::add_providers_from_directory;
4
use crate::contexts::{
5
    migrate_database, ApplicationContext, MigrationResult, PostgresContext, QueryContextImpl,
6
    SessionId,
7
};
8
use crate::contexts::{GeoEngineDb, SessionContext};
9
use crate::datasets::upload::{Volume, Volumes};
10
use crate::datasets::DatasetName;
11
use crate::error::{self, Error, Result};
12
use crate::pro::api::cli::add_datasets_from_directory;
13
use crate::pro::layers::add_from_directory::{
14
    add_layer_collections_from_directory, add_layers_from_directory,
15
    add_pro_providers_from_directory,
16
};
17
use crate::pro::machine_learning::ml_model::{MlModel, MlModelDb};
18
use crate::pro::quota::{initialize_quota_tracking, QuotaTrackingFactory};
19
use crate::pro::tasks::{ProTaskManager, ProTaskManagerBackend};
20
use crate::pro::users::{OidcRequestDb, UserAuth, UserSession};
21
use crate::pro::util::config::{Cache, Oidc, Quota};
22
use crate::tasks::SimpleTaskManagerContext;
23
use async_trait::async_trait;
24
use bb8_postgres::{
25
    bb8::Pool,
26
    bb8::PooledConnection,
27
    tokio_postgres::{tls::MakeTlsConnect, tls::TlsConnect, Config, Socket},
28
    PostgresConnectionManager,
29
};
30
use geoengine_datatypes::pro::MlModelId;
31
use geoengine_datatypes::raster::TilingSpecification;
32
use geoengine_datatypes::util::test::TestDefault;
33
use geoengine_datatypes::util::Identifier;
34
use geoengine_operators::engine::{
35
    ChunkByteSize, ExecutionContextExtensions, QueryContextExtensions,
36
};
37
use geoengine_operators::pro::cache::shared_cache::SharedCache;
38
use geoengine_operators::pro::machine_learning::{LoadMlModel, MlModelAccess};
39
use geoengine_operators::pro::meta::quota::{ComputationContext, QuotaChecker};
40
use geoengine_operators::util::create_rayon_thread_pool;
41
use log::info;
42
use rayon::ThreadPool;
43
use snafu::{ensure, ResultExt};
44
use std::path::PathBuf;
45
use std::sync::Arc;
46

47
// TODO: do not report postgres error details to user

/// A context with references to Postgres backends of the dbs. Automatically migrates schema on instantiation
#[derive(Clone)]
pub struct ProPostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // shared rayon pool used by query and execution contexts
    thread_pool: Arc<ThreadPool>,
    // tiling specification handed to every execution context
    exe_ctx_tiling_spec: TilingSpecification,
    // chunk byte size handed to every query context
    query_ctx_chunk_size: ChunkByteSize,
    // in-memory task manager backend; does not persist across restarts
    task_manager: Arc<ProTaskManagerBackend>,
    // `None` when no OIDC provider is configured
    oidc_request_db: Arc<Option<OidcRequestDb>>,
    // factory for per-session quota tracking
    quota: QuotaTrackingFactory,
    pub(crate) pool: Pool<PostgresConnectionManager<Tls>>,
    // configured upload volumes (admin-only listing)
    volumes: Volumes,
    // shared tile cache for raster query results
    tile_cache: Arc<SharedCache>,
}
68

69
impl<Tls> ProPostgresContext<Tls>
70
where
71
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
72
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
73
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
74
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
75
{
76
    pub async fn new_with_context_spec(
82✔
77
        config: Config,
82✔
78
        tls: Tls,
82✔
79
        exe_ctx_tiling_spec: TilingSpecification,
82✔
80
        query_ctx_chunk_size: ChunkByteSize,
82✔
81
        quota_config: Quota,
82✔
82
        oidc_db: Option<OidcRequestDb>,
82✔
83
    ) -> Result<Self> {
82✔
84
        let pg_mgr = PostgresConnectionManager::new(config, tls);
82✔
85

86
        let pool = Pool::builder().build(pg_mgr).await?;
82✔
87

88
        Self::create_pro_database(pool.get().await?).await?;
1,923✔
89

90
        let db = ProPostgresDb::new(pool.clone(), UserSession::admin_session());
82✔
91
        let quota = initialize_quota_tracking(
82✔
92
            quota_config.mode,
82✔
93
            db,
82✔
94
            quota_config.increment_quota_buffer_size,
82✔
95
            quota_config.increment_quota_buffer_timeout_seconds,
82✔
96
        );
82✔
97

82✔
98
        Ok(ProPostgresContext {
82✔
99
            task_manager: Default::default(),
82✔
100
            thread_pool: create_rayon_thread_pool(0),
82✔
101
            exe_ctx_tiling_spec,
82✔
102
            query_ctx_chunk_size,
82✔
103
            oidc_request_db: Arc::new(oidc_db),
82✔
104
            quota,
82✔
105
            pool,
82✔
106
            volumes: Default::default(),
82✔
107
            tile_cache: Arc::new(SharedCache::test_default()),
82✔
108
        })
82✔
109
    }
82✔
110

111
    #[allow(clippy::missing_panics_doc)]
112
    pub async fn new_with_oidc(
×
113
        config: Config,
×
114
        tls: Tls,
×
115
        oidc_db: OidcRequestDb,
×
116
        cache_config: Cache,
×
117
        quota_config: Quota,
×
118
    ) -> Result<Self> {
×
119
        let pg_mgr = PostgresConnectionManager::new(config, tls);
×
120

121
        let pool = Pool::builder().build(pg_mgr).await?;
×
122

123
        Self::create_pro_database(pool.get().await?).await?;
×
124

125
        let db = ProPostgresDb::new(pool.clone(), UserSession::admin_session());
×
126
        let quota = initialize_quota_tracking(
×
127
            quota_config.mode,
×
128
            db,
×
129
            quota_config.increment_quota_buffer_size,
×
130
            quota_config.increment_quota_buffer_timeout_seconds,
×
131
        );
×
132

×
133
        Ok(ProPostgresContext {
×
134
            task_manager: Default::default(),
×
135
            thread_pool: create_rayon_thread_pool(0),
×
136
            exe_ctx_tiling_spec: TestDefault::test_default(),
×
137
            query_ctx_chunk_size: TestDefault::test_default(),
×
138
            oidc_request_db: Arc::new(Some(oidc_db)),
×
139
            quota,
×
140
            pool,
×
141
            volumes: Default::default(),
×
142
            tile_cache: Arc::new(
×
143
                SharedCache::new(
×
144
                    cache_config.cache_size_in_mb,
×
145
                    cache_config.landing_zone_ratio,
×
146
                )
×
147
                .expect("tile cache creation should work because the config is valid"),
×
148
            ),
×
149
        })
×
150
    }
×
151

152
    // TODO: check if the datasets exist already and don't output warnings when skipping them
153
    #[allow(clippy::too_many_arguments, clippy::missing_panics_doc)]
154
    pub async fn new_with_data(
×
155
        config: Config,
×
156
        tls: Tls,
×
157
        dataset_defs_path: PathBuf,
×
158
        provider_defs_path: PathBuf,
×
159
        layer_defs_path: PathBuf,
×
160
        layer_collection_defs_path: PathBuf,
×
161
        exe_ctx_tiling_spec: TilingSpecification,
×
162
        query_ctx_chunk_size: ChunkByteSize,
×
163
        oidc_config: Oidc,
×
164
        cache_config: Cache,
×
165
        quota_config: Quota,
×
166
    ) -> Result<Self> {
×
167
        let pg_mgr = PostgresConnectionManager::new(config, tls);
×
168

169
        let pool = Pool::builder().build(pg_mgr).await?;
×
170

171
        let created_schema = Self::create_pro_database(pool.get().await?).await?;
×
172

173
        let db = ProPostgresDb::new(pool.clone(), UserSession::admin_session());
×
174
        let quota = initialize_quota_tracking(
×
175
            quota_config.mode,
×
176
            db,
×
177
            quota_config.increment_quota_buffer_size,
×
178
            quota_config.increment_quota_buffer_timeout_seconds,
×
179
        );
×
180

×
181
        let app_ctx = ProPostgresContext {
×
182
            task_manager: Default::default(),
×
183
            thread_pool: create_rayon_thread_pool(0),
×
184
            exe_ctx_tiling_spec,
×
185
            query_ctx_chunk_size,
×
186
            oidc_request_db: Arc::new(OidcRequestDb::try_from(oidc_config).ok()),
×
187
            quota,
×
188
            pool,
×
189
            volumes: Default::default(),
×
190
            tile_cache: Arc::new(
×
191
                SharedCache::new(
×
192
                    cache_config.cache_size_in_mb,
×
193
                    cache_config.landing_zone_ratio,
×
194
                )
×
195
                .expect("tile cache creation should work because the config is valid"),
×
196
            ),
×
197
        };
×
198

×
199
        if created_schema {
×
200
            info!("Populating database with initial data...");
×
201

202
            let mut db = app_ctx.session_context(UserSession::admin_session()).db();
×
203

×
204
            add_layers_from_directory(&mut db, layer_defs_path).await;
×
205
            add_layer_collections_from_directory(&mut db, layer_collection_defs_path).await;
×
206

207
            add_datasets_from_directory(&mut db, dataset_defs_path).await;
×
208

209
            add_providers_from_directory(&mut db, provider_defs_path.clone()).await;
×
210

211
            add_pro_providers_from_directory(&mut db, provider_defs_path.join("pro")).await;
×
212
        }
×
213

214
        Ok(app_ctx)
×
215
    }
×
216

217
    #[allow(clippy::too_many_lines)]
218
    /// Creates the database schema. Returns true if the schema was created, false if it already existed.
219
    pub(crate) async fn create_pro_database(
82✔
220
        mut conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
82✔
221
    ) -> Result<bool> {
82✔
222
        PostgresContext::maybe_clear_database(&conn).await?;
82✔
223

224
        let migration = migrate_database(&mut conn, &pro_migrations()).await?;
1,841✔
225

226
        Ok(migration == MigrationResult::CreatedDatabase)
82✔
227
    }
82✔
228

229
    pub fn oidc_request_db(&self) -> Arc<Option<OidcRequestDb>> {
2✔
230
        self.oidc_request_db.clone()
2✔
231
    }
2✔
232
}
233

234
#[async_trait]
235
impl<Tls> ApplicationContext for ProPostgresContext<Tls>
236
where
237
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
238
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
239
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
240
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
241
{
242
    type SessionContext = PostgresSessionContext<Tls>;
243
    type Session = UserSession;
244

245
    fn session_context(&self, session: Self::Session) -> Self::SessionContext {
121✔
246
        PostgresSessionContext {
121✔
247
            session,
121✔
248
            context: self.clone(),
121✔
249
        }
121✔
250
    }
121✔
251

252
    async fn session_by_id(&self, session_id: SessionId) -> Result<Self::Session> {
51✔
253
        self.user_session_by_id(session_id)
51✔
254
            .await
397✔
255
            .map_err(Box::new)
51✔
256
            .context(error::Unauthorized)
51✔
257
    }
102✔
258
}
259

260
#[async_trait]
261
impl<Tls> ProApplicationContext for ProPostgresContext<Tls>
262
where
263
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
264
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
265
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
266
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
267
{
268
    fn oidc_request_db(&self) -> Option<&OidcRequestDb> {
6✔
269
        self.oidc_request_db.as_ref().as_ref()
6✔
270
    }
6✔
271
}
272

273
/// A session-scoped context: pairs a user session with the shared
/// `ProPostgresContext` backends.
#[derive(Clone)]
pub struct PostgresSessionContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // the user session this context is scoped to
    session: UserSession,
    // shared application context (pool, caches, quota, ...)
    context: ProPostgresContext<Tls>,
}
284

285
#[async_trait]
286
impl<Tls> SessionContext for PostgresSessionContext<Tls>
287
where
288
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
289
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
290
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
291
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
292
{
293
    type Session = UserSession;
294
    type GeoEngineDB = ProPostgresDb<Tls>;
295

296
    type TaskContext = SimpleTaskManagerContext;
297
    type TaskManager = ProTaskManager; // this does not persist across restarts
298
    type QueryContext = QueryContextImpl;
299
    type ExecutionContext = ExecutionContextImpl<Self::GeoEngineDB>;
300

301
    fn db(&self) -> Self::GeoEngineDB {
144✔
302
        ProPostgresDb::new(self.context.pool.clone(), self.session.clone())
144✔
303
    }
144✔
304

305
    fn tasks(&self) -> Self::TaskManager {
5✔
306
        ProTaskManager::new(self.context.task_manager.clone(), self.session.clone())
5✔
307
    }
5✔
308

309
    fn query_context(&self) -> Result<Self::QueryContext> {
6✔
310
        // TODO: load config only once
6✔
311

6✔
312
        let mut extensions = QueryContextExtensions::default();
6✔
313
        extensions.insert(
6✔
314
            self.context
6✔
315
                .quota
6✔
316
                .create_quota_tracking(&self.session, ComputationContext::new()),
6✔
317
        );
6✔
318
        extensions.insert(Box::new(QuotaCheckerImpl { user_db: self.db() }) as QuotaChecker);
6✔
319
        extensions.insert(self.context.tile_cache.clone());
6✔
320

6✔
321
        Ok(QueryContextImpl::new_with_extensions(
6✔
322
            self.context.query_ctx_chunk_size,
6✔
323
            self.context.thread_pool.clone(),
6✔
324
            extensions,
6✔
325
        ))
6✔
326
    }
6✔
327

328
    fn execution_context(&self) -> Result<Self::ExecutionContext> {
10✔
329
        let mut extensions = ExecutionContextExtensions::default();
10✔
330
        let ml_model_access: MlModelAccess = Box::new(self.db());
10✔
331
        extensions.insert(ml_model_access);
10✔
332

10✔
333
        Ok(
10✔
334
            ExecutionContextImpl::<ProPostgresDb<Tls>>::new_with_extensions(
10✔
335
                self.db(),
10✔
336
                self.context.thread_pool.clone(),
10✔
337
                self.context.exe_ctx_tiling_spec,
10✔
338
                extensions,
10✔
339
            ),
10✔
340
        )
10✔
341
    }
10✔
342

343
    fn volumes(&self) -> Result<Vec<Volume>> {
×
344
        ensure!(self.session.is_admin(), error::PermissionDenied);
×
345

346
        Ok(self.context.volumes.volumes.clone())
×
347
    }
×
348

349
    fn session(&self) -> &Self::Session {
×
350
        &self.session
×
351
    }
×
352
}
353

354
/// A pro db handle: a Postgres connection pool paired with the user session
/// that operations are authorized and namespaced against.
pub struct ProPostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // shared Postgres connection pool
    pub(crate) conn_pool: Pool<PostgresConnectionManager<Tls>>,
    // session used for permission and namespace checks
    pub(crate) session: UserSession,
}
364

365
impl<Tls> ProPostgresDb<Tls>
366
where
367
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
368
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
369
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
370
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
371
{
372
    pub fn new(conn_pool: Pool<PostgresConnectionManager<Tls>>, session: UserSession) -> Self {
227✔
373
        Self { conn_pool, session }
227✔
374
    }
227✔
375

376
    /// Check whether the namepsace of the given dataset is allowed for insertion
377
    pub(crate) fn check_namespace(&self, id: &DatasetName) -> Result<()> {
23✔
378
        let is_ok = match &id.namespace {
23✔
379
            Some(namespace) => namespace.as_str() == self.session.user.id.to_string(),
17✔
380
            None => self.session.is_admin(),
6✔
381
        };
382

383
        if is_ok {
23✔
384
            Ok(())
23✔
385
        } else {
386
            Err(Error::InvalidDatasetIdNamespace)
×
387
        }
388
    }
23✔
389
}
390

391
#[async_trait]
392
impl<Tls> MlModelDb for ProPostgresDb<Tls>
393
where
394
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
395
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
396
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
397
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
398
{
399
    /// Load a machine learning model from the database by its ID.
400
    ///
401
    /// # Arguments
402
    ///
403
    /// * `model_id` - A MlModelId that identifies the desired model.
404
    ///
405
    /// # Returns
406
    ///
407
    /// * `MlModel` - The loaded machine learning model.
408
    /// * `Error` - If the model cannot be found or loaded.
409
    async fn load_ml_model(&self, model_id: MlModelId) -> Result<MlModel> {
5✔
410
        let conn = self.conn_pool.get().await?;
5✔
411

412
        let stmt = conn
5✔
413
            .prepare("SELECT id, content FROM ml_models WHERE id = $1")
5✔
414
            .await?;
5✔
415

416
        let row = conn.query_opt(&stmt, &[&model_id]).await?;
8✔
417

418
        // Handle the result of the query
419
        match row {
5✔
420
            Some(row) => Ok(MlModel {
4✔
421
                id: row.get(0),
4✔
422
                content: row.get(1),
4✔
423
            }),
4✔
424
            None => Err(
1✔
425
                error::Error::MachineLearningError { source:
1✔
426
                    crate::pro::machine_learning::ml_error::MachineLearningError::UnknownModelIdInPostgres {
1✔
427
                     model_id,
1✔
428
                    }
1✔
429
                },
1✔
430
            ),
1✔
431
        }
432
    }
10✔
433

434
    /// Store a machine learning model in the database.
435
    ///
436
    /// # Arguments
437
    ///
438
    /// * `model` - The MlModel to be stored.
439
    ///
440
    /// # Returns
441
    ///
442
    /// * `Result<()>` - Ok if the model was successfully stored, otherwise an error.
443
    async fn store_ml_model(&self, model: MlModel) -> Result<()> {
3✔
444
        let mut conn = self.conn_pool.get().await?;
3✔
445

446
        let tx = conn.build_transaction().start().await?;
3✔
447

448
        let stmt = tx
3✔
449
            .prepare(
3✔
450
                "
3✔
451
                INSERT INTO ml_models (
3✔
452
                    id,
3✔
453
                    content
3✔
454
                )
3✔
455
                VALUES ($1, $2);",
3✔
456
            )
3✔
457
            .await?;
3✔
458

459
        tx.execute(&stmt, &[&model.id, &model.content]).await?;
3✔
460

461
        tx.commit().await?;
3✔
462

463
        Ok(())
3✔
464
    }
6✔
465
}
466

467
#[async_trait]
468
impl<Tls> LoadMlModel for ProPostgresDb<Tls>
469
where
470
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
471
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
472
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
473
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
474
{
475
    async fn load_ml_model_by_id(
3✔
476
        &self,
3✔
477
        model_id: MlModelId,
3✔
478
    ) -> Result<String, geoengine_operators::error::Error> {
3✔
479
        self.load_ml_model(model_id)
3✔
480
            .await
12✔
481
            .map(|model| model.content)
3✔
482
            .map_err(|_| geoengine_operators::error::Error::MachineLearningModelNotFound)
3✔
483
    }
6✔
484
}
485

486
// Marker impl: `ProPostgresDb` provides the `GeoEngineDb` API through the
// trait's super-trait/blanket requirements; no additional methods are needed.
impl<Tls> GeoEngineDb for ProPostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
494

495
// Marker impl: pro-only db capabilities are supplied by the trait's
// requirements; no additional methods are needed here.
impl<Tls> ProGeoEngineDb for ProPostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
503

504
#[cfg(test)]
505
mod tests {
506
    use super::*;
507
    use crate::datasets::external::netcdfcf::NetCdfCfDataProviderDefinition;
508
    use crate::datasets::listing::{DatasetListOptions, DatasetListing, ProvenanceOutput};
509
    use crate::datasets::listing::{DatasetProvider, Provenance};
510
    use crate::datasets::storage::{DatasetStore, MetaDataDefinition};
511
    use crate::datasets::upload::{FileId, UploadId};
512
    use crate::datasets::upload::{FileUpload, Upload, UploadDb};
513
    use crate::datasets::{AddDataset, DatasetIdAndName};
514
    use crate::layers::add_from_directory::UNSORTED_COLLECTION_ID;
515
    use crate::layers::layer::{
516
        AddLayer, AddLayerCollection, CollectionItem, LayerCollection, LayerCollectionListOptions,
517
        LayerCollectionListing, LayerListing, ProviderLayerCollectionId, ProviderLayerId,
518
    };
519
    use crate::layers::listing::{LayerCollectionId, LayerCollectionProvider};
520
    use crate::layers::storage::{
521
        LayerDb, LayerProviderDb, LayerProviderListing, LayerProviderListingOptions,
522
        INTERNAL_PROVIDER_ID,
523
    };
524
    use crate::pro::ge_context;
525
    use crate::pro::machine_learning::ml_model::{MlModel, MlModelDb};
526
    use crate::pro::permissions::{Permission, PermissionDb, Role, RoleDescription, RoleId};
527
    use crate::pro::users::{
528
        ExternalUserClaims, RoleDb, UserCredentials, UserDb, UserId, UserRegistration,
529
    };
530
    use crate::pro::util::config::QuotaTrackingMode;
531
    use crate::pro::util::tests::{admin_login, register_ndvi_workflow_helper};
532
    use crate::projects::{
533
        CreateProject, LayerUpdate, LoadVersion, OrderBy, Plot, PlotUpdate, PointSymbology,
534
        ProjectDb, ProjectId, ProjectLayer, ProjectListOptions, ProjectListing, STRectangle,
535
        UpdateProject,
536
    };
537
    use crate::workflows::registry::WorkflowRegistry;
538
    use crate::workflows::workflow::Workflow;
539
    use bb8_postgres::tokio_postgres::NoTls;
540
    use futures::join;
541
    use geoengine_datatypes::collections::VectorDataType;
542
    use geoengine_datatypes::dataset::{DataProviderId, LayerId};
543
    use geoengine_datatypes::primitives::CacheTtlSeconds;
544
    use geoengine_datatypes::primitives::{
545
        BoundingBox2D, Coordinate2D, DateTime, Duration, FeatureDataType, Measurement,
546
        RasterQueryRectangle, SpatialResolution, TimeGranularity, TimeInstance, TimeInterval,
547
        TimeStep, VectorQueryRectangle,
548
    };
549
    use geoengine_datatypes::pro::MlModelId;
550
    use geoengine_datatypes::raster::RasterDataType;
551
    use geoengine_datatypes::spatial_reference::{SpatialReference, SpatialReferenceOption};
552
    use geoengine_datatypes::test_data;
553
    use geoengine_datatypes::util::Identifier;
554
    use geoengine_operators::engine::{
555
        MetaData, MetaDataProvider, MultipleRasterOrSingleVectorSource, PlotOperator,
556
        RasterResultDescriptor, StaticMetaData, TypedOperator, TypedResultDescriptor,
557
        VectorColumnInfo, VectorOperator, VectorResultDescriptor,
558
    };
559
    use geoengine_operators::mock::{MockPointSource, MockPointSourceParams};
560
    use geoengine_operators::plot::{Statistics, StatisticsParams};
561
    use geoengine_operators::source::{
562
        CsvHeader, FileNotFoundHandling, FormatSpecifics, GdalDatasetGeoTransform,
563
        GdalDatasetParameters, GdalLoadingInfo, GdalMetaDataList, GdalMetaDataRegular,
564
        GdalMetaDataStatic, GdalMetadataNetCdfCf, OgrSourceColumnSpec, OgrSourceDataset,
565
        OgrSourceDatasetTimeType, OgrSourceDurationSpec, OgrSourceErrorSpec, OgrSourceTimeFormat,
566
    };
567
    use geoengine_operators::util::input::MultiRasterOrVectorOperator::Raster;
568
    use openidconnect::SubjectIdentifier;
569
    use serde_json::json;
570
    use std::str::FromStr;
571

572
    #[ge_context::test]
2✔
573
    async fn test(app_ctx: ProPostgresContext<NoTls>) {
1✔
574
        anonymous(&app_ctx).await;
43✔
575

576
        let _user_id = user_reg_login(&app_ctx).await;
25✔
577

578
        let session = app_ctx
1✔
579
            .login(UserCredentials {
1✔
580
                email: "foo@example.com".into(),
1✔
581
                password: "secret123".into(),
1✔
582
            })
1✔
583
            .await
11✔
584
            .unwrap();
1✔
585

1✔
586
        create_projects(&app_ctx, &session).await;
116✔
587

588
        let projects = list_projects(&app_ctx, &session).await;
13✔
589

590
        set_session(&app_ctx, &projects).await;
29✔
591

592
        let project_id = projects[0].id;
1✔
593

1✔
594
        update_projects(&app_ctx, &session, project_id).await;
199✔
595

596
        add_permission(&app_ctx, &session, project_id).await;
32✔
597

598
        delete_project(&app_ctx, &session, project_id).await;
11✔
599
    }
1✔
600

601
    #[ge_context::test]
2✔
602
    async fn test_external(app_ctx: ProPostgresContext<NoTls>) {
1✔
603
        anonymous(&app_ctx).await;
43✔
604

605
        let session = external_user_login_twice(&app_ctx).await;
52✔
606

607
        create_projects(&app_ctx, &session).await;
116✔
608

609
        let projects = list_projects(&app_ctx, &session).await;
13✔
610

611
        set_session_external(&app_ctx, &projects).await;
29✔
612

613
        let project_id = projects[0].id;
1✔
614

1✔
615
        update_projects(&app_ctx, &session, project_id).await;
199✔
616

617
        add_permission(&app_ctx, &session, project_id).await;
44✔
618

619
        delete_project(&app_ctx, &session, project_id).await;
11✔
620
    }
1✔
621

622
    async fn set_session(app_ctx: &ProPostgresContext<NoTls>, projects: &[ProjectListing]) {
1✔
623
        let credentials = UserCredentials {
1✔
624
            email: "foo@example.com".into(),
1✔
625
            password: "secret123".into(),
1✔
626
        };
1✔
627

628
        let session = app_ctx.login(credentials).await.unwrap();
11✔
629

1✔
630
        set_session_in_database(app_ctx, projects, session).await;
18✔
631
    }
1✔
632

633
    async fn set_session_external(
1✔
634
        app_ctx: &ProPostgresContext<NoTls>,
1✔
635
        projects: &[ProjectListing],
1✔
636
    ) {
1✔
637
        let external_user_claims = ExternalUserClaims {
1✔
638
            external_id: SubjectIdentifier::new("Foo bar Id".into()),
1✔
639
            email: "foo@bar.de".into(),
1✔
640
            real_name: "Foo Bar".into(),
1✔
641
        };
1✔
642

643
        let session = app_ctx
1✔
644
            .login_external(external_user_claims, Duration::minutes(10))
1✔
645
            .await
11✔
646
            .unwrap();
1✔
647

1✔
648
        set_session_in_database(app_ctx, projects, session).await;
18✔
649
    }
1✔
650

651
    async fn set_session_in_database(
2✔
652
        app_ctx: &ProPostgresContext<NoTls>,
2✔
653
        projects: &[ProjectListing],
2✔
654
        session: UserSession,
2✔
655
    ) {
2✔
656
        let db = app_ctx.session_context(session.clone()).db();
2✔
657

2✔
658
        db.set_session_project(projects[0].id).await.unwrap();
6✔
659

2✔
660
        assert_eq!(
2✔
661
            app_ctx.session_by_id(session.id).await.unwrap().project,
12✔
662
            Some(projects[0].id)
2✔
663
        );
664

665
        let rect = STRectangle::new_unchecked(SpatialReference::epsg_4326(), 0., 1., 2., 3., 1, 2);
2✔
666
        db.set_session_view(rect.clone()).await.unwrap();
6✔
667
        assert_eq!(
2✔
668
            app_ctx.session_by_id(session.id).await.unwrap().view,
12✔
669
            Some(rect)
2✔
670
        );
671
    }
2✔
672

673
    async fn delete_project(
2✔
674
        app_ctx: &ProPostgresContext<NoTls>,
2✔
675
        session: &UserSession,
2✔
676
        project_id: ProjectId,
2✔
677
    ) {
2✔
678
        let db = app_ctx.session_context(session.clone()).db();
2✔
679

2✔
680
        db.delete_project(project_id).await.unwrap();
14✔
681

2✔
682
        assert!(db.load_project(project_id).await.is_err());
8✔
683
    }
2✔
684

685
    async fn add_permission(
2✔
686
        app_ctx: &ProPostgresContext<NoTls>,
2✔
687
        session: &UserSession,
2✔
688
        project_id: ProjectId,
2✔
689
    ) {
2✔
690
        let db = app_ctx.session_context(session.clone()).db();
2✔
691

2✔
692
        assert!(db
2✔
693
            .has_permission(project_id, Permission::Owner)
2✔
694
            .await
10✔
695
            .unwrap());
2✔
696

697
        let user2 = app_ctx
2✔
698
            .register_user(UserRegistration {
2✔
699
                email: "user2@example.com".into(),
2✔
700
                password: "12345678".into(),
2✔
701
                real_name: "User2".into(),
2✔
702
            })
2✔
703
            .await
14✔
704
            .unwrap();
2✔
705

706
        let session2 = app_ctx
2✔
707
            .login(UserCredentials {
2✔
708
                email: "user2@example.com".into(),
2✔
709
                password: "12345678".into(),
2✔
710
            })
2✔
711
            .await
18✔
712
            .unwrap();
2✔
713

2✔
714
        let db2 = app_ctx.session_context(session2.clone()).db();
2✔
715
        assert!(!db2
2✔
716
            .has_permission(project_id, Permission::Owner)
2✔
717
            .await
10✔
718
            .unwrap());
2✔
719

720
        db.add_permission(user2.into(), project_id, Permission::Read)
2✔
721
            .await
14✔
722
            .unwrap();
2✔
723

2✔
724
        assert!(db2
2✔
725
            .has_permission(project_id, Permission::Read)
2✔
726
            .await
10✔
727
            .unwrap());
2✔
728
    }
2✔
729

730
    #[allow(clippy::too_many_lines)]
731
    async fn update_projects(
2✔
732
        app_ctx: &ProPostgresContext<NoTls>,
2✔
733
        session: &UserSession,
2✔
734
        project_id: ProjectId,
2✔
735
    ) {
2✔
736
        let db = app_ctx.session_context(session.clone()).db();
2✔
737

738
        let project = db
2✔
739
            .load_project_version(project_id, LoadVersion::Latest)
2✔
740
            .await
84✔
741
            .unwrap();
2✔
742

743
        let layer_workflow_id = db
2✔
744
            .register_workflow(Workflow {
2✔
745
                operator: TypedOperator::Vector(
2✔
746
                    MockPointSource {
2✔
747
                        params: MockPointSourceParams {
2✔
748
                            points: vec![Coordinate2D::new(1., 2.); 3],
2✔
749
                        },
2✔
750
                    }
2✔
751
                    .boxed(),
2✔
752
                ),
2✔
753
            })
2✔
754
            .await
6✔
755
            .unwrap();
2✔
756

2✔
757
        assert!(db.load_workflow(&layer_workflow_id).await.is_ok());
6✔
758

759
        let plot_workflow_id = db
2✔
760
            .register_workflow(Workflow {
2✔
761
                operator: Statistics {
2✔
762
                    params: StatisticsParams {
2✔
763
                        column_names: vec![],
2✔
764
                    },
2✔
765
                    sources: MultipleRasterOrSingleVectorSource {
2✔
766
                        source: Raster(vec![]),
2✔
767
                    },
2✔
768
                }
2✔
769
                .boxed()
2✔
770
                .into(),
2✔
771
            })
2✔
772
            .await
6✔
773
            .unwrap();
2✔
774

2✔
775
        assert!(db.load_workflow(&plot_workflow_id).await.is_ok());
6✔
776

777
        // add a plot
778
        let update = UpdateProject {
2✔
779
            id: project.id,
2✔
780
            name: Some("Test9 Updated".into()),
2✔
781
            description: None,
2✔
782
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
2✔
783
                workflow: layer_workflow_id,
2✔
784
                name: "TestLayer".into(),
2✔
785
                symbology: PointSymbology::default().into(),
2✔
786
                visibility: Default::default(),
2✔
787
            })]),
2✔
788
            plots: Some(vec![PlotUpdate::UpdateOrInsert(Plot {
2✔
789
                workflow: plot_workflow_id,
2✔
790
                name: "Test Plot".into(),
2✔
791
            })]),
2✔
792
            bounds: None,
2✔
793
            time_step: None,
2✔
794
        };
2✔
795
        db.update_project(update).await.unwrap();
152✔
796

797
        let versions = db.list_project_versions(project_id).await.unwrap();
14✔
798
        assert_eq!(versions.len(), 2);
2✔
799

800
        // add second plot
801
        let update = UpdateProject {
2✔
802
            id: project.id,
2✔
803
            name: Some("Test9 Updated".into()),
2✔
804
            description: None,
2✔
805
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
2✔
806
                workflow: layer_workflow_id,
2✔
807
                name: "TestLayer".into(),
2✔
808
                symbology: PointSymbology::default().into(),
2✔
809
                visibility: Default::default(),
2✔
810
            })]),
2✔
811
            plots: Some(vec![
2✔
812
                PlotUpdate::UpdateOrInsert(Plot {
2✔
813
                    workflow: plot_workflow_id,
2✔
814
                    name: "Test Plot".into(),
2✔
815
                }),
2✔
816
                PlotUpdate::UpdateOrInsert(Plot {
2✔
817
                    workflow: plot_workflow_id,
2✔
818
                    name: "Test Plot".into(),
2✔
819
                }),
2✔
820
            ]),
2✔
821
            bounds: None,
2✔
822
            time_step: None,
2✔
823
        };
2✔
824
        db.update_project(update).await.unwrap();
52✔
825

826
        let versions = db.list_project_versions(project_id).await.unwrap();
14✔
827
        assert_eq!(versions.len(), 3);
2✔
828

829
        // delete plots
830
        let update = UpdateProject {
2✔
831
            id: project.id,
2✔
832
            name: None,
2✔
833
            description: None,
2✔
834
            layers: None,
2✔
835
            plots: Some(vec![]),
2✔
836
            bounds: None,
2✔
837
            time_step: None,
2✔
838
        };
2✔
839
        db.update_project(update).await.unwrap();
44✔
840

841
        let versions = db.list_project_versions(project_id).await.unwrap();
14✔
842
        assert_eq!(versions.len(), 4);
2✔
843
    }
2✔
844

845
    async fn list_projects(
2✔
846
        app_ctx: &ProPostgresContext<NoTls>,
2✔
847
        session: &UserSession,
2✔
848
    ) -> Vec<ProjectListing> {
2✔
849
        let options = ProjectListOptions {
2✔
850
            order: OrderBy::NameDesc,
2✔
851
            offset: 0,
2✔
852
            limit: 2,
2✔
853
        };
2✔
854

2✔
855
        let db = app_ctx.session_context(session.clone()).db();
2✔
856

857
        let projects = db.list_projects(options).await.unwrap();
26✔
858

2✔
859
        assert_eq!(projects.len(), 2);
2✔
860
        assert_eq!(projects[0].name, "Test9");
2✔
861
        assert_eq!(projects[1].name, "Test8");
2✔
862
        projects
2✔
863
    }
2✔
864

865
    async fn create_projects(app_ctx: &ProPostgresContext<NoTls>, session: &UserSession) {
2✔
866
        let db = app_ctx.session_context(session.clone()).db();
2✔
867

868
        for i in 0..10 {
22✔
869
            let create = CreateProject {
20✔
870
                name: format!("Test{i}"),
20✔
871
                description: format!("Test{}", 10 - i),
20✔
872
                bounds: STRectangle::new(
20✔
873
                    SpatialReferenceOption::Unreferenced,
20✔
874
                    0.,
20✔
875
                    0.,
20✔
876
                    1.,
20✔
877
                    1.,
20✔
878
                    0,
20✔
879
                    1,
20✔
880
                )
20✔
881
                .unwrap(),
20✔
882
                time_step: None,
20✔
883
            };
20✔
884
            db.create_project(create).await.unwrap();
232✔
885
        }
886
    }
2✔
887

888
    async fn user_reg_login(app_ctx: &ProPostgresContext<NoTls>) -> UserId {
1✔
889
        let user_registration = UserRegistration {
1✔
890
            email: "foo@example.com".into(),
1✔
891
            password: "secret123".into(),
1✔
892
            real_name: "Foo Bar".into(),
1✔
893
        };
1✔
894

895
        let user_id = app_ctx.register_user(user_registration).await.unwrap();
3✔
896

1✔
897
        let credentials = UserCredentials {
1✔
898
            email: "foo@example.com".into(),
1✔
899
            password: "secret123".into(),
1✔
900
        };
1✔
901

902
        let session = app_ctx.login(credentials).await.unwrap();
9✔
903

1✔
904
        let db = app_ctx.session_context(session.clone()).db();
1✔
905

1✔
906
        app_ctx.session_by_id(session.id).await.unwrap();
6✔
907

1✔
908
        db.logout().await.unwrap();
3✔
909

1✔
910
        assert!(app_ctx.session_by_id(session.id).await.is_err());
4✔
911

912
        user_id
1✔
913
    }
1✔
914

915
    //TODO: No duplicate tests for postgres and hashmap implementation possible?
916
    async fn external_user_login_twice(app_ctx: &ProPostgresContext<NoTls>) -> UserSession {
1✔
917
        let external_user_claims = ExternalUserClaims {
1✔
918
            external_id: SubjectIdentifier::new("Foo bar Id".into()),
1✔
919
            email: "foo@bar.de".into(),
1✔
920
            real_name: "Foo Bar".into(),
1✔
921
        };
1✔
922
        let duration = Duration::minutes(30);
1✔
923

924
        //NEW
925
        let login_result = app_ctx
1✔
926
            .login_external(external_user_claims.clone(), duration)
1✔
927
            .await;
21✔
928
        assert!(login_result.is_ok());
1✔
929

930
        let session_1 = login_result.unwrap();
1✔
931
        let user_id = session_1.user.id; //TODO: Not a deterministic test.
1✔
932

1✔
933
        let db1 = app_ctx.session_context(session_1.clone()).db();
1✔
934

1✔
935
        assert!(session_1.user.email.is_some());
1✔
936
        assert_eq!(session_1.user.email.unwrap(), "foo@bar.de");
1✔
937
        assert!(session_1.user.real_name.is_some());
1✔
938
        assert_eq!(session_1.user.real_name.unwrap(), "Foo Bar");
1✔
939

940
        let expected_duration = session_1.created + duration;
1✔
941
        assert_eq!(session_1.valid_until, expected_duration);
1✔
942

943
        assert!(app_ctx.session_by_id(session_1.id).await.is_ok());
7✔
944

945
        assert!(db1.logout().await.is_ok());
3✔
946

947
        assert!(app_ctx.session_by_id(session_1.id).await.is_err());
4✔
948

949
        let duration = Duration::minutes(10);
1✔
950
        let login_result = app_ctx
1✔
951
            .login_external(external_user_claims.clone(), duration)
1✔
952
            .await;
11✔
953
        assert!(login_result.is_ok());
1✔
954

955
        let session_2 = login_result.unwrap();
1✔
956
        let result = session_2.clone();
1✔
957

1✔
958
        assert!(session_2.user.email.is_some()); //TODO: Technically, user details could change for each login. For simplicity, this is not covered yet.
1✔
959
        assert_eq!(session_2.user.email.unwrap(), "foo@bar.de");
1✔
960
        assert!(session_2.user.real_name.is_some());
1✔
961
        assert_eq!(session_2.user.real_name.unwrap(), "Foo Bar");
1✔
962
        assert_eq!(session_2.user.id, user_id);
1✔
963

964
        let expected_duration = session_2.created + duration;
1✔
965
        assert_eq!(session_2.valid_until, expected_duration);
1✔
966

967
        assert!(app_ctx.session_by_id(session_2.id).await.is_ok());
6✔
968

969
        result
1✔
970
    }
1✔
971

972
    async fn anonymous(app_ctx: &ProPostgresContext<NoTls>) {
2✔
973
        let now: DateTime = chrono::offset::Utc::now().into();
2✔
974
        let session = app_ctx.create_anonymous_session().await.unwrap();
30✔
975
        let then: DateTime = chrono::offset::Utc::now().into();
2✔
976

2✔
977
        assert!(session.created >= now && session.created <= then);
2✔
978
        assert!(session.valid_until > session.created);
2✔
979

980
        let session = app_ctx.session_by_id(session.id).await.unwrap();
42✔
981

2✔
982
        let db = app_ctx.session_context(session.clone()).db();
2✔
983

2✔
984
        db.logout().await.unwrap();
6✔
985

2✔
986
        assert!(app_ctx.session_by_id(session.id).await.is_err());
8✔
987
    }
2✔
988

989
    #[ge_context::test]
2✔
990
    async fn it_persists_workflows(app_ctx: ProPostgresContext<NoTls>) {
1✔
991
        let workflow = Workflow {
1✔
992
            operator: TypedOperator::Vector(
1✔
993
                MockPointSource {
1✔
994
                    params: MockPointSourceParams {
1✔
995
                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
996
                    },
1✔
997
                }
1✔
998
                .boxed(),
1✔
999
            ),
1✔
1000
        };
1✔
1001

1002
        let session = app_ctx.create_anonymous_session().await.unwrap();
13✔
1003
        let ctx = app_ctx.session_context(session);
1✔
1004

1✔
1005
        let db = ctx.db();
1✔
1006
        let id = db.register_workflow(workflow).await.unwrap();
3✔
1007

1✔
1008
        drop(ctx);
1✔
1009

1010
        let workflow = db.load_workflow(&id).await.unwrap();
3✔
1011

1✔
1012
        let json = serde_json::to_string(&workflow).unwrap();
1✔
1013
        assert_eq!(
1✔
1014
            json,
1✔
1015
            r#"{"type":"Vector","operator":{"type":"MockPointSource","params":{"points":[{"x":1.0,"y":2.0},{"x":1.0,"y":2.0},{"x":1.0,"y":2.0}]}}}"#
1✔
1016
        );
1✔
1017
    }
1✔
1018

1019
    #[allow(clippy::too_many_lines)]
1020
    #[ge_context::test]
2✔
1021
    async fn it_persists_datasets(app_ctx: ProPostgresContext<NoTls>) {
1✔
1022
        let loading_info = OgrSourceDataset {
1✔
1023
            file_name: PathBuf::from("test.csv"),
1✔
1024
            layer_name: "test.csv".to_owned(),
1✔
1025
            data_type: Some(VectorDataType::MultiPoint),
1✔
1026
            time: OgrSourceDatasetTimeType::Start {
1✔
1027
                start_field: "start".to_owned(),
1✔
1028
                start_format: OgrSourceTimeFormat::Auto,
1✔
1029
                duration: OgrSourceDurationSpec::Zero,
1✔
1030
            },
1✔
1031
            default_geometry: None,
1✔
1032
            columns: Some(OgrSourceColumnSpec {
1✔
1033
                format_specifics: Some(FormatSpecifics::Csv {
1✔
1034
                    header: CsvHeader::Auto,
1✔
1035
                }),
1✔
1036
                x: "x".to_owned(),
1✔
1037
                y: None,
1✔
1038
                int: vec![],
1✔
1039
                float: vec![],
1✔
1040
                text: vec![],
1✔
1041
                bool: vec![],
1✔
1042
                datetime: vec![],
1✔
1043
                rename: None,
1✔
1044
            }),
1✔
1045
            force_ogr_time_filter: false,
1✔
1046
            force_ogr_spatial_filter: false,
1✔
1047
            on_error: OgrSourceErrorSpec::Ignore,
1✔
1048
            sql_query: None,
1✔
1049
            attribute_query: None,
1✔
1050
            cache_ttl: CacheTtlSeconds::default(),
1✔
1051
        };
1✔
1052

1✔
1053
        let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1054
            OgrSourceDataset,
1✔
1055
            VectorResultDescriptor,
1✔
1056
            VectorQueryRectangle,
1✔
1057
        > {
1✔
1058
            loading_info: loading_info.clone(),
1✔
1059
            result_descriptor: VectorResultDescriptor {
1✔
1060
                data_type: VectorDataType::MultiPoint,
1✔
1061
                spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1062
                columns: [(
1✔
1063
                    "foo".to_owned(),
1✔
1064
                    VectorColumnInfo {
1✔
1065
                        data_type: FeatureDataType::Float,
1✔
1066
                        measurement: Measurement::Unitless,
1✔
1067
                    },
1✔
1068
                )]
1✔
1069
                .into_iter()
1✔
1070
                .collect(),
1✔
1071
                time: None,
1✔
1072
                bbox: None,
1✔
1073
            },
1✔
1074
            phantom: Default::default(),
1✔
1075
        });
1✔
1076

1077
        let session = app_ctx.create_anonymous_session().await.unwrap();
15✔
1078

1✔
1079
        let dataset_name = DatasetName::new(Some(session.user.id.to_string()), "my_dataset");
1✔
1080

1✔
1081
        let db = app_ctx.session_context(session.clone()).db();
1✔
1082
        let wrap = db.wrap_meta_data(meta_data);
1✔
1083
        let DatasetIdAndName {
1084
            id: dataset_id,
1✔
1085
            name: dataset_name,
1✔
1086
        } = db
1✔
1087
            .add_dataset(
1✔
1088
                AddDataset {
1✔
1089
                    name: Some(dataset_name.clone()),
1✔
1090
                    display_name: "Ogr Test".to_owned(),
1✔
1091
                    description: "desc".to_owned(),
1✔
1092
                    source_operator: "OgrSource".to_owned(),
1✔
1093
                    symbology: None,
1✔
1094
                    provenance: Some(vec![Provenance {
1✔
1095
                        citation: "citation".to_owned(),
1✔
1096
                        license: "license".to_owned(),
1✔
1097
                        uri: "uri".to_owned(),
1✔
1098
                    }]),
1✔
1099
                },
1✔
1100
                wrap,
1✔
1101
            )
1✔
1102
            .await
173✔
1103
            .unwrap();
1✔
1104

1105
        let datasets = db
1✔
1106
            .list_datasets(DatasetListOptions {
1✔
1107
                filter: None,
1✔
1108
                order: crate::datasets::listing::OrderBy::NameAsc,
1✔
1109
                offset: 0,
1✔
1110
                limit: 10,
1✔
1111
            })
1✔
1112
            .await
3✔
1113
            .unwrap();
1✔
1114

1✔
1115
        assert_eq!(datasets.len(), 1);
1✔
1116

1117
        assert_eq!(
1✔
1118
            datasets[0],
1✔
1119
            DatasetListing {
1✔
1120
                id: dataset_id,
1✔
1121
                name: dataset_name,
1✔
1122
                display_name: "Ogr Test".to_owned(),
1✔
1123
                description: "desc".to_owned(),
1✔
1124
                source_operator: "OgrSource".to_owned(),
1✔
1125
                symbology: None,
1✔
1126
                tags: vec![],
1✔
1127
                result_descriptor: TypedResultDescriptor::Vector(VectorResultDescriptor {
1✔
1128
                    data_type: VectorDataType::MultiPoint,
1✔
1129
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1130
                    columns: [(
1✔
1131
                        "foo".to_owned(),
1✔
1132
                        VectorColumnInfo {
1✔
1133
                            data_type: FeatureDataType::Float,
1✔
1134
                            measurement: Measurement::Unitless
1✔
1135
                        }
1✔
1136
                    )]
1✔
1137
                    .into_iter()
1✔
1138
                    .collect(),
1✔
1139
                    time: None,
1✔
1140
                    bbox: None,
1✔
1141
                })
1✔
1142
            },
1✔
1143
        );
1✔
1144

1145
        let provenance = db.load_provenance(&dataset_id).await.unwrap();
3✔
1146

1✔
1147
        assert_eq!(
1✔
1148
            provenance,
1✔
1149
            ProvenanceOutput {
1✔
1150
                data: dataset_id.into(),
1✔
1151
                provenance: Some(vec![Provenance {
1✔
1152
                    citation: "citation".to_owned(),
1✔
1153
                    license: "license".to_owned(),
1✔
1154
                    uri: "uri".to_owned(),
1✔
1155
                }])
1✔
1156
            }
1✔
1157
        );
1✔
1158

1159
        let meta_data: Box<dyn MetaData<OgrSourceDataset, _, _>> =
1✔
1160
            db.meta_data(&dataset_id.into()).await.unwrap();
8✔
1161

1✔
1162
        assert_eq!(
1✔
1163
            meta_data
1✔
1164
                .loading_info(VectorQueryRectangle {
1✔
1165
                    spatial_bounds: BoundingBox2D::new_unchecked(
1✔
1166
                        (-180., -90.).into(),
1✔
1167
                        (180., 90.).into()
1✔
1168
                    ),
1✔
1169
                    time_interval: TimeInterval::default(),
1✔
1170
                    spatial_resolution: SpatialResolution::zero_point_one(),
1✔
1171
                })
1✔
1172
                .await
×
1173
                .unwrap(),
1✔
1174
            loading_info
1175
        );
1176
    }
1✔
1177

1178
    #[ge_context::test]
2✔
1179
    async fn it_persists_uploads(app_ctx: ProPostgresContext<NoTls>) {
1✔
1180
        let id = UploadId::from_str("2de18cd8-4a38-4111-a445-e3734bc18a80").unwrap();
1✔
1181
        let input = Upload {
1✔
1182
            id,
1✔
1183
            files: vec![FileUpload {
1✔
1184
                id: FileId::from_str("e80afab0-831d-4d40-95d6-1e4dfd277e72").unwrap(),
1✔
1185
                name: "test.csv".to_owned(),
1✔
1186
                byte_size: 1337,
1✔
1187
            }],
1✔
1188
        };
1✔
1189

1190
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
1191

1✔
1192
        let db = app_ctx.session_context(session.clone()).db();
1✔
1193

1✔
1194
        db.create_upload(input.clone()).await.unwrap();
12✔
1195

1196
        let upload = db.load_upload(id).await.unwrap();
3✔
1197

1✔
1198
        assert_eq!(upload, input);
1✔
1199
    }
1✔
1200

1201
    #[allow(clippy::too_many_lines)]
1202
    #[ge_context::test]
2✔
1203
    async fn it_persists_layer_providers(app_ctx: ProPostgresContext<NoTls>) {
1✔
1204
        let db = app_ctx.session_context(UserSession::admin_session()).db();
1✔
1205

1✔
1206
        let provider = NetCdfCfDataProviderDefinition {
1✔
1207
            name: "netcdfcf".to_string(),
1✔
1208
            path: test_data!("netcdf4d/").into(),
1✔
1209
            overviews: test_data!("netcdf4d/overviews/").into(),
1✔
1210
            cache_ttl: CacheTtlSeconds::new(0),
1✔
1211
        };
1✔
1212

1213
        let provider_id = db.add_layer_provider(provider.into()).await.unwrap();
29✔
1214

1215
        let providers = db
1✔
1216
            .list_layer_providers(LayerProviderListingOptions {
1✔
1217
                offset: 0,
1✔
1218
                limit: 10,
1✔
1219
            })
1✔
1220
            .await
3✔
1221
            .unwrap();
1✔
1222

1✔
1223
        assert_eq!(providers.len(), 1);
1✔
1224

1225
        assert_eq!(
1✔
1226
            providers[0],
1✔
1227
            LayerProviderListing {
1✔
1228
                id: provider_id,
1✔
1229
                name: "netcdfcf".to_owned(),
1✔
1230
                description: "NetCdfCfProviderDefinition".to_owned(),
1✔
1231
            }
1✔
1232
        );
1✔
1233

1234
        let provider = db.load_layer_provider(provider_id).await.unwrap();
20✔
1235

1236
        let datasets = provider
1✔
1237
            .load_layer_collection(
1238
                &provider.get_root_layer_collection_id().await.unwrap(),
1✔
1239
                LayerCollectionListOptions {
1✔
1240
                    offset: 0,
1✔
1241
                    limit: 10,
1✔
1242
                },
1✔
1243
            )
1244
            .await
2✔
1245
            .unwrap();
1✔
1246

1✔
1247
        assert_eq!(datasets.items.len(), 3);
1✔
1248
    }
1✔
1249

1250
    #[ge_context::test]
2✔
1251
    async fn it_lists_only_permitted_datasets(app_ctx: ProPostgresContext<NoTls>) {
1✔
1252
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1253
        let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1254

1✔
1255
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1256
        let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1257

1✔
1258
        let descriptor = VectorResultDescriptor {
1✔
1259
            data_type: VectorDataType::Data,
1✔
1260
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1261
            columns: Default::default(),
1✔
1262
            time: None,
1✔
1263
            bbox: None,
1✔
1264
        };
1✔
1265

1✔
1266
        let ds = AddDataset {
1✔
1267
            name: None,
1✔
1268
            display_name: "OgrDataset".to_string(),
1✔
1269
            description: "My Ogr dataset".to_string(),
1✔
1270
            source_operator: "OgrSource".to_string(),
1✔
1271
            symbology: None,
1✔
1272
            provenance: None,
1✔
1273
        };
1✔
1274

1✔
1275
        let meta = StaticMetaData {
1✔
1276
            loading_info: OgrSourceDataset {
1✔
1277
                file_name: Default::default(),
1✔
1278
                layer_name: String::new(),
1✔
1279
                data_type: None,
1✔
1280
                time: Default::default(),
1✔
1281
                default_geometry: None,
1✔
1282
                columns: None,
1✔
1283
                force_ogr_time_filter: false,
1✔
1284
                force_ogr_spatial_filter: false,
1✔
1285
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1286
                sql_query: None,
1✔
1287
                attribute_query: None,
1✔
1288
                cache_ttl: CacheTtlSeconds::default(),
1✔
1289
            },
1✔
1290
            result_descriptor: descriptor.clone(),
1✔
1291
            phantom: Default::default(),
1✔
1292
        };
1✔
1293

1✔
1294
        let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1295

1296
        let _id = db1.add_dataset(ds, meta).await.unwrap();
174✔
1297

1298
        let list1 = db1
1✔
1299
            .list_datasets(DatasetListOptions {
1✔
1300
                filter: None,
1✔
1301
                order: crate::datasets::listing::OrderBy::NameAsc,
1✔
1302
                offset: 0,
1✔
1303
                limit: 1,
1✔
1304
            })
1✔
1305
            .await
3✔
1306
            .unwrap();
1✔
1307

1✔
1308
        assert_eq!(list1.len(), 1);
1✔
1309

1310
        let list2 = db2
1✔
1311
            .list_datasets(DatasetListOptions {
1✔
1312
                filter: None,
1✔
1313
                order: crate::datasets::listing::OrderBy::NameAsc,
1✔
1314
                offset: 0,
1✔
1315
                limit: 1,
1✔
1316
            })
1✔
1317
            .await
3✔
1318
            .unwrap();
1✔
1319

1✔
1320
        assert_eq!(list2.len(), 0);
1✔
1321
    }
1✔
1322

1323
    #[ge_context::test]
2✔
1324
    async fn it_shows_only_permitted_provenance(app_ctx: ProPostgresContext<NoTls>) {
1✔
1325
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1326
        let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1327

1✔
1328
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1329
        let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1330

1✔
1331
        let descriptor = VectorResultDescriptor {
1✔
1332
            data_type: VectorDataType::Data,
1✔
1333
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1334
            columns: Default::default(),
1✔
1335
            time: None,
1✔
1336
            bbox: None,
1✔
1337
        };
1✔
1338

1✔
1339
        let ds = AddDataset {
1✔
1340
            name: None,
1✔
1341
            display_name: "OgrDataset".to_string(),
1✔
1342
            description: "My Ogr dataset".to_string(),
1✔
1343
            source_operator: "OgrSource".to_string(),
1✔
1344
            symbology: None,
1✔
1345
            provenance: None,
1✔
1346
        };
1✔
1347

1✔
1348
        let meta = StaticMetaData {
1✔
1349
            loading_info: OgrSourceDataset {
1✔
1350
                file_name: Default::default(),
1✔
1351
                layer_name: String::new(),
1✔
1352
                data_type: None,
1✔
1353
                time: Default::default(),
1✔
1354
                default_geometry: None,
1✔
1355
                columns: None,
1✔
1356
                force_ogr_time_filter: false,
1✔
1357
                force_ogr_spatial_filter: false,
1✔
1358
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1359
                sql_query: None,
1✔
1360
                attribute_query: None,
1✔
1361
                cache_ttl: CacheTtlSeconds::default(),
1✔
1362
            },
1✔
1363
            result_descriptor: descriptor.clone(),
1✔
1364
            phantom: Default::default(),
1✔
1365
        };
1✔
1366

1✔
1367
        let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1368

1369
        let id = db1.add_dataset(ds, meta).await.unwrap().id;
174✔
1370

1✔
1371
        assert!(db1.load_provenance(&id).await.is_ok());
3✔
1372

1373
        assert!(db2.load_provenance(&id).await.is_err());
3✔
1374
    }
1✔
1375

1376
    #[ge_context::test]
2✔
1377
    async fn it_updates_permissions(app_ctx: ProPostgresContext<NoTls>) {
1✔
1378
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1379
        let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1380

1✔
1381
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1382
        let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1383

1✔
1384
        let descriptor = VectorResultDescriptor {
1✔
1385
            data_type: VectorDataType::Data,
1✔
1386
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1387
            columns: Default::default(),
1✔
1388
            time: None,
1✔
1389
            bbox: None,
1✔
1390
        };
1✔
1391

1✔
1392
        let ds = AddDataset {
1✔
1393
            name: None,
1✔
1394
            display_name: "OgrDataset".to_string(),
1✔
1395
            description: "My Ogr dataset".to_string(),
1✔
1396
            source_operator: "OgrSource".to_string(),
1✔
1397
            symbology: None,
1✔
1398
            provenance: None,
1✔
1399
        };
1✔
1400

1✔
1401
        let meta = StaticMetaData {
1✔
1402
            loading_info: OgrSourceDataset {
1✔
1403
                file_name: Default::default(),
1✔
1404
                layer_name: String::new(),
1✔
1405
                data_type: None,
1✔
1406
                time: Default::default(),
1✔
1407
                default_geometry: None,
1✔
1408
                columns: None,
1✔
1409
                force_ogr_time_filter: false,
1✔
1410
                force_ogr_spatial_filter: false,
1✔
1411
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1412
                sql_query: None,
1✔
1413
                attribute_query: None,
1✔
1414
                cache_ttl: CacheTtlSeconds::default(),
1✔
1415
            },
1✔
1416
            result_descriptor: descriptor.clone(),
1✔
1417
            phantom: Default::default(),
1✔
1418
        };
1✔
1419

1✔
1420
        let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1421

1422
        let id = db1.add_dataset(ds, meta).await.unwrap().id;
173✔
1423

1✔
1424
        assert!(db1.load_dataset(&id).await.is_ok());
3✔
1425

1426
        assert!(db2.load_dataset(&id).await.is_err());
3✔
1427

1428
        db1.add_permission(session2.user.id.into(), id, Permission::Read)
1✔
1429
            .await
8✔
1430
            .unwrap();
1✔
1431

1✔
1432
        assert!(db2.load_dataset(&id).await.is_ok());
3✔
1433
    }
1✔
1434

1435
    #[ge_context::test]
2✔
1436
    async fn it_uses_roles_for_permissions(app_ctx: ProPostgresContext<NoTls>) {
1✔
1437
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
13✔
1438
        let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1439

1✔
1440
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1441
        let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1442

1✔
1443
        let descriptor = VectorResultDescriptor {
1✔
1444
            data_type: VectorDataType::Data,
1✔
1445
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1446
            columns: Default::default(),
1✔
1447
            time: None,
1✔
1448
            bbox: None,
1✔
1449
        };
1✔
1450

1✔
1451
        let ds = AddDataset {
1✔
1452
            name: None,
1✔
1453
            display_name: "OgrDataset".to_string(),
1✔
1454
            description: "My Ogr dataset".to_string(),
1✔
1455
            source_operator: "OgrSource".to_string(),
1✔
1456
            symbology: None,
1✔
1457
            provenance: None,
1✔
1458
        };
1✔
1459

1✔
1460
        let meta = StaticMetaData {
1✔
1461
            loading_info: OgrSourceDataset {
1✔
1462
                file_name: Default::default(),
1✔
1463
                layer_name: String::new(),
1✔
1464
                data_type: None,
1✔
1465
                time: Default::default(),
1✔
1466
                default_geometry: None,
1✔
1467
                columns: None,
1✔
1468
                force_ogr_time_filter: false,
1✔
1469
                force_ogr_spatial_filter: false,
1✔
1470
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1471
                sql_query: None,
1✔
1472
                attribute_query: None,
1✔
1473
                cache_ttl: CacheTtlSeconds::default(),
1✔
1474
            },
1✔
1475
            result_descriptor: descriptor.clone(),
1✔
1476
            phantom: Default::default(),
1✔
1477
        };
1✔
1478

1✔
1479
        let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1480

1481
        let id = db1.add_dataset(ds, meta).await.unwrap().id;
173✔
1482

1✔
1483
        assert!(db1.load_dataset(&id).await.is_ok());
3✔
1484

1485
        assert!(db2.load_dataset(&id).await.is_err());
3✔
1486

1487
        db1.add_permission(session2.user.id.into(), id, Permission::Read)
1✔
1488
            .await
8✔
1489
            .unwrap();
1✔
1490

1✔
1491
        assert!(db2.load_dataset(&id).await.is_ok());
3✔
1492
    }
1✔
1493

1494
    #[ge_context::test]
2✔
1495
    async fn it_secures_meta_data(app_ctx: ProPostgresContext<NoTls>) {
1✔
1496
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1497
        let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1498

1✔
1499
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1500
        let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1501

1✔
1502
        let descriptor = VectorResultDescriptor {
1✔
1503
            data_type: VectorDataType::Data,
1✔
1504
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1505
            columns: Default::default(),
1✔
1506
            time: None,
1✔
1507
            bbox: None,
1✔
1508
        };
1✔
1509

1✔
1510
        let ds = AddDataset {
1✔
1511
            name: None,
1✔
1512
            display_name: "OgrDataset".to_string(),
1✔
1513
            description: "My Ogr dataset".to_string(),
1✔
1514
            source_operator: "OgrSource".to_string(),
1✔
1515
            symbology: None,
1✔
1516
            provenance: None,
1✔
1517
        };
1✔
1518

1✔
1519
        let meta = StaticMetaData {
1✔
1520
            loading_info: OgrSourceDataset {
1✔
1521
                file_name: Default::default(),
1✔
1522
                layer_name: String::new(),
1✔
1523
                data_type: None,
1✔
1524
                time: Default::default(),
1✔
1525
                default_geometry: None,
1✔
1526
                columns: None,
1✔
1527
                force_ogr_time_filter: false,
1✔
1528
                force_ogr_spatial_filter: false,
1✔
1529
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1530
                sql_query: None,
1✔
1531
                attribute_query: None,
1✔
1532
                cache_ttl: CacheTtlSeconds::default(),
1✔
1533
            },
1✔
1534
            result_descriptor: descriptor.clone(),
1✔
1535
            phantom: Default::default(),
1✔
1536
        };
1✔
1537

1✔
1538
        let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1539

1540
        let id = db1.add_dataset(ds, meta).await.unwrap().id;
174✔
1541

1542
        let meta: geoengine_operators::util::Result<
1✔
1543
            Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1544
        > = db1.meta_data(&id.into()).await;
8✔
1545

1546
        assert!(meta.is_ok());
1✔
1547

1548
        let meta: geoengine_operators::util::Result<
1✔
1549
            Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1550
        > = db2.meta_data(&id.into()).await;
4✔
1551

1552
        assert!(meta.is_err());
1✔
1553

1554
        db1.add_permission(session2.user.id.into(), id, Permission::Read)
1✔
1555
            .await
7✔
1556
            .unwrap();
1✔
1557

1558
        let meta: geoengine_operators::util::Result<
1✔
1559
            Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1560
        > = db2.meta_data(&id.into()).await;
7✔
1561

1562
        assert!(meta.is_ok());
1✔
1563
    }
1✔
1564

1565
    #[allow(clippy::too_many_lines)]
1566
    #[ge_context::test]
2✔
1567
    async fn it_loads_all_meta_data_types(app_ctx: ProPostgresContext<NoTls>) {
1✔
1568
        let session = app_ctx.create_anonymous_session().await.unwrap();
15✔
1569

1✔
1570
        let db = app_ctx.session_context(session.clone()).db();
1✔
1571

1✔
1572
        let vector_descriptor = VectorResultDescriptor {
1✔
1573
            data_type: VectorDataType::Data,
1✔
1574
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1575
            columns: Default::default(),
1✔
1576
            time: None,
1✔
1577
            bbox: None,
1✔
1578
        };
1✔
1579

1✔
1580
        let raster_descriptor = RasterResultDescriptor {
1✔
1581
            data_type: RasterDataType::U8,
1✔
1582
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1583
            measurement: Default::default(),
1✔
1584
            time: None,
1✔
1585
            bbox: None,
1✔
1586
            resolution: None,
1✔
1587
        };
1✔
1588

1✔
1589
        let vector_ds = AddDataset {
1✔
1590
            name: None,
1✔
1591
            display_name: "OgrDataset".to_string(),
1✔
1592
            description: "My Ogr dataset".to_string(),
1✔
1593
            source_operator: "OgrSource".to_string(),
1✔
1594
            symbology: None,
1✔
1595
            provenance: None,
1✔
1596
        };
1✔
1597

1✔
1598
        let raster_ds = AddDataset {
1✔
1599
            name: None,
1✔
1600
            display_name: "GdalDataset".to_string(),
1✔
1601
            description: "My Gdal dataset".to_string(),
1✔
1602
            source_operator: "GdalSource".to_string(),
1✔
1603
            symbology: None,
1✔
1604
            provenance: None,
1✔
1605
        };
1✔
1606

1✔
1607
        let gdal_params = GdalDatasetParameters {
1✔
1608
            file_path: Default::default(),
1✔
1609
            rasterband_channel: 0,
1✔
1610
            geo_transform: GdalDatasetGeoTransform {
1✔
1611
                origin_coordinate: Default::default(),
1✔
1612
                x_pixel_size: 0.0,
1✔
1613
                y_pixel_size: 0.0,
1✔
1614
            },
1✔
1615
            width: 0,
1✔
1616
            height: 0,
1✔
1617
            file_not_found_handling: FileNotFoundHandling::NoData,
1✔
1618
            no_data_value: None,
1✔
1619
            properties_mapping: None,
1✔
1620
            gdal_open_options: None,
1✔
1621
            gdal_config_options: None,
1✔
1622
            allow_alphaband_as_mask: false,
1✔
1623
            retry: None,
1✔
1624
        };
1✔
1625

1✔
1626
        let meta = StaticMetaData {
1✔
1627
            loading_info: OgrSourceDataset {
1✔
1628
                file_name: Default::default(),
1✔
1629
                layer_name: String::new(),
1✔
1630
                data_type: None,
1✔
1631
                time: Default::default(),
1✔
1632
                default_geometry: None,
1✔
1633
                columns: None,
1✔
1634
                force_ogr_time_filter: false,
1✔
1635
                force_ogr_spatial_filter: false,
1✔
1636
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1637
                sql_query: None,
1✔
1638
                attribute_query: None,
1✔
1639
                cache_ttl: CacheTtlSeconds::default(),
1✔
1640
            },
1✔
1641
            result_descriptor: vector_descriptor.clone(),
1✔
1642
            phantom: Default::default(),
1✔
1643
        };
1✔
1644

1✔
1645
        let meta = db.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1646

1647
        let id = db.add_dataset(vector_ds, meta).await.unwrap().id;
172✔
1648

1649
        let meta: geoengine_operators::util::Result<
1✔
1650
            Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1651
        > = db.meta_data(&id.into()).await;
9✔
1652

1653
        assert!(meta.is_ok());
1✔
1654

1655
        let meta = GdalMetaDataRegular {
1✔
1656
            result_descriptor: raster_descriptor.clone(),
1✔
1657
            params: gdal_params.clone(),
1✔
1658
            time_placeholders: Default::default(),
1✔
1659
            data_time: Default::default(),
1✔
1660
            step: TimeStep {
1✔
1661
                granularity: TimeGranularity::Millis,
1✔
1662
                step: 0,
1✔
1663
            },
1✔
1664
            cache_ttl: CacheTtlSeconds::default(),
1✔
1665
        };
1✔
1666

1✔
1667
        let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataRegular(meta));
1✔
1668

1669
        let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1670

1671
        let meta: geoengine_operators::util::Result<
1✔
1672
            Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1673
        > = db.meta_data(&id.into()).await;
6✔
1674

1675
        assert!(meta.is_ok());
1✔
1676

1677
        let meta = GdalMetaDataStatic {
1✔
1678
            time: None,
1✔
1679
            params: gdal_params.clone(),
1✔
1680
            result_descriptor: raster_descriptor.clone(),
1✔
1681
            cache_ttl: CacheTtlSeconds::default(),
1✔
1682
        };
1✔
1683

1✔
1684
        let meta = db.wrap_meta_data(MetaDataDefinition::GdalStatic(meta));
1✔
1685

1686
        let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1687

1688
        let meta: geoengine_operators::util::Result<
1✔
1689
            Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1690
        > = db.meta_data(&id.into()).await;
6✔
1691

1692
        assert!(meta.is_ok());
1✔
1693

1694
        let meta = GdalMetaDataList {
1✔
1695
            result_descriptor: raster_descriptor.clone(),
1✔
1696
            params: vec![],
1✔
1697
        };
1✔
1698

1✔
1699
        let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataList(meta));
1✔
1700

1701
        let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1702

1703
        let meta: geoengine_operators::util::Result<
1✔
1704
            Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1705
        > = db.meta_data(&id.into()).await;
6✔
1706

1707
        assert!(meta.is_ok());
1✔
1708

1709
        let meta = GdalMetadataNetCdfCf {
1✔
1710
            result_descriptor: raster_descriptor.clone(),
1✔
1711
            params: gdal_params.clone(),
1✔
1712
            start: TimeInstance::MIN,
1✔
1713
            end: TimeInstance::MAX,
1✔
1714
            step: TimeStep {
1✔
1715
                granularity: TimeGranularity::Millis,
1✔
1716
                step: 0,
1✔
1717
            },
1✔
1718
            band_offset: 0,
1✔
1719
            cache_ttl: CacheTtlSeconds::default(),
1✔
1720
        };
1✔
1721

1✔
1722
        let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetadataNetCdfCf(meta));
1✔
1723

1724
        let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1725

1726
        let meta: geoengine_operators::util::Result<
1✔
1727
            Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1728
        > = db.meta_data(&id.into()).await;
6✔
1729

1730
        assert!(meta.is_ok());
1✔
1731
    }
1✔
1732

1733
    #[ge_context::test]
2✔
1734
    async fn it_secures_uploads(app_ctx: ProPostgresContext<NoTls>) {
1✔
1735
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1736
        let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1737

1✔
1738
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1739
        let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1740

1✔
1741
        let upload_id = UploadId::new();
1✔
1742

1✔
1743
        let upload = Upload {
1✔
1744
            id: upload_id,
1✔
1745
            files: vec![FileUpload {
1✔
1746
                id: FileId::new(),
1✔
1747
                name: "test.bin".to_owned(),
1✔
1748
                byte_size: 1024,
1✔
1749
            }],
1✔
1750
        };
1✔
1751

1✔
1752
        db1.create_upload(upload).await.unwrap();
12✔
1753

1✔
1754
        assert!(db1.load_upload(upload_id).await.is_ok());
3✔
1755

1756
        assert!(db2.load_upload(upload_id).await.is_err());
3✔
1757
    }
1✔
1758

1759
    #[allow(clippy::too_many_lines)]
    #[ge_context::test]
    async fn it_collects_layers(app_ctx: ProPostgresContext<NoTls>) {
        // Builds a small layer/collection hierarchy under the root collection and
        // verifies that listing a collection returns its children as expected.
        let session = admin_login(&app_ctx).await;

        let layer_db = app_ctx.session_context(session).db();

        // Trivial workflow; only its round-tripping through the db matters here.
        let workflow = Workflow {
            operator: TypedOperator::Vector(
                MockPointSource {
                    params: MockPointSourceParams {
                        points: vec![Coordinate2D::new(1., 2.); 3],
                    },
                }
                .boxed(),
            ),
        };

        let root_collection_id = layer_db.get_root_layer_collection_id().await.unwrap();

        // Layer1 lives directly under the root collection.
        let layer1 = layer_db
            .add_layer(
                AddLayer {
                    name: "Layer1".to_string(),
                    description: "Layer 1".to_string(),
                    symbology: None,
                    workflow: workflow.clone(),
                    metadata: [("meta".to_string(), "datum".to_string())].into(),
                    properties: vec![("proper".to_string(), "tee".to_string()).into()],
                },
                &root_collection_id,
            )
            .await
            .unwrap();

        // Loading the layer returns exactly what was stored, wrapped with the
        // internal provider id.
        assert_eq!(
            layer_db.load_layer(&layer1).await.unwrap(),
            crate::layers::layer::Layer {
                id: ProviderLayerId {
                    provider_id: INTERNAL_PROVIDER_ID,
                    layer_id: layer1.clone(),
                },
                name: "Layer1".to_string(),
                description: "Layer 1".to_string(),
                symbology: None,
                workflow: workflow.clone(),
                metadata: [("meta".to_string(), "datum".to_string())].into(),
                properties: vec![("proper".to_string(), "tee".to_string()).into()],
            }
        );

        // Hierarchy: root → Collection1 → { Collection2, Layer2 }.
        let collection1_id = layer_db
            .add_layer_collection(
                AddLayerCollection {
                    name: "Collection1".to_string(),
                    description: "Collection 1".to_string(),
                    properties: Default::default(),
                },
                &root_collection_id,
            )
            .await
            .unwrap();

        let layer2 = layer_db
            .add_layer(
                AddLayer {
                    name: "Layer2".to_string(),
                    description: "Layer 2".to_string(),
                    symbology: None,
                    workflow: workflow.clone(),
                    metadata: Default::default(),
                    properties: Default::default(),
                },
                &collection1_id,
            )
            .await
            .unwrap();

        let collection2_id = layer_db
            .add_layer_collection(
                AddLayerCollection {
                    name: "Collection2".to_string(),
                    description: "Collection 2".to_string(),
                    properties: Default::default(),
                },
                &collection1_id,
            )
            .await
            .unwrap();

        // Collection2 is already a child of Collection1; linking it again must not
        // produce a duplicate — the assertion below expects it listed only once.
        layer_db
            .add_collection_to_parent(&collection2_id, &collection1_id)
            .await
            .unwrap();

        let root_collection = layer_db
            .load_layer_collection(
                &root_collection_id,
                LayerCollectionListOptions {
                    offset: 0,
                    limit: 20,
                },
            )
            .await
            .unwrap();

        // The root listing contains sub-collections before layers; "Unsorted" is a
        // built-in collection with a fixed id.
        assert_eq!(
            root_collection,
            LayerCollection {
                id: ProviderLayerCollectionId {
                    provider_id: INTERNAL_PROVIDER_ID,
                    collection_id: root_collection_id,
                },
                name: "Layers".to_string(),
                description: "All available Geo Engine layers".to_string(),
                items: vec![
                    CollectionItem::Collection(LayerCollectionListing {
                        id: ProviderLayerCollectionId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            collection_id: collection1_id.clone(),
                        },
                        name: "Collection1".to_string(),
                        description: "Collection 1".to_string(),
                        properties: Default::default(),
                    }),
                    CollectionItem::Collection(LayerCollectionListing {
                        id: ProviderLayerCollectionId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            collection_id: LayerCollectionId(UNSORTED_COLLECTION_ID.to_string()),
                        },
                        name: "Unsorted".to_string(),
                        description: "Unsorted Layers".to_string(),
                        properties: Default::default(),
                    }),
                    CollectionItem::Layer(LayerListing {
                        id: ProviderLayerId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            layer_id: layer1,
                        },
                        name: "Layer1".to_string(),
                        description: "Layer 1".to_string(),
                        properties: vec![("proper".to_string(), "tee".to_string()).into()],
                    })
                ],
                entry_label: None,
                properties: vec![],
            }
        );

        let collection1 = layer_db
            .load_layer_collection(
                &collection1_id,
                LayerCollectionListOptions {
                    offset: 0,
                    limit: 20,
                },
            )
            .await
            .unwrap();

        // Collection1 lists Collection2 exactly once (despite the extra
        // add_collection_to_parent above) followed by Layer2.
        assert_eq!(
            collection1,
            LayerCollection {
                id: ProviderLayerCollectionId {
                    provider_id: INTERNAL_PROVIDER_ID,
                    collection_id: collection1_id,
                },
                name: "Collection1".to_string(),
                description: "Collection 1".to_string(),
                items: vec![
                    CollectionItem::Collection(LayerCollectionListing {
                        id: ProviderLayerCollectionId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            collection_id: collection2_id,
                        },
                        name: "Collection2".to_string(),
                        description: "Collection 2".to_string(),
                        properties: Default::default(),
                    }),
                    CollectionItem::Layer(LayerListing {
                        id: ProviderLayerId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            layer_id: layer2,
                        },
                        name: "Layer2".to_string(),
                        description: "Layer 2".to_string(),
                        properties: vec![],
                    })
                ],
                entry_label: None,
                properties: vec![],
            }
        );
    }
1✔
1953

1954
    #[ge_context::test]
2✔
1955
    async fn it_tracks_used_quota_in_postgres(app_ctx: ProPostgresContext<NoTls>) {
1✔
1956
        let _user = app_ctx
1✔
1957
            .register_user(UserRegistration {
1✔
1958
                email: "foo@example.com".to_string(),
1✔
1959
                password: "secret1234".to_string(),
1✔
1960
                real_name: "Foo Bar".to_string(),
1✔
1961
            })
1✔
1962
            .await
3✔
1963
            .unwrap();
1✔
1964

1965
        let session = app_ctx
1✔
1966
            .login(UserCredentials {
1✔
1967
                email: "foo@example.com".to_string(),
1✔
1968
                password: "secret1234".to_string(),
1✔
1969
            })
1✔
1970
            .await
3✔
1971
            .unwrap();
1✔
1972

1973
        let admin_session = admin_login(&app_ctx).await;
6✔
1974

1975
        let quota = initialize_quota_tracking(
1✔
1976
            QuotaTrackingMode::Check,
1✔
1977
            app_ctx.session_context(admin_session).db(),
1✔
1978
            0,
1✔
1979
            60,
1✔
1980
        );
1✔
1981

1✔
1982
        let tracking = quota.create_quota_tracking(&session, ComputationContext::new());
1✔
1983

1✔
1984
        tracking.work_unit_done();
1✔
1985
        tracking.work_unit_done();
1✔
1986

1✔
1987
        let db = app_ctx.session_context(session).db();
1✔
1988

1✔
1989
        // wait for quota to be recorded
1✔
1990
        let mut success = false;
1✔
1991
        for _ in 0..10 {
2✔
1992
            let used = db.quota_used().await.unwrap();
6✔
1993
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
2✔
1994

1995
            if used == 2 {
2✔
1996
                success = true;
1✔
1997
                break;
1✔
1998
            }
1✔
1999
        }
2000

2001
        assert!(success);
1✔
2002
    }
1✔
2003

2004
    #[ge_context::test]
2✔
2005
    async fn it_tracks_available_quota(app_ctx: ProPostgresContext<NoTls>) {
1✔
2006
        let user = app_ctx
1✔
2007
            .register_user(UserRegistration {
1✔
2008
                email: "foo@example.com".to_string(),
1✔
2009
                password: "secret1234".to_string(),
1✔
2010
                real_name: "Foo Bar".to_string(),
1✔
2011
            })
1✔
2012
            .await
1✔
2013
            .unwrap();
1✔
2014

2015
        let session = app_ctx
1✔
2016
            .login(UserCredentials {
1✔
2017
                email: "foo@example.com".to_string(),
1✔
2018
                password: "secret1234".to_string(),
1✔
2019
            })
1✔
2020
            .await
1✔
2021
            .unwrap();
1✔
2022

2023
        let admin_session = admin_login(&app_ctx).await;
5✔
2024

2025
        app_ctx
1✔
2026
            .session_context(admin_session.clone())
1✔
2027
            .db()
1✔
2028
            .update_quota_available_by_user(&user, 1)
1✔
2029
            .await
1✔
2030
            .unwrap();
1✔
2031

1✔
2032
        let quota = initialize_quota_tracking(
1✔
2033
            QuotaTrackingMode::Check,
1✔
2034
            app_ctx.session_context(admin_session).db(),
1✔
2035
            0,
1✔
2036
            60,
1✔
2037
        );
1✔
2038

1✔
2039
        let tracking = quota.create_quota_tracking(&session, ComputationContext::new());
1✔
2040

1✔
2041
        tracking.work_unit_done();
1✔
2042
        tracking.work_unit_done();
1✔
2043

1✔
2044
        let db = app_ctx.session_context(session).db();
1✔
2045

1✔
2046
        // wait for quota to be recorded
1✔
2047
        let mut success = false;
1✔
2048
        for _ in 0..10 {
2✔
2049
            let available = db.quota_available().await.unwrap();
6✔
2050
            tokio::time::sleep(std::time::Duration::from_millis(100)).await;
2✔
2051

2052
            if available == -1 {
2✔
2053
                success = true;
1✔
2054
                break;
1✔
2055
            }
1✔
2056
        }
2057

2058
        assert!(success);
1✔
2059
    }
1✔
2060

2061
    #[ge_context::test]
2✔
2062
    async fn it_updates_quota_in_postgres(app_ctx: ProPostgresContext<NoTls>) {
1✔
2063
        let user = app_ctx
1✔
2064
            .register_user(UserRegistration {
1✔
2065
                email: "foo@example.com".to_string(),
1✔
2066
                password: "secret1234".to_string(),
1✔
2067
                real_name: "Foo Bar".to_string(),
1✔
2068
            })
1✔
2069
            .await
9✔
2070
            .unwrap();
1✔
2071

2072
        let session = app_ctx
1✔
2073
            .login(UserCredentials {
1✔
2074
                email: "foo@example.com".to_string(),
1✔
2075
                password: "secret1234".to_string(),
1✔
2076
            })
1✔
2077
            .await
11✔
2078
            .unwrap();
1✔
2079

1✔
2080
        let db = app_ctx.session_context(session.clone()).db();
1✔
2081
        let admin_db = app_ctx.session_context(UserSession::admin_session()).db();
1✔
2082

1✔
2083
        assert_eq!(
1✔
2084
            db.quota_available().await.unwrap(),
3✔
2085
            crate::util::config::get_config_element::<crate::pro::util::config::Quota>()
1✔
2086
                .unwrap()
1✔
2087
                .default_available_quota
2088
        );
2089

2090
        assert_eq!(
2091
            admin_db.quota_available_by_user(&user).await.unwrap(),
3✔
2092
            crate::util::config::get_config_element::<crate::pro::util::config::Quota>()
1✔
2093
                .unwrap()
1✔
2094
                .default_available_quota
2095
        );
2096

2097
        admin_db
1✔
2098
            .update_quota_available_by_user(&user, 123)
1✔
2099
            .await
3✔
2100
            .unwrap();
1✔
2101

1✔
2102
        assert_eq!(db.quota_available().await.unwrap(), 123);
3✔
2103

2104
        assert_eq!(admin_db.quota_available_by_user(&user).await.unwrap(), 123);
3✔
2105
    }
1✔
2106

2107
    #[allow(clippy::too_many_lines)]
    #[ge_context::test]
    async fn it_removes_layer_collections(app_ctx: ProPostgresContext<NoTls>) {
        // Verifies removal semantics: removing an empty collection, removing a
        // non-root collection (which cascades to its layers), and the prohibition
        // on removing the root collection.
        let session = admin_login(&app_ctx).await;

        let layer_db = app_ctx.session_context(session).db();

        // Trivial workflow; only its storage/removal behavior matters here.
        let layer = AddLayer {
            name: "layer".to_string(),
            description: "description".to_string(),
            workflow: Workflow {
                operator: TypedOperator::Vector(
                    MockPointSource {
                        params: MockPointSourceParams {
                            points: vec![Coordinate2D::new(1., 2.); 3],
                        },
                    }
                    .boxed(),
                ),
            },
            symbology: None,
            metadata: Default::default(),
            properties: Default::default(),
        };

        let root_collection = &layer_db.get_root_layer_collection_id().await.unwrap();

        // Hierarchy: root → "top collection" → { "empty collection", "layer" }.
        let collection = AddLayerCollection {
            name: "top collection".to_string(),
            description: "description".to_string(),
            properties: Default::default(),
        };

        let top_c_id = layer_db
            .add_layer_collection(collection, root_collection)
            .await
            .unwrap();

        let l_id = layer_db.add_layer(layer, &top_c_id).await.unwrap();

        let collection = AddLayerCollection {
            name: "empty collection".to_string(),
            description: "description".to_string(),
            properties: Default::default(),
        };

        let empty_c_id = layer_db
            .add_layer_collection(collection, &top_c_id)
            .await
            .unwrap();

        let items = layer_db
            .load_layer_collection(
                &top_c_id,
                LayerCollectionListOptions {
                    offset: 0,
                    limit: 20,
                },
            )
            .await
            .unwrap();

        // Initially the top collection lists the empty sub-collection and the layer.
        assert_eq!(
            items,
            LayerCollection {
                id: ProviderLayerCollectionId {
                    provider_id: INTERNAL_PROVIDER_ID,
                    collection_id: top_c_id.clone(),
                },
                name: "top collection".to_string(),
                description: "description".to_string(),
                items: vec![
                    CollectionItem::Collection(LayerCollectionListing {
                        id: ProviderLayerCollectionId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            collection_id: empty_c_id.clone(),
                        },
                        name: "empty collection".to_string(),
                        description: "description".to_string(),
                        properties: Default::default(),
                    }),
                    CollectionItem::Layer(LayerListing {
                        id: ProviderLayerId {
                            provider_id: INTERNAL_PROVIDER_ID,
                            layer_id: l_id.clone(),
                        },
                        name: "layer".to_string(),
                        description: "description".to_string(),
                        properties: vec![],
                    })
                ],
                entry_label: None,
                properties: vec![],
            }
        );

        // Removing the empty collection must leave only the layer behind.
        layer_db.remove_layer_collection(&empty_c_id).await.unwrap();

        let items = layer_db
            .load_layer_collection(
                &top_c_id,
                LayerCollectionListOptions {
                    offset: 0,
                    limit: 20,
                },
            )
            .await
            .unwrap();

        assert_eq!(
            items,
            LayerCollection {
                id: ProviderLayerCollectionId {
                    provider_id: INTERNAL_PROVIDER_ID,
                    collection_id: top_c_id.clone(),
                },
                name: "top collection".to_string(),
                description: "description".to_string(),
                items: vec![CollectionItem::Layer(LayerListing {
                    id: ProviderLayerId {
                        provider_id: INTERNAL_PROVIDER_ID,
                        layer_id: l_id.clone(),
                    },
                    name: "layer".to_string(),
                    description: "description".to_string(),
                    properties: vec![],
                })],
                entry_label: None,
                properties: vec![],
            }
        );

        // Removing the top (non-root) collection succeeds…
        layer_db.remove_layer_collection(&top_c_id).await.unwrap();

        // …after which it can no longer be loaded…
        layer_db
            .load_layer_collection(
                &top_c_id,
                LayerCollectionListOptions {
                    offset: 0,
                    limit: 20,
                },
            )
            .await
            .unwrap_err();

        // …and its layer is deleted automatically along with it.
        layer_db.load_layer(&l_id).await.unwrap_err();

        // Removing the root collection is not allowed: the call fails and the root
        // remains loadable afterwards.
        layer_db
            .remove_layer_collection(root_collection)
            .await
            .unwrap_err();
        layer_db
            .load_layer_collection(
                root_collection,
                LayerCollectionListOptions {
                    offset: 0,
                    limit: 20,
                },
            )
            .await
            .unwrap();
    }
1✔
2273

2274
    #[ge_context::test]
2✔
2275
    #[allow(clippy::too_many_lines)]
2276
    async fn it_removes_collections_from_collections(app_ctx: ProPostgresContext<NoTls>) {
1✔
2277
        let session = admin_login(&app_ctx).await;
8✔
2278

2279
        let db = app_ctx.session_context(session).db();
1✔
2280

2281
        let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
2282

2283
        let mid_collection_id = db
1✔
2284
            .add_layer_collection(
1✔
2285
                AddLayerCollection {
1✔
2286
                    name: "mid collection".to_string(),
1✔
2287
                    description: "description".to_string(),
1✔
2288
                    properties: Default::default(),
1✔
2289
                },
1✔
2290
                root_collection_id,
1✔
2291
            )
1✔
2292
            .await
20✔
2293
            .unwrap();
1✔
2294

2295
        let bottom_collection_id = db
1✔
2296
            .add_layer_collection(
1✔
2297
                AddLayerCollection {
1✔
2298
                    name: "bottom collection".to_string(),
1✔
2299
                    description: "description".to_string(),
1✔
2300
                    properties: Default::default(),
1✔
2301
                },
1✔
2302
                &mid_collection_id,
1✔
2303
            )
1✔
2304
            .await
11✔
2305
            .unwrap();
1✔
2306

2307
        let layer_id = db
1✔
2308
            .add_layer(
1✔
2309
                AddLayer {
1✔
2310
                    name: "layer".to_string(),
1✔
2311
                    description: "description".to_string(),
1✔
2312
                    workflow: Workflow {
1✔
2313
                        operator: TypedOperator::Vector(
1✔
2314
                            MockPointSource {
1✔
2315
                                params: MockPointSourceParams {
1✔
2316
                                    points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2317
                                },
1✔
2318
                            }
1✔
2319
                            .boxed(),
1✔
2320
                        ),
1✔
2321
                    },
1✔
2322
                    symbology: None,
1✔
2323
                    metadata: Default::default(),
1✔
2324
                    properties: Default::default(),
1✔
2325
                },
1✔
2326
                &mid_collection_id,
1✔
2327
            )
1✔
2328
            .await
45✔
2329
            .unwrap();
1✔
2330

1✔
2331
        // removing the mid collection…
1✔
2332
        db.remove_layer_collection_from_parent(&mid_collection_id, root_collection_id)
1✔
2333
            .await
13✔
2334
            .unwrap();
1✔
2335

1✔
2336
        // …should remove itself
1✔
2337
        db.load_layer_collection(&mid_collection_id, LayerCollectionListOptions::default())
1✔
2338
            .await
4✔
2339
            .unwrap_err();
1✔
2340

1✔
2341
        // …should remove the bottom collection
1✔
2342
        db.load_layer_collection(&bottom_collection_id, LayerCollectionListOptions::default())
1✔
2343
            .await
4✔
2344
            .unwrap_err();
1✔
2345

1✔
2346
        // … and should remove the layer of the bottom collection
1✔
2347
        db.load_layer(&layer_id).await.unwrap_err();
4✔
2348

1✔
2349
        // the root collection is still there
1✔
2350
        db.load_layer_collection(root_collection_id, LayerCollectionListOptions::default())
1✔
2351
            .await
9✔
2352
            .unwrap();
1✔
2353
    }
1✔
2354

2355
    #[ge_context::test]
2✔
2356
    #[allow(clippy::too_many_lines)]
2357
    async fn it_removes_layers_from_collections(app_ctx: ProPostgresContext<NoTls>) {
1✔
2358
        let session = admin_login(&app_ctx).await;
8✔
2359

2360
        let db = app_ctx.session_context(session).db();
1✔
2361

2362
        let root_collection = &db.get_root_layer_collection_id().await.unwrap();
1✔
2363

2364
        let another_collection = db
1✔
2365
            .add_layer_collection(
1✔
2366
                AddLayerCollection {
1✔
2367
                    name: "top collection".to_string(),
1✔
2368
                    description: "description".to_string(),
1✔
2369
                    properties: Default::default(),
1✔
2370
                },
1✔
2371
                root_collection,
1✔
2372
            )
1✔
2373
            .await
11✔
2374
            .unwrap();
1✔
2375

2376
        let layer_in_one_collection = db
1✔
2377
            .add_layer(
1✔
2378
                AddLayer {
1✔
2379
                    name: "layer 1".to_string(),
1✔
2380
                    description: "description".to_string(),
1✔
2381
                    workflow: Workflow {
1✔
2382
                        operator: TypedOperator::Vector(
1✔
2383
                            MockPointSource {
1✔
2384
                                params: MockPointSourceParams {
1✔
2385
                                    points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2386
                                },
1✔
2387
                            }
1✔
2388
                            .boxed(),
1✔
2389
                        ),
1✔
2390
                    },
1✔
2391
                    symbology: None,
1✔
2392
                    metadata: Default::default(),
1✔
2393
                    properties: Default::default(),
1✔
2394
                },
1✔
2395
                &another_collection,
1✔
2396
            )
1✔
2397
            .await
44✔
2398
            .unwrap();
1✔
2399

2400
        let layer_in_two_collections = db
1✔
2401
            .add_layer(
1✔
2402
                AddLayer {
1✔
2403
                    name: "layer 2".to_string(),
1✔
2404
                    description: "description".to_string(),
1✔
2405
                    workflow: Workflow {
1✔
2406
                        operator: TypedOperator::Vector(
1✔
2407
                            MockPointSource {
1✔
2408
                                params: MockPointSourceParams {
1✔
2409
                                    points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2410
                                },
1✔
2411
                            }
1✔
2412
                            .boxed(),
1✔
2413
                        ),
1✔
2414
                    },
1✔
2415
                    symbology: None,
1✔
2416
                    metadata: Default::default(),
1✔
2417
                    properties: Default::default(),
1✔
2418
                },
1✔
2419
                &another_collection,
1✔
2420
            )
1✔
2421
            .await
13✔
2422
            .unwrap();
1✔
2423

1✔
2424
        db.load_layer(&layer_in_two_collections).await.unwrap();
7✔
2425

1✔
2426
        db.add_layer_to_collection(&layer_in_two_collections, root_collection)
1✔
2427
            .await
7✔
2428
            .unwrap();
1✔
2429

1✔
2430
        // remove first layer --> should be deleted entirely
1✔
2431

1✔
2432
        db.remove_layer_from_collection(&layer_in_one_collection, &another_collection)
1✔
2433
            .await
9✔
2434
            .unwrap();
1✔
2435

2436
        let number_of_layer_in_collection = db
1✔
2437
            .load_layer_collection(
1✔
2438
                &another_collection,
1✔
2439
                LayerCollectionListOptions {
1✔
2440
                    offset: 0,
1✔
2441
                    limit: 20,
1✔
2442
                },
1✔
2443
            )
1✔
2444
            .await
9✔
2445
            .unwrap()
1✔
2446
            .items
1✔
2447
            .len();
1✔
2448
        assert_eq!(
1✔
2449
            number_of_layer_in_collection,
1✔
2450
            1 /* only the other collection should be here */
1✔
2451
        );
1✔
2452

2453
        db.load_layer(&layer_in_one_collection).await.unwrap_err();
4✔
2454

1✔
2455
        // remove second layer --> should only be gone in collection
1✔
2456

1✔
2457
        db.remove_layer_from_collection(&layer_in_two_collections, &another_collection)
1✔
2458
            .await
9✔
2459
            .unwrap();
1✔
2460

2461
        let number_of_layer_in_collection = db
1✔
2462
            .load_layer_collection(
1✔
2463
                &another_collection,
1✔
2464
                LayerCollectionListOptions {
1✔
2465
                    offset: 0,
1✔
2466
                    limit: 20,
1✔
2467
                },
1✔
2468
            )
1✔
2469
            .await
9✔
2470
            .unwrap()
1✔
2471
            .items
1✔
2472
            .len();
1✔
2473
        assert_eq!(
1✔
2474
            number_of_layer_in_collection,
1✔
2475
            0 /* both layers were deleted */
1✔
2476
        );
1✔
2477

2478
        db.load_layer(&layer_in_two_collections).await.unwrap();
7✔
2479
    }
1✔
2480

2481
    #[ge_context::test]
2✔
2482
    #[allow(clippy::too_many_lines)]
2483
    async fn it_deletes_dataset(app_ctx: ProPostgresContext<NoTls>) {
1✔
2484
        let loading_info = OgrSourceDataset {
1✔
2485
            file_name: PathBuf::from("test.csv"),
1✔
2486
            layer_name: "test.csv".to_owned(),
1✔
2487
            data_type: Some(VectorDataType::MultiPoint),
1✔
2488
            time: OgrSourceDatasetTimeType::Start {
1✔
2489
                start_field: "start".to_owned(),
1✔
2490
                start_format: OgrSourceTimeFormat::Auto,
1✔
2491
                duration: OgrSourceDurationSpec::Zero,
1✔
2492
            },
1✔
2493
            default_geometry: None,
1✔
2494
            columns: Some(OgrSourceColumnSpec {
1✔
2495
                format_specifics: Some(FormatSpecifics::Csv {
1✔
2496
                    header: CsvHeader::Auto,
1✔
2497
                }),
1✔
2498
                x: "x".to_owned(),
1✔
2499
                y: None,
1✔
2500
                int: vec![],
1✔
2501
                float: vec![],
1✔
2502
                text: vec![],
1✔
2503
                bool: vec![],
1✔
2504
                datetime: vec![],
1✔
2505
                rename: None,
1✔
2506
            }),
1✔
2507
            force_ogr_time_filter: false,
1✔
2508
            force_ogr_spatial_filter: false,
1✔
2509
            on_error: OgrSourceErrorSpec::Ignore,
1✔
2510
            sql_query: None,
1✔
2511
            attribute_query: None,
1✔
2512
            cache_ttl: CacheTtlSeconds::default(),
1✔
2513
        };
1✔
2514

1✔
2515
        let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
2516
            OgrSourceDataset,
1✔
2517
            VectorResultDescriptor,
1✔
2518
            VectorQueryRectangle,
1✔
2519
        > {
1✔
2520
            loading_info: loading_info.clone(),
1✔
2521
            result_descriptor: VectorResultDescriptor {
1✔
2522
                data_type: VectorDataType::MultiPoint,
1✔
2523
                spatial_reference: SpatialReference::epsg_4326().into(),
1✔
2524
                columns: [(
1✔
2525
                    "foo".to_owned(),
1✔
2526
                    VectorColumnInfo {
1✔
2527
                        data_type: FeatureDataType::Float,
1✔
2528
                        measurement: Measurement::Unitless,
1✔
2529
                    },
1✔
2530
                )]
1✔
2531
                .into_iter()
1✔
2532
                .collect(),
1✔
2533
                time: None,
1✔
2534
                bbox: None,
1✔
2535
            },
1✔
2536
            phantom: Default::default(),
1✔
2537
        });
1✔
2538

2539
        let session = app_ctx.create_anonymous_session().await.unwrap();
15✔
2540

1✔
2541
        let dataset_name = DatasetName::new(Some(session.user.id.to_string()), "my_dataset");
1✔
2542

1✔
2543
        let db = app_ctx.session_context(session.clone()).db();
1✔
2544
        let wrap = db.wrap_meta_data(meta_data);
1✔
2545
        let dataset_id = db
1✔
2546
            .add_dataset(
1✔
2547
                AddDataset {
1✔
2548
                    name: Some(dataset_name),
1✔
2549
                    display_name: "Ogr Test".to_owned(),
1✔
2550
                    description: "desc".to_owned(),
1✔
2551
                    source_operator: "OgrSource".to_owned(),
1✔
2552
                    symbology: None,
1✔
2553
                    provenance: Some(vec![Provenance {
1✔
2554
                        citation: "citation".to_owned(),
1✔
2555
                        license: "license".to_owned(),
1✔
2556
                        uri: "uri".to_owned(),
1✔
2557
                    }]),
1✔
2558
                },
1✔
2559
                wrap,
1✔
2560
            )
1✔
2561
            .await
174✔
2562
            .unwrap()
1✔
2563
            .id;
1✔
2564

1✔
2565
        assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
2566

2567
        db.delete_dataset(dataset_id).await.unwrap();
10✔
2568

1✔
2569
        assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
2570
    }
1✔
2571

2572
    #[ge_context::test]
2✔
2573
    #[allow(clippy::too_many_lines)]
2574
    async fn it_deletes_admin_dataset(app_ctx: ProPostgresContext<NoTls>) {
1✔
2575
        let dataset_name = DatasetName::new(None, "my_dataset");
1✔
2576

1✔
2577
        let loading_info = OgrSourceDataset {
1✔
2578
            file_name: PathBuf::from("test.csv"),
1✔
2579
            layer_name: "test.csv".to_owned(),
1✔
2580
            data_type: Some(VectorDataType::MultiPoint),
1✔
2581
            time: OgrSourceDatasetTimeType::Start {
1✔
2582
                start_field: "start".to_owned(),
1✔
2583
                start_format: OgrSourceTimeFormat::Auto,
1✔
2584
                duration: OgrSourceDurationSpec::Zero,
1✔
2585
            },
1✔
2586
            default_geometry: None,
1✔
2587
            columns: Some(OgrSourceColumnSpec {
1✔
2588
                format_specifics: Some(FormatSpecifics::Csv {
1✔
2589
                    header: CsvHeader::Auto,
1✔
2590
                }),
1✔
2591
                x: "x".to_owned(),
1✔
2592
                y: None,
1✔
2593
                int: vec![],
1✔
2594
                float: vec![],
1✔
2595
                text: vec![],
1✔
2596
                bool: vec![],
1✔
2597
                datetime: vec![],
1✔
2598
                rename: None,
1✔
2599
            }),
1✔
2600
            force_ogr_time_filter: false,
1✔
2601
            force_ogr_spatial_filter: false,
1✔
2602
            on_error: OgrSourceErrorSpec::Ignore,
1✔
2603
            sql_query: None,
1✔
2604
            attribute_query: None,
1✔
2605
            cache_ttl: CacheTtlSeconds::default(),
1✔
2606
        };
1✔
2607

1✔
2608
        let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
2609
            OgrSourceDataset,
1✔
2610
            VectorResultDescriptor,
1✔
2611
            VectorQueryRectangle,
1✔
2612
        > {
1✔
2613
            loading_info: loading_info.clone(),
1✔
2614
            result_descriptor: VectorResultDescriptor {
1✔
2615
                data_type: VectorDataType::MultiPoint,
1✔
2616
                spatial_reference: SpatialReference::epsg_4326().into(),
1✔
2617
                columns: [(
1✔
2618
                    "foo".to_owned(),
1✔
2619
                    VectorColumnInfo {
1✔
2620
                        data_type: FeatureDataType::Float,
1✔
2621
                        measurement: Measurement::Unitless,
1✔
2622
                    },
1✔
2623
                )]
1✔
2624
                .into_iter()
1✔
2625
                .collect(),
1✔
2626
                time: None,
1✔
2627
                bbox: None,
1✔
2628
            },
1✔
2629
            phantom: Default::default(),
1✔
2630
        });
1✔
2631

2632
        let session = admin_login(&app_ctx).await;
2✔
2633

2634
        let db = app_ctx.session_context(session).db();
1✔
2635
        let wrap = db.wrap_meta_data(meta_data);
1✔
2636
        let dataset_id = db
1✔
2637
            .add_dataset(
1✔
2638
                AddDataset {
1✔
2639
                    name: Some(dataset_name),
1✔
2640
                    display_name: "Ogr Test".to_owned(),
1✔
2641
                    description: "desc".to_owned(),
1✔
2642
                    source_operator: "OgrSource".to_owned(),
1✔
2643
                    symbology: None,
1✔
2644
                    provenance: Some(vec![Provenance {
1✔
2645
                        citation: "citation".to_owned(),
1✔
2646
                        license: "license".to_owned(),
1✔
2647
                        uri: "uri".to_owned(),
1✔
2648
                    }]),
1✔
2649
                },
1✔
2650
                wrap,
1✔
2651
            )
1✔
2652
            .await
155✔
2653
            .unwrap()
1✔
2654
            .id;
1✔
2655

1✔
2656
        assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
2657

2658
        db.delete_dataset(dataset_id).await.unwrap();
10✔
2659

1✔
2660
        assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
2661
    }
1✔
2662

2663
    #[ge_context::test]
2✔
2664
    async fn test_missing_layer_dataset_in_collection_listing(app_ctx: ProPostgresContext<NoTls>) {
1✔
2665
        let session = admin_login(&app_ctx).await;
11✔
2666
        let db = app_ctx.session_context(session).db();
1✔
2667

2668
        let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
2669

2670
        let top_collection_id = db
1✔
2671
            .add_layer_collection(
1✔
2672
                AddLayerCollection {
1✔
2673
                    name: "top collection".to_string(),
1✔
2674
                    description: "description".to_string(),
1✔
2675
                    properties: Default::default(),
1✔
2676
                },
1✔
2677
                root_collection_id,
1✔
2678
            )
1✔
2679
            .await
20✔
2680
            .unwrap();
1✔
2681

1✔
2682
        let faux_layer = LayerId("faux".to_string());
1✔
2683

1✔
2684
        // this should fail
1✔
2685
        db.add_layer_to_collection(&faux_layer, &top_collection_id)
1✔
2686
            .await
4✔
2687
            .unwrap_err();
1✔
2688

2689
        let root_collection_layers = db
1✔
2690
            .load_layer_collection(
1✔
2691
                &top_collection_id,
1✔
2692
                LayerCollectionListOptions {
1✔
2693
                    offset: 0,
1✔
2694
                    limit: 20,
1✔
2695
                },
1✔
2696
            )
1✔
2697
            .await
9✔
2698
            .unwrap();
1✔
2699

1✔
2700
        assert_eq!(
1✔
2701
            root_collection_layers,
1✔
2702
            LayerCollection {
1✔
2703
                id: ProviderLayerCollectionId {
1✔
2704
                    provider_id: DataProviderId(
1✔
2705
                        "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74".try_into().unwrap()
1✔
2706
                    ),
1✔
2707
                    collection_id: top_collection_id.clone(),
1✔
2708
                },
1✔
2709
                name: "top collection".to_string(),
1✔
2710
                description: "description".to_string(),
1✔
2711
                items: vec![],
1✔
2712
                entry_label: None,
1✔
2713
                properties: vec![],
1✔
2714
            }
1✔
2715
        );
1✔
2716
    }
1✔
2717

2718
    #[allow(clippy::too_many_lines)]
2719
    #[ge_context::test]
2✔
2720
    async fn it_restricts_layer_permissions(app_ctx: ProPostgresContext<NoTls>) {
1✔
2721
        let admin_session = admin_login(&app_ctx).await;
9✔
2722
        let session1 = app_ctx.create_anonymous_session().await.unwrap();
2✔
2723

1✔
2724
        let admin_db = app_ctx.session_context(admin_session.clone()).db();
1✔
2725
        let db1 = app_ctx.session_context(session1.clone()).db();
1✔
2726

2727
        let root = admin_db.get_root_layer_collection_id().await.unwrap();
1✔
2728

2729
        // add new collection as admin
2730
        let new_collection_id = admin_db
1✔
2731
            .add_layer_collection(
1✔
2732
                AddLayerCollection {
1✔
2733
                    name: "admin collection".to_string(),
1✔
2734
                    description: String::new(),
1✔
2735
                    properties: Default::default(),
1✔
2736
                },
1✔
2737
                &root,
1✔
2738
            )
1✔
2739
            .await
20✔
2740
            .unwrap();
1✔
2741

2742
        // load as regular user, not visible
2743
        let collection = db1
1✔
2744
            .load_layer_collection(
1✔
2745
                &root,
1✔
2746
                LayerCollectionListOptions {
1✔
2747
                    offset: 0,
1✔
2748
                    limit: 10,
1✔
2749
                },
1✔
2750
            )
1✔
2751
            .await
9✔
2752
            .unwrap();
1✔
2753
        assert!(!collection.items.iter().any(|c| match c {
1✔
2754
            CollectionItem::Collection(c) => c.id.collection_id == new_collection_id,
1✔
2755
            CollectionItem::Layer(_) => false,
×
2756
        }));
1✔
2757

2758
        // give user read permission
2759
        admin_db
1✔
2760
            .add_permission(
1✔
2761
                session1.user.id.into(),
1✔
2762
                new_collection_id.clone(),
1✔
2763
                Permission::Read,
1✔
2764
            )
1✔
2765
            .await
7✔
2766
            .unwrap();
1✔
2767

2768
        // now visible
2769
        let collection = db1
1✔
2770
            .load_layer_collection(
1✔
2771
                &root,
1✔
2772
                LayerCollectionListOptions {
1✔
2773
                    offset: 0,
1✔
2774
                    limit: 10,
1✔
2775
                },
1✔
2776
            )
1✔
2777
            .await
9✔
2778
            .unwrap();
1✔
2779

1✔
2780
        assert!(collection.items.iter().any(|c| match c {
1✔
2781
            CollectionItem::Collection(c) => c.id.collection_id == new_collection_id,
1✔
2782
            CollectionItem::Layer(_) => false,
×
2783
        }));
1✔
2784

2785
        // add new layer in the collection as user, fails because only read permission
2786
        let result = db1
1✔
2787
            .add_layer_collection(
1✔
2788
                AddLayerCollection {
1✔
2789
                    name: "user layer".to_string(),
1✔
2790
                    description: String::new(),
1✔
2791
                    properties: Default::default(),
1✔
2792
                },
1✔
2793
                &new_collection_id,
1✔
2794
            )
1✔
2795
            .await;
4✔
2796

2797
        assert!(result.is_err());
1✔
2798

2799
        // give user owner permission
2800
        admin_db
1✔
2801
            .add_permission(
1✔
2802
                session1.user.id.into(),
1✔
2803
                new_collection_id.clone(),
1✔
2804
                Permission::Owner,
1✔
2805
            )
1✔
2806
            .await
7✔
2807
            .unwrap();
1✔
2808

1✔
2809
        // add now works
1✔
2810
        db1.add_layer_collection(
1✔
2811
            AddLayerCollection {
1✔
2812
                name: "user layer".to_string(),
1✔
2813
                description: String::new(),
1✔
2814
                properties: Default::default(),
1✔
2815
            },
1✔
2816
            &new_collection_id,
1✔
2817
        )
1✔
2818
        .await
11✔
2819
        .unwrap();
1✔
2820

1✔
2821
        // remove permissions again
1✔
2822
        admin_db
1✔
2823
            .remove_permission(
1✔
2824
                session1.user.id.into(),
1✔
2825
                new_collection_id.clone(),
1✔
2826
                Permission::Read,
1✔
2827
            )
1✔
2828
            .await
7✔
2829
            .unwrap();
1✔
2830
        admin_db
1✔
2831
            .remove_permission(
1✔
2832
                session1.user.id.into(),
1✔
2833
                new_collection_id.clone(),
1✔
2834
                Permission::Owner,
1✔
2835
            )
1✔
2836
            .await
7✔
2837
            .unwrap();
1✔
2838

2839
        // access is gone now
2840
        let result = db1
1✔
2841
            .add_layer_collection(
1✔
2842
                AddLayerCollection {
1✔
2843
                    name: "user layer".to_string(),
1✔
2844
                    description: String::new(),
1✔
2845
                    properties: Default::default(),
1✔
2846
                },
1✔
2847
                &root,
1✔
2848
            )
1✔
2849
            .await;
4✔
2850

2851
        assert!(result.is_err());
1✔
2852

2853
        let collection = db1
1✔
2854
            .load_layer_collection(
1✔
2855
                &root,
1✔
2856
                LayerCollectionListOptions {
1✔
2857
                    offset: 0,
1✔
2858
                    limit: 10,
1✔
2859
                },
1✔
2860
            )
1✔
2861
            .await
9✔
2862
            .unwrap();
1✔
2863

1✔
2864
        assert!(!collection.items.iter().any(|c| match c {
1✔
2865
            CollectionItem::Collection(c) => c.id.collection_id == new_collection_id,
1✔
2866
            CollectionItem::Layer(_) => false,
×
2867
        }));
1✔
2868
    }
1✔
2869

2870
    #[allow(clippy::too_many_lines)]
2871
    #[ge_context::test]
2✔
2872
    async fn it_handles_user_roles(app_ctx: ProPostgresContext<NoTls>) {
1✔
2873
        let admin_session = admin_login(&app_ctx).await;
11✔
2874
        let user_id = app_ctx
1✔
2875
            .register_user(UserRegistration {
1✔
2876
                email: "foo@example.com".to_string(),
1✔
2877
                password: "secret123".to_string(),
1✔
2878
                real_name: "Foo Bar".to_string(),
1✔
2879
            })
1✔
2880
            .await
4✔
2881
            .unwrap();
1✔
2882

1✔
2883
        let admin_db = app_ctx.session_context(admin_session.clone()).db();
1✔
2884

2885
        // create a new role
2886
        let role_id = admin_db.add_role("foo").await.unwrap();
1✔
2887

2888
        let user_session = app_ctx
1✔
2889
            .login(UserCredentials {
1✔
2890
                email: "foo@example.com".to_string(),
1✔
2891
                password: "secret123".to_string(),
1✔
2892
            })
1✔
2893
            .await
1✔
2894
            .unwrap();
1✔
2895

1✔
2896
        // user does not have the role yet
1✔
2897

1✔
2898
        assert!(!user_session.roles.contains(&role_id));
1✔
2899

2900
        //user can query their role descriptions (user role and registered user)
2901
        assert_eq!(user_session.roles.len(), 2);
1✔
2902

2903
        let expected_user_role_description = RoleDescription {
1✔
2904
            role: Role {
1✔
2905
                id: RoleId::from(user_id),
1✔
2906
                name: "foo@example.com".to_string(),
1✔
2907
            },
1✔
2908
            individual: true,
1✔
2909
        };
1✔
2910
        let expected_registered_role_description = RoleDescription {
1✔
2911
            role: Role {
1✔
2912
                id: Role::registered_user_role_id(),
1✔
2913
                name: "user".to_string(),
1✔
2914
            },
1✔
2915
            individual: false,
1✔
2916
        };
1✔
2917

2918
        let user_role_descriptions = app_ctx
1✔
2919
            .session_context(user_session.clone())
1✔
2920
            .db()
1✔
2921
            .get_role_descriptions(&user_id)
1✔
2922
            .await
3✔
2923
            .unwrap();
1✔
2924
        assert_eq!(
1✔
2925
            vec![
1✔
2926
                expected_user_role_description.clone(),
1✔
2927
                expected_registered_role_description.clone(),
1✔
2928
            ],
1✔
2929
            user_role_descriptions
1✔
2930
        );
1✔
2931

2932
        // we assign the role to the user
2933
        admin_db.assign_role(&role_id, &user_id).await.unwrap();
3✔
2934

2935
        let user_session = app_ctx
1✔
2936
            .login(UserCredentials {
1✔
2937
                email: "foo@example.com".to_string(),
1✔
2938
                password: "secret123".to_string(),
1✔
2939
            })
1✔
2940
            .await
5✔
2941
            .unwrap();
1✔
2942

1✔
2943
        // should be present now
1✔
2944
        assert!(user_session.roles.contains(&role_id));
1✔
2945

2946
        //user can query their role descriptions (now an additional foo role)
2947
        let expected_foo_role_description = RoleDescription {
1✔
2948
            role: Role {
1✔
2949
                id: role_id,
1✔
2950
                name: "foo".to_string(),
1✔
2951
            },
1✔
2952
            individual: false,
1✔
2953
        };
1✔
2954

2955
        let user_role_descriptions = app_ctx
1✔
2956
            .session_context(user_session.clone())
1✔
2957
            .db()
1✔
2958
            .get_role_descriptions(&user_id)
1✔
2959
            .await
×
2960
            .unwrap();
1✔
2961
        assert_eq!(
1✔
2962
            vec![
1✔
2963
                expected_foo_role_description,
1✔
2964
                expected_user_role_description.clone(),
1✔
2965
                expected_registered_role_description.clone(),
1✔
2966
            ],
1✔
2967
            user_role_descriptions
1✔
2968
        );
1✔
2969

2970
        // we revoke it
2971
        admin_db.revoke_role(&role_id, &user_id).await.unwrap();
1✔
2972

2973
        let user_session = app_ctx
1✔
2974
            .login(UserCredentials {
1✔
2975
                email: "foo@example.com".to_string(),
1✔
2976
                password: "secret123".to_string(),
1✔
2977
            })
1✔
2978
            .await
1✔
2979
            .unwrap();
1✔
2980

1✔
2981
        // the role is gone now
1✔
2982
        assert!(!user_session.roles.contains(&role_id));
1✔
2983

2984
        //user can query their role descriptions (user role and registered user)
2985
        let user_role_descriptions = app_ctx
1✔
2986
            .session_context(user_session.clone())
1✔
2987
            .db()
1✔
2988
            .get_role_descriptions(&user_id)
1✔
2989
            .await
×
2990
            .unwrap();
1✔
2991
        assert_eq!(
1✔
2992
            vec![
1✔
2993
                expected_user_role_description.clone(),
1✔
2994
                expected_registered_role_description.clone(),
1✔
2995
            ],
1✔
2996
            user_role_descriptions
1✔
2997
        );
1✔
2998

2999
        // assign it again and then delete the whole role, should not be present at user
3000

3001
        admin_db.assign_role(&role_id, &user_id).await.unwrap();
3✔
3002

1✔
3003
        admin_db.remove_role(&role_id).await.unwrap();
3✔
3004

3005
        let user_session = app_ctx
1✔
3006
            .login(UserCredentials {
1✔
3007
                email: "foo@example.com".to_string(),
1✔
3008
                password: "secret123".to_string(),
1✔
3009
            })
1✔
3010
            .await
5✔
3011
            .unwrap();
1✔
3012

1✔
3013
        assert!(!user_session.roles.contains(&role_id));
1✔
3014

3015
        //user can query their role descriptions (user role and registered user)
3016
        let user_role_descriptions = app_ctx
1✔
3017
            .session_context(user_session.clone())
1✔
3018
            .db()
1✔
3019
            .get_role_descriptions(&user_id)
1✔
3020
            .await
×
3021
            .unwrap();
1✔
3022
        assert_eq!(
1✔
3023
            vec![
1✔
3024
                expected_user_role_description,
1✔
3025
                expected_registered_role_description.clone(),
1✔
3026
            ],
1✔
3027
            user_role_descriptions
1✔
3028
        );
1✔
3029
    }
1✔
3030

3031
    #[allow(clippy::too_many_lines)]
3032
    #[ge_context::test]
2✔
3033
    async fn it_updates_project_layer_symbology(app_ctx: ProPostgresContext<NoTls>) {
1✔
3034
        let session = app_ctx.create_anonymous_session().await.unwrap();
15✔
3035

3036
        let (_, workflow_id) = register_ndvi_workflow_helper(&app_ctx).await;
193✔
3037

3038
        let db = app_ctx.session_context(session.clone()).db();
1✔
3039

1✔
3040
        let create_project: CreateProject = serde_json::from_value(json!({
1✔
3041
            "name": "Default",
1✔
3042
            "description": "Default project",
1✔
3043
            "bounds": {
1✔
3044
                "boundingBox": {
1✔
3045
                    "lowerLeftCoordinate": {
1✔
3046
                        "x": -180,
1✔
3047
                        "y": -90
1✔
3048
                    },
1✔
3049
                    "upperRightCoordinate": {
1✔
3050
                        "x": 180,
1✔
3051
                        "y": 90
1✔
3052
                    }
1✔
3053
                },
1✔
3054
                "spatialReference": "EPSG:4326",
1✔
3055
                "timeInterval": {
1✔
3056
                    "start": 1_396_353_600_000i64,
1✔
3057
                    "end": 1_396_353_600_000i64
1✔
3058
                }
1✔
3059
            },
1✔
3060
            "timeStep": {
1✔
3061
                "step": 1,
1✔
3062
                "granularity": "months"
1✔
3063
            }
1✔
3064
        }))
1✔
3065
        .unwrap();
1✔
3066

3067
        let project_id = db.create_project(create_project).await.unwrap();
13✔
3068

1✔
3069
        let update: UpdateProject = serde_json::from_value(json!({
1✔
3070
            "id": project_id.to_string(),
1✔
3071
            "layers": [{
1✔
3072
                "name": "NDVI",
1✔
3073
                "workflow": workflow_id.to_string(),
1✔
3074
                "visibility": {
1✔
3075
                    "data": true,
1✔
3076
                    "legend": false
1✔
3077
                },
1✔
3078
                "symbology": {
1✔
3079
                    "type": "raster",
1✔
3080
                    "opacity": 1,
1✔
3081
                    "colorizer": {
1✔
3082
                        "type": "linearGradient",
1✔
3083
                        "breakpoints": [{
1✔
3084
                            "value": 1,
1✔
3085
                            "color": [0, 0, 0, 255]
1✔
3086
                        }, {
1✔
3087
                            "value": 255,
1✔
3088
                            "color": [255, 255, 255, 255]
1✔
3089
                        }],
1✔
3090
                        "noDataColor": [0, 0, 0, 0],
1✔
3091
                        "overColor": [255, 255, 255, 127],
1✔
3092
                        "underColor": [255, 255, 255, 127]
1✔
3093
                    }
1✔
3094
                }
1✔
3095
            }]
1✔
3096
        }))
1✔
3097
        .unwrap();
1✔
3098

1✔
3099
        db.update_project(update).await.unwrap();
76✔
3100

1✔
3101
        let update: UpdateProject = serde_json::from_value(json!({
1✔
3102
            "id": project_id.to_string(),
1✔
3103
            "layers": [{
1✔
3104
                "name": "NDVI",
1✔
3105
                "workflow": workflow_id.to_string(),
1✔
3106
                "visibility": {
1✔
3107
                    "data": true,
1✔
3108
                    "legend": false
1✔
3109
                },
1✔
3110
                "symbology": {
1✔
3111
                    "type": "raster",
1✔
3112
                    "opacity": 1,
1✔
3113
                    "colorizer": {
1✔
3114
                        "type": "linearGradient",
1✔
3115
                        "breakpoints": [{
1✔
3116
                            "value": 1,
1✔
3117
                            "color": [0, 0, 4, 255]
1✔
3118
                        }, {
1✔
3119
                            "value": 17.866_666_666_666_667,
1✔
3120
                            "color": [11, 9, 36, 255]
1✔
3121
                        }, {
1✔
3122
                            "value": 34.733_333_333_333_334,
1✔
3123
                            "color": [32, 17, 75, 255]
1✔
3124
                        }, {
1✔
3125
                            "value": 51.6,
1✔
3126
                            "color": [59, 15, 112, 255]
1✔
3127
                        }, {
1✔
3128
                            "value": 68.466_666_666_666_67,
1✔
3129
                            "color": [87, 21, 126, 255]
1✔
3130
                        }, {
1✔
3131
                            "value": 85.333_333_333_333_33,
1✔
3132
                            "color": [114, 31, 129, 255]
1✔
3133
                        }, {
1✔
3134
                            "value": 102.199_999_999_999_99,
1✔
3135
                            "color": [140, 41, 129, 255]
1✔
3136
                        }, {
1✔
3137
                            "value": 119.066_666_666_666_65,
1✔
3138
                            "color": [168, 50, 125, 255]
1✔
3139
                        }, {
1✔
3140
                            "value": 135.933_333_333_333_34,
1✔
3141
                            "color": [196, 60, 117, 255]
1✔
3142
                        }, {
1✔
3143
                            "value": 152.799_999_999_999_98,
1✔
3144
                            "color": [222, 73, 104, 255]
1✔
3145
                        }, {
1✔
3146
                            "value": 169.666_666_666_666_66,
1✔
3147
                            "color": [241, 96, 93, 255]
1✔
3148
                        }, {
1✔
3149
                            "value": 186.533_333_333_333_33,
1✔
3150
                            "color": [250, 127, 94, 255]
1✔
3151
                        }, {
1✔
3152
                            "value": 203.399_999_999_999_98,
1✔
3153
                            "color": [254, 159, 109, 255]
1✔
3154
                        }, {
1✔
3155
                            "value": 220.266_666_666_666_65,
1✔
3156
                            "color": [254, 191, 132, 255]
1✔
3157
                        }, {
1✔
3158
                            "value": 237.133_333_333_333_3,
1✔
3159
                            "color": [253, 222, 160, 255]
1✔
3160
                        }, {
1✔
3161
                            "value": 254,
1✔
3162
                            "color": [252, 253, 191, 255]
1✔
3163
                        }],
1✔
3164
                        "noDataColor": [0, 0, 0, 0],
1✔
3165
                        "overColor": [255, 255, 255, 127],
1✔
3166
                        "underColor": [255, 255, 255, 127]
1✔
3167
                    }
1✔
3168
                }
1✔
3169
            }]
1✔
3170
        }))
1✔
3171
        .unwrap();
1✔
3172

1✔
3173
        db.update_project(update).await.unwrap();
22✔
3174

1✔
3175
        let update: UpdateProject = serde_json::from_value(json!({
1✔
3176
            "id": project_id.to_string(),
1✔
3177
            "layers": [{
1✔
3178
                "name": "NDVI",
1✔
3179
                "workflow": workflow_id.to_string(),
1✔
3180
                "visibility": {
1✔
3181
                    "data": true,
1✔
3182
                    "legend": false
1✔
3183
                },
1✔
3184
                "symbology": {
1✔
3185
                    "type": "raster",
1✔
3186
                    "opacity": 1,
1✔
3187
                    "colorizer": {
1✔
3188
                        "type": "linearGradient",
1✔
3189
                        "breakpoints": [{
1✔
3190
                            "value": 1,
1✔
3191
                            "color": [0, 0, 4, 255]
1✔
3192
                        }, {
1✔
3193
                            "value": 17.866_666_666_666_667,
1✔
3194
                            "color": [11, 9, 36, 255]
1✔
3195
                        }, {
1✔
3196
                            "value": 34.733_333_333_333_334,
1✔
3197
                            "color": [32, 17, 75, 255]
1✔
3198
                        }, {
1✔
3199
                            "value": 51.6,
1✔
3200
                            "color": [59, 15, 112, 255]
1✔
3201
                        }, {
1✔
3202
                            "value": 68.466_666_666_666_67,
1✔
3203
                            "color": [87, 21, 126, 255]
1✔
3204
                        }, {
1✔
3205
                            "value": 85.333_333_333_333_33,
1✔
3206
                            "color": [114, 31, 129, 255]
1✔
3207
                        }, {
1✔
3208
                            "value": 102.199_999_999_999_99,
1✔
3209
                            "color": [140, 41, 129, 255]
1✔
3210
                        }, {
1✔
3211
                            "value": 119.066_666_666_666_65,
1✔
3212
                            "color": [168, 50, 125, 255]
1✔
3213
                        }, {
1✔
3214
                            "value": 135.933_333_333_333_34,
1✔
3215
                            "color": [196, 60, 117, 255]
1✔
3216
                        }, {
1✔
3217
                            "value": 152.799_999_999_999_98,
1✔
3218
                            "color": [222, 73, 104, 255]
1✔
3219
                        }, {
1✔
3220
                            "value": 169.666_666_666_666_66,
1✔
3221
                            "color": [241, 96, 93, 255]
1✔
3222
                        }, {
1✔
3223
                            "value": 186.533_333_333_333_33,
1✔
3224
                            "color": [250, 127, 94, 255]
1✔
3225
                        }, {
1✔
3226
                            "value": 203.399_999_999_999_98,
1✔
3227
                            "color": [254, 159, 109, 255]
1✔
3228
                        }, {
1✔
3229
                            "value": 220.266_666_666_666_65,
1✔
3230
                            "color": [254, 191, 132, 255]
1✔
3231
                        }, {
1✔
3232
                            "value": 237.133_333_333_333_3,
1✔
3233
                            "color": [253, 222, 160, 255]
1✔
3234
                        }, {
1✔
3235
                            "value": 254,
1✔
3236
                            "color": [252, 253, 191, 255]
1✔
3237
                        }],
1✔
3238
                        "noDataColor": [0, 0, 0, 0],
1✔
3239
                        "overColor": [255, 255, 255, 127],
1✔
3240
                        "underColor": [255, 255, 255, 127]
1✔
3241
                    }
1✔
3242
                }
1✔
3243
            }]
1✔
3244
        }))
1✔
3245
        .unwrap();
1✔
3246

1✔
3247
        db.update_project(update).await.unwrap();
22✔
3248

1✔
3249
        let update: UpdateProject = serde_json::from_value(json!({
1✔
3250
            "id": project_id.to_string(),
1✔
3251
            "layers": [{
1✔
3252
                "name": "NDVI",
1✔
3253
                "workflow": workflow_id.to_string(),
1✔
3254
                "visibility": {
1✔
3255
                    "data": true,
1✔
3256
                    "legend": false
1✔
3257
                },
1✔
3258
                "symbology": {
1✔
3259
                    "type": "raster",
1✔
3260
                    "opacity": 1,
1✔
3261
                    "colorizer": {
1✔
3262
                        "type": "linearGradient",
1✔
3263
                        "breakpoints": [{
1✔
3264
                            "value": 1,
1✔
3265
                            "color": [0, 0, 4, 255]
1✔
3266
                        }, {
1✔
3267
                            "value": 17.933_333_333_333_334,
1✔
3268
                            "color": [11, 9, 36, 255]
1✔
3269
                        }, {
1✔
3270
                            "value": 34.866_666_666_666_67,
1✔
3271
                            "color": [32, 17, 75, 255]
1✔
3272
                        }, {
1✔
3273
                            "value": 51.800_000_000_000_004,
1✔
3274
                            "color": [59, 15, 112, 255]
1✔
3275
                        }, {
1✔
3276
                            "value": 68.733_333_333_333_33,
1✔
3277
                            "color": [87, 21, 126, 255]
1✔
3278
                        }, {
1✔
3279
                            "value": 85.666_666_666_666_66,
1✔
3280
                            "color": [114, 31, 129, 255]
1✔
3281
                        }, {
1✔
3282
                            "value": 102.6,
1✔
3283
                            "color": [140, 41, 129, 255]
1✔
3284
                        }, {
1✔
3285
                            "value": 119.533_333_333_333_32,
1✔
3286
                            "color": [168, 50, 125, 255]
1✔
3287
                        }, {
1✔
3288
                            "value": 136.466_666_666_666_67,
1✔
3289
                            "color": [196, 60, 117, 255]
1✔
3290
                        }, {
1✔
3291
                            "value": 153.4,
1✔
3292
                            "color": [222, 73, 104, 255]
1✔
3293
                        }, {
1✔
3294
                            "value": 170.333_333_333_333_31,
1✔
3295
                            "color": [241, 96, 93, 255]
1✔
3296
                        }, {
1✔
3297
                            "value": 187.266_666_666_666_65,
1✔
3298
                            "color": [250, 127, 94, 255]
1✔
3299
                        }, {
1✔
3300
                            "value": 204.2,
1✔
3301
                            "color": [254, 159, 109, 255]
1✔
3302
                        }, {
1✔
3303
                            "value": 221.133_333_333_333_33,
1✔
3304
                            "color": [254, 191, 132, 255]
1✔
3305
                        }, {
1✔
3306
                            "value": 238.066_666_666_666_63,
1✔
3307
                            "color": [253, 222, 160, 255]
1✔
3308
                        }, {
1✔
3309
                            "value": 255,
1✔
3310
                            "color": [252, 253, 191, 255]
1✔
3311
                        }],
1✔
3312
                        "noDataColor": [0, 0, 0, 0],
1✔
3313
                        "overColor": [255, 255, 255, 127],
1✔
3314
                        "underColor": [255, 255, 255, 127]
1✔
3315
                    }
1✔
3316
                }
1✔
3317
            }]
1✔
3318
        }))
1✔
3319
        .unwrap();
1✔
3320

1✔
3321
        let update = update;
1✔
3322

3323
        // run two updates concurrently
3324
        let (r0, r1) = join!(db.update_project(update.clone()), db.update_project(update));
1✔
3325

3326
        assert!(r0.is_ok());
1✔
3327
        assert!(r1.is_ok());
1✔
3328
    }
1✔
3329

3330
    #[ge_context::test]
2✔
3331
    #[allow(clippy::too_many_lines)]
3332
    async fn it_resolves_dataset_names_to_ids(app_ctx: ProPostgresContext<NoTls>) {
1✔
3333
        let admin_session = UserSession::admin_session();
1✔
3334
        let db = app_ctx.session_context(admin_session.clone()).db();
1✔
3335

1✔
3336
        let loading_info = OgrSourceDataset {
1✔
3337
            file_name: PathBuf::from("test.csv"),
1✔
3338
            layer_name: "test.csv".to_owned(),
1✔
3339
            data_type: Some(VectorDataType::MultiPoint),
1✔
3340
            time: OgrSourceDatasetTimeType::Start {
1✔
3341
                start_field: "start".to_owned(),
1✔
3342
                start_format: OgrSourceTimeFormat::Auto,
1✔
3343
                duration: OgrSourceDurationSpec::Zero,
1✔
3344
            },
1✔
3345
            default_geometry: None,
1✔
3346
            columns: Some(OgrSourceColumnSpec {
1✔
3347
                format_specifics: Some(FormatSpecifics::Csv {
1✔
3348
                    header: CsvHeader::Auto,
1✔
3349
                }),
1✔
3350
                x: "x".to_owned(),
1✔
3351
                y: None,
1✔
3352
                int: vec![],
1✔
3353
                float: vec![],
1✔
3354
                text: vec![],
1✔
3355
                bool: vec![],
1✔
3356
                datetime: vec![],
1✔
3357
                rename: None,
1✔
3358
            }),
1✔
3359
            force_ogr_time_filter: false,
1✔
3360
            force_ogr_spatial_filter: false,
1✔
3361
            on_error: OgrSourceErrorSpec::Ignore,
1✔
3362
            sql_query: None,
1✔
3363
            attribute_query: None,
1✔
3364
            cache_ttl: CacheTtlSeconds::default(),
1✔
3365
        };
1✔
3366

1✔
3367
        let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
3368
            OgrSourceDataset,
1✔
3369
            VectorResultDescriptor,
1✔
3370
            VectorQueryRectangle,
1✔
3371
        > {
1✔
3372
            loading_info: loading_info.clone(),
1✔
3373
            result_descriptor: VectorResultDescriptor {
1✔
3374
                data_type: VectorDataType::MultiPoint,
1✔
3375
                spatial_reference: SpatialReference::epsg_4326().into(),
1✔
3376
                columns: [(
1✔
3377
                    "foo".to_owned(),
1✔
3378
                    VectorColumnInfo {
1✔
3379
                        data_type: FeatureDataType::Float,
1✔
3380
                        measurement: Measurement::Unitless,
1✔
3381
                    },
1✔
3382
                )]
1✔
3383
                .into_iter()
1✔
3384
                .collect(),
1✔
3385
                time: None,
1✔
3386
                bbox: None,
1✔
3387
            },
1✔
3388
            phantom: Default::default(),
1✔
3389
        });
1✔
3390

3391
        let DatasetIdAndName {
3392
            id: dataset_id1,
1✔
3393
            name: dataset_name1,
1✔
3394
        } = db
1✔
3395
            .add_dataset(
1✔
3396
                AddDataset {
1✔
3397
                    name: Some(DatasetName::new(None, "my_dataset".to_owned())),
1✔
3398
                    display_name: "Ogr Test".to_owned(),
1✔
3399
                    description: "desc".to_owned(),
1✔
3400
                    source_operator: "OgrSource".to_owned(),
1✔
3401
                    symbology: None,
1✔
3402
                    provenance: Some(vec![Provenance {
1✔
3403
                        citation: "citation".to_owned(),
1✔
3404
                        license: "license".to_owned(),
1✔
3405
                        uri: "uri".to_owned(),
1✔
3406
                    }]),
1✔
3407
                },
1✔
3408
                db.wrap_meta_data(meta_data.clone()),
1✔
3409
            )
1✔
3410
            .await
173✔
3411
            .unwrap();
1✔
3412

3413
        let DatasetIdAndName {
3414
            id: dataset_id2,
1✔
3415
            name: dataset_name2,
1✔
3416
        } = db
1✔
3417
            .add_dataset(
1✔
3418
                AddDataset {
1✔
3419
                    name: Some(DatasetName::new(
1✔
3420
                        Some(admin_session.user.id.to_string()),
1✔
3421
                        "my_dataset".to_owned(),
1✔
3422
                    )),
1✔
3423
                    display_name: "Ogr Test".to_owned(),
1✔
3424
                    description: "desc".to_owned(),
1✔
3425
                    source_operator: "OgrSource".to_owned(),
1✔
3426
                    symbology: None,
1✔
3427
                    provenance: Some(vec![Provenance {
1✔
3428
                        citation: "citation".to_owned(),
1✔
3429
                        license: "license".to_owned(),
1✔
3430
                        uri: "uri".to_owned(),
1✔
3431
                    }]),
1✔
3432
                },
1✔
3433
                db.wrap_meta_data(meta_data),
1✔
3434
            )
1✔
3435
            .await
7✔
3436
            .unwrap();
1✔
3437

1✔
3438
        assert_eq!(
1✔
3439
            db.resolve_dataset_name_to_id(&dataset_name1).await.unwrap(),
3✔
3440
            dataset_id1
3441
        );
3442
        assert_eq!(
3443
            db.resolve_dataset_name_to_id(&dataset_name2).await.unwrap(),
3✔
3444
            dataset_id2
3445
        );
3446
    }
1✔
3447

3448
    #[ge_context::test]
2✔
3449
    #[allow(clippy::too_many_lines)]
3450
    async fn it_bulk_updates_quota(app_ctx: ProPostgresContext<NoTls>) {
1✔
3451
        let admin_session = UserSession::admin_session();
1✔
3452
        let db = app_ctx.session_context(admin_session.clone()).db();
1✔
3453

3454
        let user1 = app_ctx
1✔
3455
            .register_user(UserRegistration {
1✔
3456
                email: "user1@example.com".into(),
1✔
3457
                password: "12345678".into(),
1✔
3458
                real_name: "User1".into(),
1✔
3459
            })
1✔
3460
            .await
11✔
3461
            .unwrap();
1✔
3462

3463
        let user2 = app_ctx
1✔
3464
            .register_user(UserRegistration {
1✔
3465
                email: "user2@example.com".into(),
1✔
3466
                password: "12345678".into(),
1✔
3467
                real_name: "User2".into(),
1✔
3468
            })
1✔
3469
            .await
9✔
3470
            .unwrap();
1✔
3471

1✔
3472
        // single item in bulk
1✔
3473
        db.bulk_increment_quota_used([(user1, 1)]).await.unwrap();
3✔
3474

1✔
3475
        assert_eq!(db.quota_used_by_user(&user1).await.unwrap(), 1);
3✔
3476

3477
        // multiple items in bulk
3478
        db.bulk_increment_quota_used([(user1, 1), (user2, 3)])
1✔
3479
            .await
3✔
3480
            .unwrap();
1✔
3481

1✔
3482
        assert_eq!(db.quota_used_by_user(&user1).await.unwrap(), 2);
3✔
3483
        assert_eq!(db.quota_used_by_user(&user2).await.unwrap(), 3);
3✔
3484
    }
1✔
3485

3486
    #[ge_context::test]
2✔
3487
    async fn it_persists_ml_models(app_ctx: ProPostgresContext<NoTls>) {
1✔
3488
        let id = MlModelId::from_str("3db69b02-6d7a-4112-a355-e3745be18a80").unwrap();
1✔
3489
        let input = MlModel {
1✔
3490
            id,
1✔
3491
            content: "model content".to_owned(),
1✔
3492
        };
1✔
3493

3494
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
3495

1✔
3496
        let db = app_ctx.session_context(session.clone()).db();
1✔
3497

1✔
3498
        db.store_ml_model(input.clone()).await.unwrap();
5✔
3499

3500
        let model = db.load_ml_model(id).await.unwrap();
3✔
3501

1✔
3502
        assert_eq!(model, input);
1✔
3503
    }
1✔
3504

3505
    #[ge_context::test]
2✔
3506
    async fn it_fails_to_load_nonexistent_ml_model(app_ctx: ProPostgresContext<NoTls>) {
1✔
3507
        let model_id = MlModelId::from_str("3db69b02-6d7a-4112-a355-e3745be18a80").unwrap();
1✔
3508

3509
        let session = app_ctx.create_anonymous_session().await.unwrap();
14✔
3510
        let db = app_ctx.session_context(session.clone()).db();
1✔
3511

3512
        let result = db.load_ml_model(model_id).await;
3✔
3513

3514
        match result {
1✔
3515
            Err(error::Error::MachineLearningError {
3516
                source: crate::pro::machine_learning::ml_error::MachineLearningError::UnknownModelIdInPostgres { .. },
3517
            }) => (),
1✔
3518
            _ => panic!("Expected UnknownModelId error"),
×
3519
        }
3520
    }
1✔
3521
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc