• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

geo-engine / geoengine / 6039131541

31 Aug 2023 02:19PM UTC coverage: 90.092% (+0.05%) from 90.041%
6039131541

push

github

web-flow
Merge pull request #866 from geo-engine/provider-def-mapping

refactor provider defs to pro/non-pro

991 of 991 new or added lines in 21 files covered. (100.0%)

106883 of 118637 relevant lines covered (90.09%)

61031.6 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

97.39
/services/src/pro/contexts/postgres.rs
1
use super::{ExecutionContextImpl, ProApplicationContext, ProGeoEngineDb, QuotaCheckerImpl};
2
use crate::api::model::datatypes::DatasetName;
3
use crate::contexts::{ApplicationContext, PostgresContext, QueryContextImpl, SessionId};
4
use crate::contexts::{GeoEngineDb, SessionContext};
5
use crate::datasets::add_from_directory::add_providers_from_directory;
6
use crate::datasets::upload::{Volume, Volumes};
7
use crate::error::{self, Error, Result};
8
use crate::layers::add_from_directory::UNSORTED_COLLECTION_ID;
9
use crate::layers::storage::INTERNAL_LAYER_DB_ROOT_COLLECTION_ID;
10
use crate::pro::datasets::add_datasets_from_directory;
11
use crate::pro::layers::add_from_directory::{
12
    add_layer_collections_from_directory, add_layers_from_directory,
13
    add_pro_providers_from_directory,
14
};
15
use crate::pro::permissions::Role;
16
use crate::pro::quota::{initialize_quota_tracking, QuotaTrackingFactory};
17
use crate::pro::tasks::{ProTaskManager, ProTaskManagerBackend};
18
use crate::pro::users::{OidcRequestDb, UserAuth, UserSession};
19
use crate::pro::util::config::{Cache, Oidc, Quota};
20
use crate::tasks::SimpleTaskManagerContext;
21
use crate::util::config::get_config_element;
22
use async_trait::async_trait;
23
use bb8_postgres::{
24
    bb8::Pool,
25
    bb8::PooledConnection,
26
    tokio_postgres::{tls::MakeTlsConnect, tls::TlsConnect, Config, Socket},
27
    PostgresConnectionManager,
28
};
29
use geoengine_datatypes::raster::TilingSpecification;
30
use geoengine_datatypes::util::test::TestDefault;
31
use geoengine_datatypes::util::Identifier;
32
use geoengine_operators::engine::{ChunkByteSize, QueryContextExtensions};
33
use geoengine_operators::pro::cache::shared_cache::SharedCache;
34
use geoengine_operators::pro::meta::quota::{ComputationContext, QuotaChecker};
35
use geoengine_operators::util::create_rayon_thread_pool;
36
use log::{debug, info};
37
use pwhash::bcrypt;
38
use rayon::ThreadPool;
39
use snafu::{ensure, ResultExt};
40
use std::path::PathBuf;
41
use std::sync::Arc;
42

43
// TODO: do not report postgres error details to user

/// A context with references to Postgres backends of the dbs. Automatically migrates schema on instantiation
#[derive(Clone)]
pub struct ProPostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // rayon pool shared by all query/execution contexts created from this context
    thread_pool: Arc<ThreadPool>,
    // tiling specification handed to every `ExecutionContext`
    exe_ctx_tiling_spec: TilingSpecification,
    // chunk byte size handed to every `QueryContext`
    query_ctx_chunk_size: ChunkByteSize,
    // in-memory task backend; does not persist across restarts
    task_manager: Arc<ProTaskManagerBackend>,
    // `None` when OIDC login is disabled for this deployment
    oidc_request_db: Arc<Option<OidcRequestDb>>,
    // factory for per-computation quota trackers
    quota: QuotaTrackingFactory,
    pub(crate) pool: Pool<PostgresConnectionManager<Tls>>,
    // configured upload volumes (admin-only listing, see `SessionContext::volumes`)
    volumes: Volumes,
    // shared operator tile cache injected into query contexts
    tile_cache: Arc<SharedCache>,
}
64

65
impl<Tls> ProPostgresContext<Tls>
66
where
67
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
68
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
69
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
70
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
71
{
72
    pub async fn new_with_context_spec(
89✔
73
        config: Config,
89✔
74
        tls: Tls,
89✔
75
        exe_ctx_tiling_spec: TilingSpecification,
89✔
76
        query_ctx_chunk_size: ChunkByteSize,
89✔
77
        quota_config: Quota,
89✔
78
    ) -> Result<Self> {
89✔
79
        let pg_mgr = PostgresConnectionManager::new(config, tls);
89✔
80

81
        let pool = Pool::builder().build(pg_mgr).await?;
89✔
82

83
        let created_schema = PostgresContext::create_schema(pool.get().await?).await?;
1,068✔
84
        if created_schema {
89✔
85
            Self::create_pro_schema(pool.get().await?).await?;
853✔
86
        }
×
87

88
        let db = ProPostgresDb::new(pool.clone(), UserSession::admin_session());
89✔
89
        let quota = initialize_quota_tracking(
89✔
90
            quota_config.mode,
89✔
91
            db,
89✔
92
            quota_config.increment_quota_buffer_size,
89✔
93
            quota_config.increment_quota_buffer_timeout_seconds,
89✔
94
        );
89✔
95

89✔
96
        Ok(ProPostgresContext {
89✔
97
            task_manager: Default::default(),
89✔
98
            thread_pool: create_rayon_thread_pool(0),
89✔
99
            exe_ctx_tiling_spec,
89✔
100
            query_ctx_chunk_size,
89✔
101
            oidc_request_db: Arc::new(None),
89✔
102
            quota,
89✔
103
            pool,
89✔
104
            volumes: Default::default(),
89✔
105
            tile_cache: Arc::new(SharedCache::test_default()),
89✔
106
        })
89✔
107
    }
89✔
108

109
    #[allow(clippy::missing_panics_doc)]
110
    pub async fn new_with_oidc(
5✔
111
        config: Config,
5✔
112
        tls: Tls,
5✔
113
        oidc_db: OidcRequestDb,
5✔
114
        cache_config: Cache,
5✔
115
        quota_config: Quota,
5✔
116
    ) -> Result<Self> {
5✔
117
        let pg_mgr = PostgresConnectionManager::new(config, tls);
5✔
118

119
        let pool = Pool::builder().build(pg_mgr).await?;
5✔
120

121
        let created_schema = PostgresContext::create_schema(pool.get().await?).await?;
60✔
122
        if created_schema {
5✔
123
            Self::create_pro_schema(pool.get().await?).await?;
49✔
124
        }
×
125

126
        let db = ProPostgresDb::new(pool.clone(), UserSession::admin_session());
5✔
127
        let quota = initialize_quota_tracking(
5✔
128
            quota_config.mode,
5✔
129
            db,
5✔
130
            quota_config.increment_quota_buffer_size,
5✔
131
            quota_config.increment_quota_buffer_timeout_seconds,
5✔
132
        );
5✔
133

5✔
134
        Ok(ProPostgresContext {
5✔
135
            task_manager: Default::default(),
5✔
136
            thread_pool: create_rayon_thread_pool(0),
5✔
137
            exe_ctx_tiling_spec: TestDefault::test_default(),
5✔
138
            query_ctx_chunk_size: TestDefault::test_default(),
5✔
139
            oidc_request_db: Arc::new(Some(oidc_db)),
5✔
140
            quota,
5✔
141
            pool,
5✔
142
            volumes: Default::default(),
5✔
143
            tile_cache: Arc::new(
5✔
144
                SharedCache::new(
5✔
145
                    cache_config.cache_size_in_mb,
5✔
146
                    cache_config.landing_zone_ratio,
5✔
147
                )
5✔
148
                .expect("tile cache creation should work because the config is valid"),
5✔
149
            ),
5✔
150
        })
5✔
151
    }
5✔
152

153
    // TODO: check if the datasets exist already and don't output warnings when skipping them
154
    #[allow(clippy::too_many_arguments, clippy::missing_panics_doc)]
155
    pub async fn new_with_data(
×
156
        config: Config,
×
157
        tls: Tls,
×
158
        dataset_defs_path: PathBuf,
×
159
        provider_defs_path: PathBuf,
×
160
        layer_defs_path: PathBuf,
×
161
        layer_collection_defs_path: PathBuf,
×
162
        exe_ctx_tiling_spec: TilingSpecification,
×
163
        query_ctx_chunk_size: ChunkByteSize,
×
164
        oidc_config: Oidc,
×
165
        cache_config: Cache,
×
166
        quota_config: Quota,
×
167
    ) -> Result<Self> {
×
168
        let pg_mgr = PostgresConnectionManager::new(config, tls);
×
169

170
        let pool = Pool::builder().build(pg_mgr).await?;
×
171

172
        let created_schema = PostgresContext::create_schema(pool.get().await?).await?;
×
173
        if created_schema {
×
174
            Self::create_pro_schema(pool.get().await?).await?;
×
175
        }
×
176

177
        let db = ProPostgresDb::new(pool.clone(), UserSession::admin_session());
×
178
        let quota = initialize_quota_tracking(
×
179
            quota_config.mode,
×
180
            db,
×
181
            quota_config.increment_quota_buffer_size,
×
182
            quota_config.increment_quota_buffer_timeout_seconds,
×
183
        );
×
184

×
185
        let app_ctx = ProPostgresContext {
×
186
            task_manager: Default::default(),
×
187
            thread_pool: create_rayon_thread_pool(0),
×
188
            exe_ctx_tiling_spec,
×
189
            query_ctx_chunk_size,
×
190
            oidc_request_db: Arc::new(OidcRequestDb::try_from(oidc_config).ok()),
×
191
            quota,
×
192
            pool,
×
193
            volumes: Default::default(),
×
194
            tile_cache: Arc::new(
×
195
                SharedCache::new(
×
196
                    cache_config.cache_size_in_mb,
×
197
                    cache_config.landing_zone_ratio,
×
198
                )
×
199
                .expect("tile cache creation should work because the config is valid"),
×
200
            ),
×
201
        };
×
202

×
203
        if created_schema {
×
204
            info!("Populating database with initial data...");
×
205

206
            let mut db = app_ctx.session_context(UserSession::admin_session()).db();
×
207

×
208
            add_layers_from_directory(&mut db, layer_defs_path).await;
×
209
            add_layer_collections_from_directory(&mut db, layer_collection_defs_path).await;
×
210

211
            add_datasets_from_directory(&mut db, dataset_defs_path).await;
×
212

213
            add_providers_from_directory(&mut db, provider_defs_path.clone()).await;
×
214

215
            add_pro_providers_from_directory(&mut db, provider_defs_path.join("pro")).await;
×
216
        }
×
217

218
        Ok(app_ctx)
×
219
    }
×
220

221
    #[allow(clippy::too_many_lines)]
    /// Creates the pro part of the database schema inside a single transaction:
    /// runs `schema.sql`, seeds the built-in roles, creates the admin user from
    /// the `User` config, assigns the admin role, and grants permissions on the
    /// root and unsorted layer collections.
    pub(crate) async fn create_pro_schema(
        mut conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
    ) -> Result<()> {
        // admin credentials come from the application config
        let user_config = get_config_element::<crate::pro::util::config::User>()?;

        let tx = conn.build_transaction().start().await?;

        tx.batch_execute(include_str!("schema.sql")).await?;

        // seed the three built-in roles
        let stmt = tx
            .prepare(
                r#"
            INSERT INTO roles (id, name) VALUES
                ($1, 'admin'),
                ($2, 'user'),
                ($3, 'anonymous');"#,
            )
            .await?;

        tx.execute(
            &stmt,
            &[
                &Role::admin_role_id(),
                &Role::registered_user_role_id(),
                &Role::anonymous_role_id(),
            ],
        )
        .await?;

        // create the admin user; its user id equals the admin role id
        let stmt = tx
            .prepare(
                r#"
            INSERT INTO users (
                id, 
                email,
                password_hash,
                real_name,
                active)
            VALUES (
                $1, 
                $2,
                $3,
                'admin',
                true
            );"#,
            )
            .await?;

        tx.execute(
            &stmt,
            &[
                &Role::admin_role_id(),
                &user_config.admin_email,
                &bcrypt::hash(user_config.admin_password)
                    .expect("Admin password hash should be valid"),
            ],
        )
        .await?;

        // assign the admin role to the admin user ($1 serves as both ids)
        let stmt = tx
            .prepare(
                r#"
            INSERT INTO user_roles 
                (user_id, role_id)
            VALUES 
                ($1, $1);"#,
            )
            .await?;

        tx.execute(&stmt, &[&Role::admin_role_id()]).await?;

        // grant the root ($4) and unsorted ($5) layer collections:
        // admin owns them, registered and anonymous users may read
        let stmt = tx
            .prepare(
                r#"
            INSERT INTO permissions
             (role_id, layer_collection_id, permission)  
            VALUES 
                ($1, $4, 'Owner'),
                ($2, $4, 'Read'),
                ($3, $4, 'Read'),
                ($1, $5, 'Owner'),
                ($2, $5, 'Read'),
                ($3, $5, 'Read');"#,
            )
            .await?;

        tx.execute(
            &stmt,
            &[
                &Role::admin_role_id(),
                &Role::registered_user_role_id(),
                &Role::anonymous_role_id(),
                &INTERNAL_LAYER_DB_ROOT_COLLECTION_ID,
                &UNSORTED_COLLECTION_ID,
            ],
        )
        .await?;

        tx.commit().await?;

        debug!("Created pro database schema");

        Ok(())
    }
327

328
    pub fn oidc_request_db(&self) -> Arc<Option<OidcRequestDb>> {
2✔
329
        self.oidc_request_db.clone()
2✔
330
    }
2✔
331
}
332

333
#[async_trait]
334
impl<Tls> ApplicationContext for ProPostgresContext<Tls>
335
where
336
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
337
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
338
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
339
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
340
{
341
    type SessionContext = PostgresSessionContext<Tls>;
342
    type Session = UserSession;
343

344
    fn session_context(&self, session: Self::Session) -> Self::SessionContext {
115✔
345
        PostgresSessionContext {
115✔
346
            session,
115✔
347
            context: self.clone(),
115✔
348
        }
115✔
349
    }
115✔
350

351
    async fn session_by_id(&self, session_id: SessionId) -> Result<Self::Session> {
49✔
352
        self.user_session_by_id(session_id)
49✔
353
            .await
370✔
354
            .map_err(Box::new)
49✔
355
            .context(error::Unauthorized)
49✔
356
    }
98✔
357
}
358

359
#[async_trait]
360
impl<Tls> ProApplicationContext for ProPostgresContext<Tls>
361
where
362
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
363
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
364
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
365
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
366
{
367
    fn oidc_request_db(&self) -> Option<&OidcRequestDb> {
6✔
368
        self.oidc_request_db.as_ref().as_ref()
6✔
369
    }
6✔
370
}
371

372
/// A session-scoped view on a [`ProPostgresContext`]: pairs a user session
/// with the shared application context.
#[derive(Clone)]
pub struct PostgresSessionContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // the user session this context acts on behalf of
    session: UserSession,
    // shared application context (cheap to clone; holds `Arc`s and the pool)
    context: ProPostgresContext<Tls>,
}
383

384
#[async_trait]
385
impl<Tls> SessionContext for PostgresSessionContext<Tls>
386
where
387
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
388
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
389
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
390
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
391
{
392
    type Session = UserSession;
393
    type GeoEngineDB = ProPostgresDb<Tls>;
394

395
    type TaskContext = SimpleTaskManagerContext;
396
    type TaskManager = ProTaskManager; // this does not persist across restarts
397
    type QueryContext = QueryContextImpl;
398
    type ExecutionContext = ExecutionContextImpl<Self::GeoEngineDB>;
399

400
    fn db(&self) -> Self::GeoEngineDB {
123✔
401
        ProPostgresDb::new(self.context.pool.clone(), self.session.clone())
123✔
402
    }
123✔
403

404
    fn tasks(&self) -> Self::TaskManager {
1✔
405
        ProTaskManager::new(self.context.task_manager.clone(), self.session.clone())
1✔
406
    }
1✔
407

408
    fn query_context(&self) -> Result<Self::QueryContext> {
3✔
409
        // TODO: load config only once
3✔
410

3✔
411
        let mut extensions = QueryContextExtensions::default();
3✔
412
        extensions.insert(
3✔
413
            self.context
3✔
414
                .quota
3✔
415
                .create_quota_tracking(&self.session, ComputationContext::new()),
3✔
416
        );
3✔
417
        extensions.insert(Box::new(QuotaCheckerImpl { user_db: self.db() }) as QuotaChecker);
3✔
418
        extensions.insert(self.context.tile_cache.clone());
3✔
419

3✔
420
        Ok(QueryContextImpl::new_with_extensions(
3✔
421
            self.context.query_ctx_chunk_size,
3✔
422
            self.context.thread_pool.clone(),
3✔
423
            extensions,
3✔
424
        ))
3✔
425
    }
3✔
426

427
    fn execution_context(&self) -> Result<Self::ExecutionContext> {
6✔
428
        Ok(ExecutionContextImpl::<ProPostgresDb<Tls>>::new(
6✔
429
            self.db(),
6✔
430
            self.context.thread_pool.clone(),
6✔
431
            self.context.exe_ctx_tiling_spec,
6✔
432
        ))
6✔
433
    }
6✔
434

435
    fn volumes(&self) -> Result<Vec<Volume>> {
×
436
        ensure!(self.session.is_admin(), error::PermissionDenied);
×
437

438
        Ok(self.context.volumes.volumes.clone())
×
439
    }
×
440

441
    fn session(&self) -> &Self::Session {
×
442
        &self.session
×
443
    }
×
444
}
445

446
/// Postgres-backed db handle scoped to a single user session.
pub struct ProPostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // shared bb8 connection pool
    pub(crate) conn_pool: Pool<PostgresConnectionManager<Tls>>,
    // session whose identity/permissions scope all db operations
    pub(crate) session: UserSession,
}
456

457
impl<Tls> ProPostgresDb<Tls>
458
where
459
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
460
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
461
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
462
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
463
{
464
    pub fn new(conn_pool: Pool<PostgresConnectionManager<Tls>>, session: UserSession) -> Self {
217✔
465
        Self { conn_pool, session }
217✔
466
    }
217✔
467

468
    /// Check whether the namepsace of the given dataset is allowed for insertion
469
    pub(crate) fn check_namespace(&self, id: &DatasetName) -> Result<()> {
23✔
470
        let is_ok = match &id.namespace {
23✔
471
            Some(namespace) => namespace.as_str() == self.session.user.id.to_string(),
17✔
472
            None => self.session.is_admin(),
6✔
473
        };
474

475
        if is_ok {
23✔
476
            Ok(())
23✔
477
        } else {
478
            Err(Error::InvalidDatasetIdNamespace)
×
479
        }
480
    }
23✔
481
}
482

483
// Marker impl: the actual storage functionality comes from the individual
// trait impls (datasets, layers, workflows, ...) defined elsewhere.
impl<Tls> GeoEngineDb for ProPostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
491

492
// Marker impl for the pro db abstraction; see `GeoEngineDb` impl above for
// where the concrete functionality lives.
impl<Tls> ProGeoEngineDb for ProPostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
500

501
#[cfg(test)]
502
mod tests {
503
    use std::str::FromStr;
504

505
    use super::*;
506
    use crate::api::model::datatypes::{DataProviderId, DatasetName, LayerId};
507
    use crate::api::model::responses::datasets::DatasetIdAndName;
508
    use crate::api::model::services::AddDataset;
509
    use crate::datasets::external::netcdfcf::NetCdfCfDataProviderDefinition;
510
    use crate::datasets::listing::{DatasetListOptions, DatasetListing, ProvenanceOutput};
511
    use crate::datasets::listing::{DatasetProvider, Provenance};
512
    use crate::datasets::storage::{DatasetStore, MetaDataDefinition};
513
    use crate::datasets::upload::{FileId, UploadId};
514
    use crate::datasets::upload::{FileUpload, Upload, UploadDb};
515
    use crate::layers::layer::{
516
        AddLayer, AddLayerCollection, CollectionItem, LayerCollection, LayerCollectionListOptions,
517
        LayerCollectionListing, LayerListing, ProviderLayerCollectionId, ProviderLayerId,
518
    };
519
    use crate::layers::listing::{LayerCollectionId, LayerCollectionProvider};
520
    use crate::layers::storage::{
521
        LayerDb, LayerProviderDb, LayerProviderListing, LayerProviderListingOptions,
522
        INTERNAL_PROVIDER_ID,
523
    };
524
    use crate::pro::permissions::{Permission, PermissionDb, RoleDescription, RoleId};
525
    use crate::pro::users::{
526
        ExternalUserClaims, RoleDb, UserCredentials, UserDb, UserId, UserRegistration,
527
    };
528
    use crate::pro::util::config::QuotaTrackingMode;
529
    use crate::pro::util::tests::with_pro_temp_context;
530
    use crate::pro::util::tests::{admin_login, register_ndvi_workflow_helper};
531
    use crate::projects::{
532
        CreateProject, LayerUpdate, LoadVersion, OrderBy, Plot, PlotUpdate, PointSymbology,
533
        ProjectDb, ProjectFilter, ProjectId, ProjectLayer, ProjectListOptions, ProjectListing,
534
        STRectangle, UpdateProject,
535
    };
536

537
    use crate::workflows::registry::WorkflowRegistry;
538
    use crate::workflows::workflow::Workflow;
539

540
    use bb8_postgres::tokio_postgres::NoTls;
541
    use futures::join;
542
    use geoengine_datatypes::collections::VectorDataType;
543
    use geoengine_datatypes::primitives::CacheTtlSeconds;
544
    use geoengine_datatypes::primitives::{
545
        BoundingBox2D, Coordinate2D, DateTime, Duration, FeatureDataType, Measurement,
546
        RasterQueryRectangle, SpatialResolution, TimeGranularity, TimeInstance, TimeInterval,
547
        TimeStep, VectorQueryRectangle,
548
    };
549
    use geoengine_datatypes::raster::RasterDataType;
550
    use geoengine_datatypes::spatial_reference::{SpatialReference, SpatialReferenceOption};
551
    use geoengine_datatypes::test_data;
552
    use geoengine_datatypes::util::Identifier;
553
    use geoengine_operators::engine::{
554
        MetaData, MetaDataProvider, MultipleRasterOrSingleVectorSource, PlotOperator,
555
        RasterResultDescriptor, StaticMetaData, TypedOperator, TypedResultDescriptor,
556
        VectorColumnInfo, VectorOperator, VectorResultDescriptor,
557
    };
558
    use geoengine_operators::mock::{MockPointSource, MockPointSourceParams};
559
    use geoengine_operators::plot::{Statistics, StatisticsParams};
560
    use geoengine_operators::source::{
561
        CsvHeader, FileNotFoundHandling, FormatSpecifics, GdalDatasetGeoTransform,
562
        GdalDatasetParameters, GdalLoadingInfo, GdalMetaDataList, GdalMetaDataRegular,
563
        GdalMetaDataStatic, GdalMetadataNetCdfCf, OgrSourceColumnSpec, OgrSourceDataset,
564
        OgrSourceDatasetTimeType, OgrSourceDurationSpec, OgrSourceErrorSpec, OgrSourceTimeFormat,
565
    };
566
    use geoengine_operators::util::input::MultiRasterOrVectorOperator::Raster;
567
    use openidconnect::SubjectIdentifier;
568
    use serde_json::json;
569

570
    // End-to-end test of the internal-user flow: registration, login, project
    // CRUD, permissions, and session state against a temporary Postgres schema.
    // The steps mutate shared db state, so their order matters.
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn test() {
        with_pro_temp_context(|app_ctx, _| async move {
            anonymous(&app_ctx).await;

            let _user_id = user_reg_login(&app_ctx).await;

            // log in as the user registered above
            let session = app_ctx
                .login(UserCredentials {
                    email: "foo@example.com".into(),
                    password: "secret123".into(),
                })
                .await
                .unwrap();

            create_projects(&app_ctx, &session).await;

            let projects = list_projects(&app_ctx, &session).await;

            set_session(&app_ctx, &projects).await;

            let project_id = projects[0].id;

            update_projects(&app_ctx, &session, project_id).await;

            add_permission(&app_ctx, &session, project_id).await;

            delete_project(&app_ctx, &session, project_id).await;
        })
        .await;
    }
601

602
    // Same flow as `test`, but the session is obtained via an external (OIDC)
    // login instead of email/password registration. Step order matters.
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn test_external() {
        with_pro_temp_context(|app_ctx, _| async move {
            anonymous(&app_ctx).await;

            // logging in twice also exercises the "existing external user" path
            let session = external_user_login_twice(&app_ctx).await;

            create_projects(&app_ctx, &session).await;

            let projects = list_projects(&app_ctx, &session).await;

            set_session_external(&app_ctx, &projects).await;

            let project_id = projects[0].id;

            update_projects(&app_ctx, &session, project_id).await;

            add_permission(&app_ctx, &session, project_id).await;

            delete_project(&app_ctx, &session, project_id).await;
        })
        .await;
    }
625

626
    async fn set_session(app_ctx: &ProPostgresContext<NoTls>, projects: &[ProjectListing]) {
1✔
627
        let credentials = UserCredentials {
1✔
628
            email: "foo@example.com".into(),
1✔
629
            password: "secret123".into(),
1✔
630
        };
1✔
631

632
        let session = app_ctx.login(credentials).await.unwrap();
11✔
633

1✔
634
        set_session_in_database(app_ctx, projects, session).await;
18✔
635
    }
1✔
636

637
    async fn set_session_external(
1✔
638
        app_ctx: &ProPostgresContext<NoTls>,
1✔
639
        projects: &[ProjectListing],
1✔
640
    ) {
1✔
641
        let external_user_claims = ExternalUserClaims {
1✔
642
            external_id: SubjectIdentifier::new("Foo bar Id".into()),
1✔
643
            email: "foo@bar.de".into(),
1✔
644
            real_name: "Foo Bar".into(),
1✔
645
        };
1✔
646

647
        let session = app_ctx
1✔
648
            .login_external(external_user_claims, Duration::minutes(10))
1✔
649
            .await
11✔
650
            .unwrap();
1✔
651

1✔
652
        set_session_in_database(app_ctx, projects, session).await;
18✔
653
    }
1✔
654

655
    /// Shared assertion helper: persist a session's project and view and check
    /// that a fresh `session_by_id` lookup reflects both updates.
    async fn set_session_in_database(
        app_ctx: &ProPostgresContext<NoTls>,
        projects: &[ProjectListing],
        session: UserSession,
    ) {
        let db = app_ctx.session_context(session.clone()).db();

        // store the first listed project on the session ...
        db.set_session_project(projects[0].id).await.unwrap();

        // ... and verify it survives a round-trip through the database
        assert_eq!(
            app_ctx.session_by_id(session.id).await.unwrap().project,
            Some(projects[0].id)
        );

        // same round-trip check for the session's view rectangle
        let rect = STRectangle::new_unchecked(SpatialReference::epsg_4326(), 0., 1., 2., 3., 1, 2);
        db.set_session_view(rect.clone()).await.unwrap();
        assert_eq!(
            app_ctx.session_by_id(session.id).await.unwrap().view,
            Some(rect)
        );
    }
676

677
    async fn delete_project(
2✔
678
        app_ctx: &ProPostgresContext<NoTls>,
2✔
679
        session: &UserSession,
2✔
680
        project_id: ProjectId,
2✔
681
    ) {
2✔
682
        let db = app_ctx.session_context(session.clone()).db();
2✔
683

2✔
684
        db.delete_project(project_id).await.unwrap();
6✔
685

2✔
686
        assert!(db.load_project(project_id).await.is_err());
3✔
687
    }
2✔
688

689
    /// Verify permission propagation: the owner has `Owner` on the project, a
    /// freshly registered second user has nothing until the owner grants `Read`.
    /// The checks depend on the grant happening between them — order matters.
    async fn add_permission(
        app_ctx: &ProPostgresContext<NoTls>,
        session: &UserSession,
        project_id: ProjectId,
    ) {
        let db = app_ctx.session_context(session.clone()).db();

        // the creating session owns the project
        assert!(db
            .has_permission(project_id, Permission::Owner)
            .await
            .unwrap());

        // register and log in a second, unrelated user
        let user2 = app_ctx
            .register_user(UserRegistration {
                email: "user2@example.com".into(),
                password: "12345678".into(),
                real_name: "User2".into(),
            })
            .await
            .unwrap();

        let session2 = app_ctx
            .login(UserCredentials {
                email: "user2@example.com".into(),
                password: "12345678".into(),
            })
            .await
            .unwrap();

        // before any grant, the second user holds no owner permission
        let db2 = app_ctx.session_context(session2.clone()).db();
        assert!(!db2
            .has_permission(project_id, Permission::Owner)
            .await
            .unwrap());

        // grant read access to user2 (user id converts into the role id used
        // by the permission system — presumably the user's personal role)
        db.add_permission(user2.into(), project_id, Permission::Read)
            .await
            .unwrap();

        // the grant is now visible to the second user's session
        assert!(db2
            .has_permission(project_id, Permission::Read)
            .await
            .unwrap());
    }
733

734
    #[allow(clippy::too_many_lines)]
735
    async fn update_projects(
2✔
736
        app_ctx: &ProPostgresContext<NoTls>,
2✔
737
        session: &UserSession,
2✔
738
        project_id: ProjectId,
2✔
739
    ) {
2✔
740
        let db = app_ctx.session_context(session.clone()).db();
2✔
741

742
        let project = db
2✔
743
            .load_project_version(project_id, LoadVersion::Latest)
2✔
744
            .await
90✔
745
            .unwrap();
2✔
746

747
        let layer_workflow_id = db
2✔
748
            .register_workflow(Workflow {
2✔
749
                operator: TypedOperator::Vector(
2✔
750
                    MockPointSource {
2✔
751
                        params: MockPointSourceParams {
2✔
752
                            points: vec![Coordinate2D::new(1., 2.); 3],
2✔
753
                        },
2✔
754
                    }
2✔
755
                    .boxed(),
2✔
756
                ),
2✔
757
            })
2✔
758
            .await
6✔
759
            .unwrap();
2✔
760

2✔
761
        assert!(db.load_workflow(&layer_workflow_id).await.is_ok());
6✔
762

763
        let plot_workflow_id = db
2✔
764
            .register_workflow(Workflow {
2✔
765
                operator: Statistics {
2✔
766
                    params: StatisticsParams {
2✔
767
                        column_names: vec![],
2✔
768
                    },
2✔
769
                    sources: MultipleRasterOrSingleVectorSource {
2✔
770
                        source: Raster(vec![]),
2✔
771
                    },
2✔
772
                }
2✔
773
                .boxed()
2✔
774
                .into(),
2✔
775
            })
2✔
776
            .await
6✔
777
            .unwrap();
2✔
778

2✔
779
        assert!(db.load_workflow(&plot_workflow_id).await.is_ok());
6✔
780

781
        // add a plot
782
        let update = UpdateProject {
2✔
783
            id: project.id,
2✔
784
            name: Some("Test9 Updated".into()),
2✔
785
            description: None,
2✔
786
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
2✔
787
                workflow: layer_workflow_id,
2✔
788
                name: "TestLayer".into(),
2✔
789
                symbology: PointSymbology::default().into(),
2✔
790
                visibility: Default::default(),
2✔
791
            })]),
2✔
792
            plots: Some(vec![PlotUpdate::UpdateOrInsert(Plot {
2✔
793
                workflow: plot_workflow_id,
2✔
794
                name: "Test Plot".into(),
2✔
795
            })]),
2✔
796
            bounds: None,
2✔
797
            time_step: None,
2✔
798
        };
2✔
799
        db.update_project(update).await.unwrap();
154✔
800

801
        let versions = db.list_project_versions(project_id).await.unwrap();
12✔
802
        assert_eq!(versions.len(), 2);
2✔
803

804
        // add second plot
805
        let update = UpdateProject {
2✔
806
            id: project.id,
2✔
807
            name: Some("Test9 Updated".into()),
2✔
808
            description: None,
2✔
809
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
2✔
810
                workflow: layer_workflow_id,
2✔
811
                name: "TestLayer".into(),
2✔
812
                symbology: PointSymbology::default().into(),
2✔
813
                visibility: Default::default(),
2✔
814
            })]),
2✔
815
            plots: Some(vec![
2✔
816
                PlotUpdate::UpdateOrInsert(Plot {
2✔
817
                    workflow: plot_workflow_id,
2✔
818
                    name: "Test Plot".into(),
2✔
819
                }),
2✔
820
                PlotUpdate::UpdateOrInsert(Plot {
2✔
821
                    workflow: plot_workflow_id,
2✔
822
                    name: "Test Plot".into(),
2✔
823
                }),
2✔
824
            ]),
2✔
825
            bounds: None,
2✔
826
            time_step: None,
2✔
827
        };
2✔
828
        db.update_project(update).await.unwrap();
146✔
829

830
        let versions = db.list_project_versions(project_id).await.unwrap();
13✔
831
        assert_eq!(versions.len(), 3);
2✔
832

833
        // delete plots
834
        let update = UpdateProject {
2✔
835
            id: project.id,
2✔
836
            name: None,
2✔
837
            description: None,
2✔
838
            layers: None,
2✔
839
            plots: Some(vec![]),
2✔
840
            bounds: None,
2✔
841
            time_step: None,
2✔
842
        };
2✔
843
        db.update_project(update).await.unwrap();
44✔
844

845
        let versions = db.list_project_versions(project_id).await.unwrap();
12✔
846
        assert_eq!(versions.len(), 4);
2✔
847
    }
2✔
848

849
    async fn list_projects(
2✔
850
        app_ctx: &ProPostgresContext<NoTls>,
2✔
851
        session: &UserSession,
2✔
852
    ) -> Vec<ProjectListing> {
2✔
853
        let options = ProjectListOptions {
2✔
854
            filter: ProjectFilter::None,
2✔
855
            order: OrderBy::NameDesc,
2✔
856
            offset: 0,
2✔
857
            limit: 2,
2✔
858
        };
2✔
859

2✔
860
        let db = app_ctx.session_context(session.clone()).db();
2✔
861

862
        let projects = db.list_projects(options).await.unwrap();
22✔
863

2✔
864
        assert_eq!(projects.len(), 2);
2✔
865
        assert_eq!(projects[0].name, "Test9");
2✔
866
        assert_eq!(projects[1].name, "Test8");
2✔
867
        projects
2✔
868
    }
2✔
869

870
    async fn create_projects(app_ctx: &ProPostgresContext<NoTls>, session: &UserSession) {
2✔
871
        let db = app_ctx.session_context(session.clone()).db();
2✔
872

873
        for i in 0..10 {
22✔
874
            let create = CreateProject {
20✔
875
                name: format!("Test{i}"),
20✔
876
                description: format!("Test{}", 10 - i),
20✔
877
                bounds: STRectangle::new(
20✔
878
                    SpatialReferenceOption::Unreferenced,
20✔
879
                    0.,
20✔
880
                    0.,
20✔
881
                    1.,
20✔
882
                    1.,
20✔
883
                    0,
20✔
884
                    1,
20✔
885
                )
20✔
886
                .unwrap(),
20✔
887
                time_step: None,
20✔
888
            };
20✔
889
            db.create_project(create).await.unwrap();
232✔
890
        }
891
    }
2✔
892

893
    async fn user_reg_login(app_ctx: &ProPostgresContext<NoTls>) -> UserId {
1✔
894
        let user_registration = UserRegistration {
1✔
895
            email: "foo@example.com".into(),
1✔
896
            password: "secret123".into(),
1✔
897
            real_name: "Foo Bar".into(),
1✔
898
        };
1✔
899

900
        let user_id = app_ctx.register_user(user_registration).await.unwrap();
11✔
901

1✔
902
        let credentials = UserCredentials {
1✔
903
            email: "foo@example.com".into(),
1✔
904
            password: "secret123".into(),
1✔
905
        };
1✔
906

907
        let session = app_ctx.login(credentials).await.unwrap();
5✔
908

1✔
909
        let db = app_ctx.session_context(session.clone()).db();
1✔
910

1✔
911
        app_ctx.session_by_id(session.id).await.unwrap();
6✔
912

1✔
913
        db.logout().await.unwrap();
3✔
914

1✔
915
        assert!(app_ctx.session_by_id(session.id).await.is_err());
4✔
916

917
        user_id
1✔
918
    }
1✔
919

920
    //TODO: No duplicate tests for postgres and hashmap implementation possible?
921
    async fn external_user_login_twice(app_ctx: &ProPostgresContext<NoTls>) -> UserSession {
1✔
922
        let external_user_claims = ExternalUserClaims {
1✔
923
            external_id: SubjectIdentifier::new("Foo bar Id".into()),
1✔
924
            email: "foo@bar.de".into(),
1✔
925
            real_name: "Foo Bar".into(),
1✔
926
        };
1✔
927
        let duration = Duration::minutes(30);
1✔
928

929
        //NEW
930
        let login_result = app_ctx
1✔
931
            .login_external(external_user_claims.clone(), duration)
1✔
932
            .await;
21✔
933
        assert!(login_result.is_ok());
1✔
934

935
        let session_1 = login_result.unwrap();
1✔
936
        let user_id = session_1.user.id; //TODO: Not a deterministic test.
1✔
937

1✔
938
        let db1 = app_ctx.session_context(session_1.clone()).db();
1✔
939

1✔
940
        assert!(session_1.user.email.is_some());
1✔
941
        assert_eq!(session_1.user.email.unwrap(), "foo@bar.de");
1✔
942
        assert!(session_1.user.real_name.is_some());
1✔
943
        assert_eq!(session_1.user.real_name.unwrap(), "Foo Bar");
1✔
944

945
        let expected_duration = session_1.created + duration;
1✔
946
        assert_eq!(session_1.valid_until, expected_duration);
1✔
947

948
        assert!(app_ctx.session_by_id(session_1.id).await.is_ok());
6✔
949

950
        assert!(db1.logout().await.is_ok());
3✔
951

952
        assert!(app_ctx.session_by_id(session_1.id).await.is_err());
4✔
953

954
        let duration = Duration::minutes(10);
1✔
955
        let login_result = app_ctx
1✔
956
            .login_external(external_user_claims.clone(), duration)
1✔
957
            .await;
11✔
958
        assert!(login_result.is_ok());
1✔
959

960
        let session_2 = login_result.unwrap();
1✔
961
        let result = session_2.clone();
1✔
962

1✔
963
        assert!(session_2.user.email.is_some()); //TODO: Technically, user details could change for each login. For simplicity, this is not covered yet.
1✔
964
        assert_eq!(session_2.user.email.unwrap(), "foo@bar.de");
1✔
965
        assert!(session_2.user.real_name.is_some());
1✔
966
        assert_eq!(session_2.user.real_name.unwrap(), "Foo Bar");
1✔
967
        assert_eq!(session_2.user.id, user_id);
1✔
968

969
        let expected_duration = session_2.created + duration;
1✔
970
        assert_eq!(session_2.valid_until, expected_duration);
1✔
971

972
        assert!(app_ctx.session_by_id(session_2.id).await.is_ok());
6✔
973

974
        result
1✔
975
    }
1✔
976

977
    async fn anonymous(app_ctx: &ProPostgresContext<NoTls>) {
2✔
978
        let now: DateTime = chrono::offset::Utc::now().into();
2✔
979
        let session = app_ctx.create_anonymous_session().await.unwrap();
29✔
980
        let then: DateTime = chrono::offset::Utc::now().into();
2✔
981

2✔
982
        assert!(session.created >= now && session.created <= then);
2✔
983
        assert!(session.valid_until > session.created);
2✔
984

985
        let session = app_ctx.session_by_id(session.id).await.unwrap();
42✔
986

2✔
987
        let db = app_ctx.session_context(session.clone()).db();
2✔
988

2✔
989
        db.logout().await.unwrap();
6✔
990

2✔
991
        assert!(app_ctx.session_by_id(session.id).await.is_err());
8✔
992
    }
2✔
993

994
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
995
    async fn it_persists_workflows() {
1✔
996
        with_pro_temp_context(|app_ctx, _pg_config| async move {
1✔
997
            let workflow = Workflow {
1✔
998
                operator: TypedOperator::Vector(
1✔
999
                    MockPointSource {
1✔
1000
                        params: MockPointSourceParams {
1✔
1001
                            points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1002
                        },
1✔
1003
                    }
1✔
1004
                    .boxed(),
1✔
1005
                ),
1✔
1006
            };
1✔
1007

1008
            let session = app_ctx.create_anonymous_session().await.unwrap();
15✔
1009
let ctx = app_ctx.session_context(session);
1✔
1010

1✔
1011
            let db = ctx
1✔
1012
                .db();
1✔
1013
            let id = db
1✔
1014
                .register_workflow(workflow)
1✔
1015
                .await
3✔
1016
                .unwrap();
1✔
1017

1✔
1018
            drop(ctx);
1✔
1019

1020
            let workflow = db.load_workflow(&id).await.unwrap();
3✔
1021

1✔
1022
            let json = serde_json::to_string(&workflow).unwrap();
1✔
1023
            assert_eq!(json, r#"{"type":"Vector","operator":{"type":"MockPointSource","params":{"points":[{"x":1.0,"y":2.0},{"x":1.0,"y":2.0},{"x":1.0,"y":2.0}]}}}"#);
1✔
1024
        })
1✔
1025
        .await;
12✔
1026
    }
1027

1028
    #[allow(clippy::too_many_lines)]
1029
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1030
    async fn it_persists_datasets() {
1✔
1031
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1032
            let loading_info = OgrSourceDataset {
1✔
1033
                file_name: PathBuf::from("test.csv"),
1✔
1034
                layer_name: "test.csv".to_owned(),
1✔
1035
                data_type: Some(VectorDataType::MultiPoint),
1✔
1036
                time: OgrSourceDatasetTimeType::Start {
1✔
1037
                    start_field: "start".to_owned(),
1✔
1038
                    start_format: OgrSourceTimeFormat::Auto,
1✔
1039
                    duration: OgrSourceDurationSpec::Zero,
1✔
1040
                },
1✔
1041
                default_geometry: None,
1✔
1042
                columns: Some(OgrSourceColumnSpec {
1✔
1043
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
1044
                        header: CsvHeader::Auto,
1✔
1045
                    }),
1✔
1046
                    x: "x".to_owned(),
1✔
1047
                    y: None,
1✔
1048
                    int: vec![],
1✔
1049
                    float: vec![],
1✔
1050
                    text: vec![],
1✔
1051
                    bool: vec![],
1✔
1052
                    datetime: vec![],
1✔
1053
                    rename: None,
1✔
1054
                }),
1✔
1055
                force_ogr_time_filter: false,
1✔
1056
                force_ogr_spatial_filter: false,
1✔
1057
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1058
                sql_query: None,
1✔
1059
                attribute_query: None,
1✔
1060
                cache_ttl: CacheTtlSeconds::default(),
1✔
1061
            };
1✔
1062

1✔
1063
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1064
                OgrSourceDataset,
1✔
1065
                VectorResultDescriptor,
1✔
1066
                VectorQueryRectangle,
1✔
1067
            > {
1✔
1068
                loading_info: loading_info.clone(),
1✔
1069
                result_descriptor: VectorResultDescriptor {
1✔
1070
                    data_type: VectorDataType::MultiPoint,
1✔
1071
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1072
                    columns: [(
1✔
1073
                        "foo".to_owned(),
1✔
1074
                        VectorColumnInfo {
1✔
1075
                            data_type: FeatureDataType::Float,
1✔
1076
                            measurement: Measurement::Unitless,
1✔
1077
                        },
1✔
1078
                    )]
1✔
1079
                    .into_iter()
1✔
1080
                    .collect(),
1✔
1081
                    time: None,
1✔
1082
                    bbox: None,
1✔
1083
                },
1✔
1084
                phantom: Default::default(),
1✔
1085
            });
1✔
1086

1087
            let session = app_ctx.create_anonymous_session().await.unwrap();
14✔
1088

1✔
1089
            let dataset_name = DatasetName::new(Some(session.user.id.to_string()), "my_dataset");
1✔
1090

1✔
1091
            let db = app_ctx.session_context(session.clone()).db();
1✔
1092
            let wrap = db.wrap_meta_data(meta_data);
1✔
1093
            let DatasetIdAndName {
1094
                id: dataset_id,
1✔
1095
                name: dataset_name,
1✔
1096
            } = db
1✔
1097
                .add_dataset(
1✔
1098
                    AddDataset {
1✔
1099
                        name: Some(dataset_name.clone()),
1✔
1100
                        display_name: "Ogr Test".to_owned(),
1✔
1101
                        description: "desc".to_owned(),
1✔
1102
                        source_operator: "OgrSource".to_owned(),
1✔
1103
                        symbology: None,
1✔
1104
                        provenance: Some(vec![Provenance {
1✔
1105
                            citation: "citation".to_owned(),
1✔
1106
                            license: "license".to_owned(),
1✔
1107
                            uri: "uri".to_owned(),
1✔
1108
                        }]),
1✔
1109
                    },
1✔
1110
                    wrap,
1✔
1111
                )
1✔
1112
                .await
166✔
1113
                .unwrap();
1✔
1114

1115
            let datasets = db
1✔
1116
                .list_datasets(DatasetListOptions {
1✔
1117
                    filter: None,
1✔
1118
                    order: crate::datasets::listing::OrderBy::NameAsc,
1✔
1119
                    offset: 0,
1✔
1120
                    limit: 10,
1✔
1121
                })
1✔
1122
                .await
3✔
1123
                .unwrap();
1✔
1124

1✔
1125
            assert_eq!(datasets.len(), 1);
1✔
1126

1127
            assert_eq!(
1✔
1128
                datasets[0],
1✔
1129
                DatasetListing {
1✔
1130
                    id: dataset_id,
1✔
1131
                    name: dataset_name,
1✔
1132
                    display_name: "Ogr Test".to_owned(),
1✔
1133
                    description: "desc".to_owned(),
1✔
1134
                    source_operator: "OgrSource".to_owned(),
1✔
1135
                    symbology: None,
1✔
1136
                    tags: vec![],
1✔
1137
                    result_descriptor: TypedResultDescriptor::Vector(VectorResultDescriptor {
1✔
1138
                        data_type: VectorDataType::MultiPoint,
1✔
1139
                        spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1140
                        columns: [(
1✔
1141
                            "foo".to_owned(),
1✔
1142
                            VectorColumnInfo {
1✔
1143
                                data_type: FeatureDataType::Float,
1✔
1144
                                measurement: Measurement::Unitless
1✔
1145
                            }
1✔
1146
                        )]
1✔
1147
                        .into_iter()
1✔
1148
                        .collect(),
1✔
1149
                        time: None,
1✔
1150
                        bbox: None,
1✔
1151
                    })
1✔
1152
                    .into(),
1✔
1153
                },
1✔
1154
            );
1✔
1155

1156
            let provenance = db.load_provenance(&dataset_id).await.unwrap();
3✔
1157

1✔
1158
            assert_eq!(
1✔
1159
                provenance,
1✔
1160
                ProvenanceOutput {
1✔
1161
                    data: dataset_id.into(),
1✔
1162
                    provenance: Some(vec![Provenance {
1✔
1163
                        citation: "citation".to_owned(),
1✔
1164
                        license: "license".to_owned(),
1✔
1165
                        uri: "uri".to_owned(),
1✔
1166
                    }])
1✔
1167
                }
1✔
1168
            );
1✔
1169

1170
            let meta_data: Box<dyn MetaData<OgrSourceDataset, _, _>> =
1✔
1171
                db.meta_data(&dataset_id.into()).await.unwrap();
7✔
1172

1173
            assert_eq!(
1✔
1174
                meta_data
1✔
1175
                    .loading_info(VectorQueryRectangle {
1✔
1176
                        spatial_bounds: BoundingBox2D::new_unchecked(
1✔
1177
                            (-180., -90.).into(),
1✔
1178
                            (180., 90.).into()
1✔
1179
                        ),
1✔
1180
                        time_interval: TimeInterval::default(),
1✔
1181
                        spatial_resolution: SpatialResolution::zero_point_one(),
1✔
1182
                    })
1✔
1183
                    .await
×
1184
                    .unwrap(),
1✔
1185
                loading_info
1186
            );
1187
        })
1✔
1188
        .await;
10✔
1189
    }
1190

1191
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1192
    async fn it_persists_uploads() {
1✔
1193
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1194
            let id = UploadId::from_str("2de18cd8-4a38-4111-a445-e3734bc18a80").unwrap();
1✔
1195
            let input = Upload {
1✔
1196
                id,
1✔
1197
                files: vec![FileUpload {
1✔
1198
                    id: FileId::from_str("e80afab0-831d-4d40-95d6-1e4dfd277e72").unwrap(),
1✔
1199
                    name: "test.csv".to_owned(),
1✔
1200
                    byte_size: 1337,
1✔
1201
                }],
1✔
1202
            };
1✔
1203

1204
            let session = app_ctx.create_anonymous_session().await.unwrap();
14✔
1205

1✔
1206
            let db = app_ctx.session_context(session.clone()).db();
1✔
1207

1✔
1208
            db.create_upload(input.clone()).await.unwrap();
10✔
1209

1210
            let upload = db.load_upload(id).await.unwrap();
2✔
1211

1✔
1212
            assert_eq!(upload, input);
1✔
1213
        })
1✔
1214
        .await;
10✔
1215
    }
1216

1217
    #[allow(clippy::too_many_lines)]
1218
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1219
    async fn it_persists_layer_providers() {
1✔
1220
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1221
            let db = app_ctx.session_context(UserSession::admin_session()).db();
1✔
1222

1✔
1223
            let provider = NetCdfCfDataProviderDefinition {
1✔
1224
                name: "netcdfcf".to_string(),
1✔
1225
                path: test_data!("netcdf4d/").into(),
1✔
1226
                overviews: test_data!("netcdf4d/overviews/").into(),
1✔
1227
                cache_ttl: CacheTtlSeconds::new(0),
1✔
1228
            };
1✔
1229

1230
            let provider_id = db.add_layer_provider(provider.into()).await.unwrap();
1✔
1231

1232
            let providers = db
1✔
1233
                .list_layer_providers(LayerProviderListingOptions {
1✔
1234
                    offset: 0,
1✔
1235
                    limit: 10,
1✔
1236
                })
1✔
1237
                .await
×
1238
                .unwrap();
1✔
1239

1✔
1240
            assert_eq!(providers.len(), 1);
1✔
1241

1242
            assert_eq!(
1✔
1243
                providers[0],
1✔
1244
                LayerProviderListing {
1✔
1245
                    id: provider_id,
1✔
1246
                    name: "netcdfcf".to_owned(),
1✔
1247
                    description: "NetCdfCfProviderDefinition".to_owned(),
1✔
1248
                }
1✔
1249
            );
1✔
1250

1251
            let provider = db.load_layer_provider(provider_id).await.unwrap();
1✔
1252

1253
            let datasets = provider
1✔
1254
                .load_layer_collection(
1255
                    &provider.get_root_layer_collection_id().await.unwrap(),
1✔
1256
                    LayerCollectionListOptions {
1✔
1257
                        offset: 0,
1✔
1258
                        limit: 10,
1✔
1259
                    },
1✔
1260
                )
1261
                .await
4✔
1262
                .unwrap();
1✔
1263

1✔
1264
            assert_eq!(datasets.items.len(), 3);
1✔
1265
        })
1✔
1266
        .await;
10✔
1267
    }
1268

1269
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1270
    async fn it_lists_only_permitted_datasets() {
1✔
1271
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1272
            let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1273
            let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1274

1✔
1275
            let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1276
            let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1277

1✔
1278
            let descriptor = VectorResultDescriptor {
1✔
1279
                data_type: VectorDataType::Data,
1✔
1280
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1281
                columns: Default::default(),
1✔
1282
                time: None,
1✔
1283
                bbox: None,
1✔
1284
            };
1✔
1285

1✔
1286
            let ds = AddDataset {
1✔
1287
                name: None,
1✔
1288
                display_name: "OgrDataset".to_string(),
1✔
1289
                description: "My Ogr dataset".to_string(),
1✔
1290
                source_operator: "OgrSource".to_string(),
1✔
1291
                symbology: None,
1✔
1292
                provenance: None,
1✔
1293
            };
1✔
1294

1✔
1295
            let meta = StaticMetaData {
1✔
1296
                loading_info: OgrSourceDataset {
1✔
1297
                    file_name: Default::default(),
1✔
1298
                    layer_name: String::new(),
1✔
1299
                    data_type: None,
1✔
1300
                    time: Default::default(),
1✔
1301
                    default_geometry: None,
1✔
1302
                    columns: None,
1✔
1303
                    force_ogr_time_filter: false,
1✔
1304
                    force_ogr_spatial_filter: false,
1✔
1305
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1306
                    sql_query: None,
1✔
1307
                    attribute_query: None,
1✔
1308
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1309
                },
1✔
1310
                result_descriptor: descriptor.clone(),
1✔
1311
                phantom: Default::default(),
1✔
1312
            };
1✔
1313

1✔
1314
            let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1315

1316
            let _id = db1.add_dataset(ds, meta).await.unwrap();
171✔
1317

1318
            let list1 = db1
1✔
1319
                .list_datasets(DatasetListOptions {
1✔
1320
                    filter: None,
1✔
1321
                    order: crate::datasets::listing::OrderBy::NameAsc,
1✔
1322
                    offset: 0,
1✔
1323
                    limit: 1,
1✔
1324
                })
1✔
1325
                .await
3✔
1326
                .unwrap();
1✔
1327

1✔
1328
            assert_eq!(list1.len(), 1);
1✔
1329

1330
            let list2 = db2
1✔
1331
                .list_datasets(DatasetListOptions {
1✔
1332
                    filter: None,
1✔
1333
                    order: crate::datasets::listing::OrderBy::NameAsc,
1✔
1334
                    offset: 0,
1✔
1335
                    limit: 1,
1✔
1336
                })
1✔
1337
                .await
3✔
1338
                .unwrap();
1✔
1339

1✔
1340
            assert_eq!(list2.len(), 0);
1✔
1341
        })
1✔
1342
        .await;
11✔
1343
    }
1344

1345
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1346
    async fn it_shows_only_permitted_provenance() {
1✔
1347
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1348
            let session1 = app_ctx.create_anonymous_session().await.unwrap();
1✔
1349
            let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1350

1✔
1351
            let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1352
            let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1353

1✔
1354
            let descriptor = VectorResultDescriptor {
1✔
1355
                data_type: VectorDataType::Data,
1✔
1356
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1357
                columns: Default::default(),
1✔
1358
                time: None,
1✔
1359
                bbox: None,
1✔
1360
            };
1✔
1361

1✔
1362
            let ds = AddDataset {
1✔
1363
                name: None,
1✔
1364
                display_name: "OgrDataset".to_string(),
1✔
1365
                description: "My Ogr dataset".to_string(),
1✔
1366
                source_operator: "OgrSource".to_string(),
1✔
1367
                symbology: None,
1✔
1368
                provenance: None,
1✔
1369
            };
1✔
1370

1✔
1371
            let meta = StaticMetaData {
1✔
1372
                loading_info: OgrSourceDataset {
1✔
1373
                    file_name: Default::default(),
1✔
1374
                    layer_name: String::new(),
1✔
1375
                    data_type: None,
1✔
1376
                    time: Default::default(),
1✔
1377
                    default_geometry: None,
1✔
1378
                    columns: None,
1✔
1379
                    force_ogr_time_filter: false,
1✔
1380
                    force_ogr_spatial_filter: false,
1✔
1381
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1382
                    sql_query: None,
1✔
1383
                    attribute_query: None,
1✔
1384
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1385
                },
1✔
1386
                result_descriptor: descriptor.clone(),
1✔
1387
                phantom: Default::default(),
1✔
1388
            };
1✔
1389

1✔
1390
            let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1391

1392
            let id = db1.add_dataset(ds, meta).await.unwrap().id;
170✔
1393

1394
            assert!(db1.load_provenance(&id).await.is_ok());
3✔
1395

1396
            assert!(db2.load_provenance(&id).await.is_err());
4✔
1397
        })
1✔
1398
        .await;
12✔
1399
    }
1400

1401
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1402
    async fn it_updates_permissions() {
1✔
1403
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1404
            let session1 = app_ctx.create_anonymous_session().await.unwrap();
14✔
1405
            let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1406

1✔
1407
            let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1408
            let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1409

1✔
1410
            let descriptor = VectorResultDescriptor {
1✔
1411
                data_type: VectorDataType::Data,
1✔
1412
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1413
                columns: Default::default(),
1✔
1414
                time: None,
1✔
1415
                bbox: None,
1✔
1416
            };
1✔
1417

1✔
1418
            let ds = AddDataset {
1✔
1419
                name: None,
1✔
1420
                display_name: "OgrDataset".to_string(),
1✔
1421
                description: "My Ogr dataset".to_string(),
1✔
1422
                source_operator: "OgrSource".to_string(),
1✔
1423
                symbology: None,
1✔
1424
                provenance: None,
1✔
1425
            };
1✔
1426

1✔
1427
            let meta = StaticMetaData {
1✔
1428
                loading_info: OgrSourceDataset {
1✔
1429
                    file_name: Default::default(),
1✔
1430
                    layer_name: String::new(),
1✔
1431
                    data_type: None,
1✔
1432
                    time: Default::default(),
1✔
1433
                    default_geometry: None,
1✔
1434
                    columns: None,
1✔
1435
                    force_ogr_time_filter: false,
1✔
1436
                    force_ogr_spatial_filter: false,
1✔
1437
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1438
                    sql_query: None,
1✔
1439
                    attribute_query: None,
1✔
1440
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1441
                },
1✔
1442
                result_descriptor: descriptor.clone(),
1✔
1443
                phantom: Default::default(),
1✔
1444
            };
1✔
1445

1✔
1446
            let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1447

1448
            let id = db1.add_dataset(ds, meta).await.unwrap().id;
171✔
1449

1450
            assert!(db1.load_dataset(&id).await.is_ok());
4✔
1451

1452
            assert!(db2.load_dataset(&id).await.is_err());
3✔
1453

1454
            db1.add_permission(session2.user.id.into(), id, Permission::Read)
1✔
1455
                .await
7✔
1456
                .unwrap();
1✔
1457

1458
            assert!(db2.load_dataset(&id).await.is_ok());
3✔
1459
        })
1✔
1460
        .await;
12✔
1461
    }
1462

1463
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1464
    async fn it_uses_roles_for_permissions() {
1✔
1465
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1466
            let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1467
            let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1468

1✔
1469
            let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1470
            let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1471

1✔
1472
            let descriptor = VectorResultDescriptor {
1✔
1473
                data_type: VectorDataType::Data,
1✔
1474
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1475
                columns: Default::default(),
1✔
1476
                time: None,
1✔
1477
                bbox: None,
1✔
1478
            };
1✔
1479

1✔
1480
            let ds = AddDataset {
1✔
1481
                name: None,
1✔
1482
                display_name: "OgrDataset".to_string(),
1✔
1483
                description: "My Ogr dataset".to_string(),
1✔
1484
                source_operator: "OgrSource".to_string(),
1✔
1485
                symbology: None,
1✔
1486
                provenance: None,
1✔
1487
            };
1✔
1488

1✔
1489
            let meta = StaticMetaData {
1✔
1490
                loading_info: OgrSourceDataset {
1✔
1491
                    file_name: Default::default(),
1✔
1492
                    layer_name: String::new(),
1✔
1493
                    data_type: None,
1✔
1494
                    time: Default::default(),
1✔
1495
                    default_geometry: None,
1✔
1496
                    columns: None,
1✔
1497
                    force_ogr_time_filter: false,
1✔
1498
                    force_ogr_spatial_filter: false,
1✔
1499
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1500
                    sql_query: None,
1✔
1501
                    attribute_query: None,
1✔
1502
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1503
                },
1✔
1504
                result_descriptor: descriptor.clone(),
1✔
1505
                phantom: Default::default(),
1✔
1506
            };
1✔
1507

1✔
1508
            let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1509

1510
            let id = db1.add_dataset(ds, meta).await.unwrap().id;
171✔
1511

1512
            assert!(db1.load_dataset(&id).await.is_ok());
4✔
1513

1514
            assert!(db2.load_dataset(&id).await.is_err());
3✔
1515

1516
            db1.add_permission(session2.user.id.into(), id, Permission::Read)
1✔
1517
                .await
7✔
1518
                .unwrap();
1✔
1519

1520
            assert!(db2.load_dataset(&id).await.is_ok());
3✔
1521
        })
1✔
1522
        .await;
12✔
1523
    }
1524

1525
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1526
    async fn it_secures_meta_data() {
1✔
1527
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1528
            let session1 = app_ctx.create_anonymous_session().await.unwrap();
14✔
1529
            let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1530

1✔
1531
            let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1532
            let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1533

1✔
1534
            let descriptor = VectorResultDescriptor {
1✔
1535
                data_type: VectorDataType::Data,
1✔
1536
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1537
                columns: Default::default(),
1✔
1538
                time: None,
1✔
1539
                bbox: None,
1✔
1540
            };
1✔
1541

1✔
1542
            let ds = AddDataset {
1✔
1543
                name: None,
1✔
1544
                display_name: "OgrDataset".to_string(),
1✔
1545
                description: "My Ogr dataset".to_string(),
1✔
1546
                source_operator: "OgrSource".to_string(),
1✔
1547
                symbology: None,
1✔
1548
                provenance: None,
1✔
1549
            };
1✔
1550

1✔
1551
            let meta = StaticMetaData {
1✔
1552
                loading_info: OgrSourceDataset {
1✔
1553
                    file_name: Default::default(),
1✔
1554
                    layer_name: String::new(),
1✔
1555
                    data_type: None,
1✔
1556
                    time: Default::default(),
1✔
1557
                    default_geometry: None,
1✔
1558
                    columns: None,
1✔
1559
                    force_ogr_time_filter: false,
1✔
1560
                    force_ogr_spatial_filter: false,
1✔
1561
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1562
                    sql_query: None,
1✔
1563
                    attribute_query: None,
1✔
1564
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1565
                },
1✔
1566
                result_descriptor: descriptor.clone(),
1✔
1567
                phantom: Default::default(),
1✔
1568
            };
1✔
1569

1✔
1570
            let meta = db1.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1571

1572
            let id = db1.add_dataset(ds, meta).await.unwrap().id;
171✔
1573

1574
            let meta: geoengine_operators::util::Result<
1✔
1575
                Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1576
            > = db1.meta_data(&id.into()).await;
8✔
1577

1578
            assert!(meta.is_ok());
1✔
1579

1580
            let meta: geoengine_operators::util::Result<
1✔
1581
                Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1582
            > = db2.meta_data(&id.into()).await;
3✔
1583

1584
            assert!(meta.is_err());
1✔
1585

1586
            db1.add_permission(session2.user.id.into(), id, Permission::Read)
1✔
1587
                .await
6✔
1588
                .unwrap();
1✔
1589

1590
            let meta: geoengine_operators::util::Result<
1✔
1591
                Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1592
            > = db2.meta_data(&id.into()).await;
6✔
1593

1594
            assert!(meta.is_ok());
1✔
1595
        })
1✔
1596
        .await;
11✔
1597
    }
1598

1599
    #[allow(clippy::too_many_lines)]
1600
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1601
    async fn it_loads_all_meta_data_types() {
1✔
1602
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1603
            let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
1604

1✔
1605
            let db = app_ctx.session_context(session.clone()).db();
1✔
1606

1✔
1607
            let vector_descriptor = VectorResultDescriptor {
1✔
1608
                data_type: VectorDataType::Data,
1✔
1609
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1610
                columns: Default::default(),
1✔
1611
                time: None,
1✔
1612
                bbox: None,
1✔
1613
            };
1✔
1614

1✔
1615
            let raster_descriptor = RasterResultDescriptor {
1✔
1616
                data_type: RasterDataType::U8,
1✔
1617
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1618
                measurement: Default::default(),
1✔
1619
                time: None,
1✔
1620
                bbox: None,
1✔
1621
                resolution: None,
1✔
1622
            };
1✔
1623

1✔
1624
            let vector_ds = AddDataset {
1✔
1625
                name: None,
1✔
1626
                display_name: "OgrDataset".to_string(),
1✔
1627
                description: "My Ogr dataset".to_string(),
1✔
1628
                source_operator: "OgrSource".to_string(),
1✔
1629
                symbology: None,
1✔
1630
                provenance: None,
1✔
1631
            };
1✔
1632

1✔
1633
            let raster_ds = AddDataset {
1✔
1634
                name: None,
1✔
1635
                display_name: "GdalDataset".to_string(),
1✔
1636
                description: "My Gdal dataset".to_string(),
1✔
1637
                source_operator: "GdalSource".to_string(),
1✔
1638
                symbology: None,
1✔
1639
                provenance: None,
1✔
1640
            };
1✔
1641

1✔
1642
            let gdal_params = GdalDatasetParameters {
1✔
1643
                file_path: Default::default(),
1✔
1644
                rasterband_channel: 0,
1✔
1645
                geo_transform: GdalDatasetGeoTransform {
1✔
1646
                    origin_coordinate: Default::default(),
1✔
1647
                    x_pixel_size: 0.0,
1✔
1648
                    y_pixel_size: 0.0,
1✔
1649
                },
1✔
1650
                width: 0,
1✔
1651
                height: 0,
1✔
1652
                file_not_found_handling: FileNotFoundHandling::NoData,
1✔
1653
                no_data_value: None,
1✔
1654
                properties_mapping: None,
1✔
1655
                gdal_open_options: None,
1✔
1656
                gdal_config_options: None,
1✔
1657
                allow_alphaband_as_mask: false,
1✔
1658
                retry: None,
1✔
1659
            };
1✔
1660

1✔
1661
            let meta = StaticMetaData {
1✔
1662
                loading_info: OgrSourceDataset {
1✔
1663
                    file_name: Default::default(),
1✔
1664
                    layer_name: String::new(),
1✔
1665
                    data_type: None,
1✔
1666
                    time: Default::default(),
1✔
1667
                    default_geometry: None,
1✔
1668
                    columns: None,
1✔
1669
                    force_ogr_time_filter: false,
1✔
1670
                    force_ogr_spatial_filter: false,
1✔
1671
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1672
                    sql_query: None,
1✔
1673
                    attribute_query: None,
1✔
1674
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1675
                },
1✔
1676
                result_descriptor: vector_descriptor.clone(),
1✔
1677
                phantom: Default::default(),
1✔
1678
            };
1✔
1679

1✔
1680
            let meta = db.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1681

1682
            let id = db.add_dataset(vector_ds, meta).await.unwrap().id;
162✔
1683

1684
            let meta: geoengine_operators::util::Result<
1✔
1685
                Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1686
            > = db.meta_data(&id.into()).await;
7✔
1687

1688
            assert!(meta.is_ok());
1✔
1689

1690
            let meta = GdalMetaDataRegular {
1✔
1691
                result_descriptor: raster_descriptor.clone(),
1✔
1692
                params: gdal_params.clone(),
1✔
1693
                time_placeholders: Default::default(),
1✔
1694
                data_time: Default::default(),
1✔
1695
                step: TimeStep {
1✔
1696
                    granularity: TimeGranularity::Millis,
1✔
1697
                    step: 0,
1✔
1698
                },
1✔
1699
                cache_ttl: CacheTtlSeconds::default(),
1✔
1700
            };
1✔
1701

1✔
1702
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataRegular(meta));
1✔
1703

1704
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
8✔
1705

1706
            let meta: geoengine_operators::util::Result<
1✔
1707
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1708
            > = db.meta_data(&id.into()).await;
6✔
1709

1710
            assert!(meta.is_ok());
1✔
1711

1712
            let meta = GdalMetaDataStatic {
1✔
1713
                time: None,
1✔
1714
                params: gdal_params.clone(),
1✔
1715
                result_descriptor: raster_descriptor.clone(),
1✔
1716
                cache_ttl: CacheTtlSeconds::default(),
1✔
1717
            };
1✔
1718

1✔
1719
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalStatic(meta));
1✔
1720

1721
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1722

1723
            let meta: geoengine_operators::util::Result<
1✔
1724
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1725
            > = db.meta_data(&id.into()).await;
6✔
1726

1727
            assert!(meta.is_ok());
1✔
1728

1729
            let meta = GdalMetaDataList {
1✔
1730
                result_descriptor: raster_descriptor.clone(),
1✔
1731
                params: vec![],
1✔
1732
            };
1✔
1733

1✔
1734
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataList(meta));
1✔
1735

1736
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1737

1738
            let meta: geoengine_operators::util::Result<
1✔
1739
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1740
            > = db.meta_data(&id.into()).await;
6✔
1741

1742
            assert!(meta.is_ok());
1✔
1743

1744
            let meta = GdalMetadataNetCdfCf {
1✔
1745
                result_descriptor: raster_descriptor.clone(),
1✔
1746
                params: gdal_params.clone(),
1✔
1747
                start: TimeInstance::MIN,
1✔
1748
                end: TimeInstance::MAX,
1✔
1749
                step: TimeStep {
1✔
1750
                    granularity: TimeGranularity::Millis,
1✔
1751
                    step: 0,
1✔
1752
                },
1✔
1753
                band_offset: 0,
1✔
1754
                cache_ttl: CacheTtlSeconds::default(),
1✔
1755
            };
1✔
1756

1✔
1757
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetadataNetCdfCf(meta));
1✔
1758

1759
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
7✔
1760

1761
            let meta: geoengine_operators::util::Result<
1✔
1762
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1763
            > = db.meta_data(&id.into()).await;
6✔
1764

1765
            assert!(meta.is_ok());
1✔
1766
        })
1✔
1767
        .await;
12✔
1768
    }
1769

1770
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1771
    async fn it_secures_uploads() {
1✔
1772
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1773
            let session1 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1774
            let session2 = app_ctx.create_anonymous_session().await.unwrap();
15✔
1775

1✔
1776
            let db1 = app_ctx.session_context(session1.clone()).db();
1✔
1777
            let db2 = app_ctx.session_context(session2.clone()).db();
1✔
1778

1✔
1779
            let upload_id = UploadId::new();
1✔
1780

1✔
1781
            let upload = Upload {
1✔
1782
                id: upload_id,
1✔
1783
                files: vec![FileUpload {
1✔
1784
                    id: FileId::new(),
1✔
1785
                    name: "test.bin".to_owned(),
1✔
1786
                    byte_size: 1024,
1✔
1787
                }],
1✔
1788
            };
1✔
1789

1✔
1790
            db1.create_upload(upload).await.unwrap();
12✔
1791

1792
            assert!(db1.load_upload(upload_id).await.is_ok());
3✔
1793

1794
            assert!(db2.load_upload(upload_id).await.is_err());
3✔
1795
        })
1✔
1796
        .await;
10✔
1797
    }
1798

1799
    #[allow(clippy::too_many_lines)]
1800
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1801
    async fn it_collects_layers() {
1✔
1802
        with_pro_temp_context(|app_ctx, _| async move {
1✔
1803
            let session = admin_login(&app_ctx).await;
7✔
1804

1805
            let layer_db = app_ctx.session_context(session).db();
1✔
1806

1✔
1807
            let workflow = Workflow {
1✔
1808
                operator: TypedOperator::Vector(
1✔
1809
                    MockPointSource {
1✔
1810
                        params: MockPointSourceParams {
1✔
1811
                            points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1812
                        },
1✔
1813
                    }
1✔
1814
                    .boxed(),
1✔
1815
                ),
1✔
1816
            };
1✔
1817

1818
            let root_collection_id = layer_db.get_root_layer_collection_id().await.unwrap();
1✔
1819

1820
            let layer1 = layer_db
1✔
1821
                .add_layer(
1✔
1822
                    AddLayer {
1✔
1823
                        name: "Layer1".to_string(),
1✔
1824
                        description: "Layer 1".to_string(),
1✔
1825
                        symbology: None,
1✔
1826
                        workflow: workflow.clone(),
1✔
1827
                        metadata: [("meta".to_string(), "datum".to_string())].into(),
1✔
1828
                        properties: vec![("proper".to_string(), "tee".to_string()).into()],
1✔
1829
                    },
1✔
1830
                    &root_collection_id,
1✔
1831
                )
1✔
1832
                .await
42✔
1833
                .unwrap();
1✔
1834

1835
            assert_eq!(
1✔
1836
                layer_db.load_layer(&layer1).await.unwrap(),
5✔
1837
                crate::layers::layer::Layer {
1✔
1838
                    id: ProviderLayerId {
1✔
1839
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1840
                        layer_id: layer1.clone(),
1✔
1841
                    },
1✔
1842
                    name: "Layer1".to_string(),
1✔
1843
                    description: "Layer 1".to_string(),
1✔
1844
                    symbology: None,
1✔
1845
                    workflow: workflow.clone(),
1✔
1846
                    metadata: [("meta".to_string(), "datum".to_string())].into(),
1✔
1847
                    properties: vec![("proper".to_string(), "tee".to_string()).into()],
1✔
1848
                }
1✔
1849
            );
1850

1851
            let collection1_id = layer_db
1✔
1852
                .add_layer_collection(
1✔
1853
                    AddLayerCollection {
1✔
1854
                        name: "Collection1".to_string(),
1✔
1855
                        description: "Collection 1".to_string(),
1✔
1856
                        properties: Default::default(),
1✔
1857
                    },
1✔
1858
                    &root_collection_id,
1✔
1859
                )
1✔
1860
                .await
11✔
1861
                .unwrap();
1✔
1862

1863
            let layer2 = layer_db
1✔
1864
                .add_layer(
1✔
1865
                    AddLayer {
1✔
1866
                        name: "Layer2".to_string(),
1✔
1867
                        description: "Layer 2".to_string(),
1✔
1868
                        symbology: None,
1✔
1869
                        workflow: workflow.clone(),
1✔
1870
                        metadata: Default::default(),
1✔
1871
                        properties: Default::default(),
1✔
1872
                    },
1✔
1873
                    &collection1_id,
1✔
1874
                )
1✔
1875
                .await
13✔
1876
                .unwrap();
1✔
1877

1878
            let collection2_id = layer_db
1✔
1879
                .add_layer_collection(
1✔
1880
                    AddLayerCollection {
1✔
1881
                        name: "Collection2".to_string(),
1✔
1882
                        description: "Collection 2".to_string(),
1✔
1883
                        properties: Default::default(),
1✔
1884
                    },
1✔
1885
                    &collection1_id,
1✔
1886
                )
1✔
1887
                .await
12✔
1888
                .unwrap();
1✔
1889

1✔
1890
            layer_db
1✔
1891
                .add_collection_to_parent(&collection2_id, &collection1_id)
1✔
1892
                .await
6✔
1893
                .unwrap();
1✔
1894

1895
            let root_collection = layer_db
1✔
1896
                .load_layer_collection(
1✔
1897
                    &root_collection_id,
1✔
1898
                    LayerCollectionListOptions {
1✔
1899
                        offset: 0,
1✔
1900
                        limit: 20,
1✔
1901
                    },
1✔
1902
                )
1✔
1903
                .await
8✔
1904
                .unwrap();
1✔
1905

1✔
1906
            assert_eq!(
1✔
1907
                root_collection,
1✔
1908
                LayerCollection {
1✔
1909
                    id: ProviderLayerCollectionId {
1✔
1910
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1911
                        collection_id: root_collection_id,
1✔
1912
                    },
1✔
1913
                    name: "Layers".to_string(),
1✔
1914
                    description: "All available Geo Engine layers".to_string(),
1✔
1915
                    items: vec![
1✔
1916
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1917
                            id: ProviderLayerCollectionId {
1✔
1918
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1919
                                collection_id: collection1_id.clone(),
1✔
1920
                            },
1✔
1921
                            name: "Collection1".to_string(),
1✔
1922
                            description: "Collection 1".to_string(),
1✔
1923
                            properties: Default::default(),
1✔
1924
                        }),
1✔
1925
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1926
                            id: ProviderLayerCollectionId {
1✔
1927
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1928
                                collection_id: LayerCollectionId(
1✔
1929
                                    UNSORTED_COLLECTION_ID.to_string()
1✔
1930
                                ),
1✔
1931
                            },
1✔
1932
                            name: "Unsorted".to_string(),
1✔
1933
                            description: "Unsorted Layers".to_string(),
1✔
1934
                            properties: Default::default(),
1✔
1935
                        }),
1✔
1936
                        CollectionItem::Layer(LayerListing {
1✔
1937
                            id: ProviderLayerId {
1✔
1938
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1939
                                layer_id: layer1,
1✔
1940
                            },
1✔
1941
                            name: "Layer1".to_string(),
1✔
1942
                            description: "Layer 1".to_string(),
1✔
1943
                            properties: vec![("proper".to_string(), "tee".to_string()).into()],
1✔
1944
                        })
1✔
1945
                    ],
1✔
1946
                    entry_label: None,
1✔
1947
                    properties: vec![],
1✔
1948
                }
1✔
1949
            );
1✔
1950

1951
            let collection1 = layer_db
1✔
1952
                .load_layer_collection(
1✔
1953
                    &collection1_id,
1✔
1954
                    LayerCollectionListOptions {
1✔
1955
                        offset: 0,
1✔
1956
                        limit: 20,
1✔
1957
                    },
1✔
1958
                )
1✔
1959
                .await
8✔
1960
                .unwrap();
1✔
1961

1✔
1962
            assert_eq!(
1✔
1963
                collection1,
1✔
1964
                LayerCollection {
1✔
1965
                    id: ProviderLayerCollectionId {
1✔
1966
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1967
                        collection_id: collection1_id,
1✔
1968
                    },
1✔
1969
                    name: "Collection1".to_string(),
1✔
1970
                    description: "Collection 1".to_string(),
1✔
1971
                    items: vec![
1✔
1972
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1973
                            id: ProviderLayerCollectionId {
1✔
1974
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1975
                                collection_id: collection2_id,
1✔
1976
                            },
1✔
1977
                            name: "Collection2".to_string(),
1✔
1978
                            description: "Collection 2".to_string(),
1✔
1979
                            properties: Default::default(),
1✔
1980
                        }),
1✔
1981
                        CollectionItem::Layer(LayerListing {
1✔
1982
                            id: ProviderLayerId {
1✔
1983
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1984
                                layer_id: layer2,
1✔
1985
                            },
1✔
1986
                            name: "Layer2".to_string(),
1✔
1987
                            description: "Layer 2".to_string(),
1✔
1988
                            properties: vec![],
1✔
1989
                        })
1✔
1990
                    ],
1✔
1991
                    entry_label: None,
1✔
1992
                    properties: vec![],
1✔
1993
                }
1✔
1994
            );
1✔
1995
        })
1✔
1996
        .await;
10✔
1997
    }
1998

1999
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2000
    async fn it_tracks_used_quota_in_postgres() {
1✔
2001
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2002
            let _user = app_ctx
1✔
2003
                .register_user(UserRegistration {
1✔
2004
                    email: "foo@example.com".to_string(),
1✔
2005
                    password: "secret1234".to_string(),
1✔
2006
                    real_name: "Foo Bar".to_string(),
1✔
2007
                })
1✔
2008
                .await
3✔
2009
                .unwrap();
1✔
2010

2011
            let session = app_ctx
1✔
2012
                .login(UserCredentials {
1✔
2013
                    email: "foo@example.com".to_string(),
1✔
2014
                    password: "secret1234".to_string(),
1✔
2015
                })
1✔
2016
                .await
7✔
2017
                .unwrap();
1✔
2018

2019
            let admin_session = admin_login(&app_ctx).await;
9✔
2020

2021
            let quota = initialize_quota_tracking(
1✔
2022
                QuotaTrackingMode::Check,
1✔
2023
                app_ctx.session_context(admin_session).db(),
1✔
2024
                0,
1✔
2025
                60,
1✔
2026
            );
1✔
2027

1✔
2028
            let tracking = quota.create_quota_tracking(&session, ComputationContext::new());
1✔
2029

1✔
2030
            tracking.work_unit_done();
1✔
2031
            tracking.work_unit_done();
1✔
2032

1✔
2033
            let db = app_ctx.session_context(session).db();
1✔
2034

1✔
2035
            // wait for quota to be recorded
1✔
2036
            let mut success = false;
1✔
2037
            for _ in 0..10 {
2✔
2038
                let used = db.quota_used().await.unwrap();
5✔
2039
                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
2✔
2040

2041
                if used == 2 {
2✔
2042
                    success = true;
1✔
2043
                    break;
1✔
2044
                }
1✔
2045
            }
2046

2047
            assert!(success);
1✔
2048
        })
1✔
2049
        .await;
10✔
2050
    }
2051

2052
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2053
    async fn it_tracks_available_quota() {
1✔
2054
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2055
            let user = app_ctx
1✔
2056
                .register_user(UserRegistration {
1✔
2057
                    email: "foo@example.com".to_string(),
1✔
2058
                    password: "secret1234".to_string(),
1✔
2059
                    real_name: "Foo Bar".to_string(),
1✔
2060
                })
1✔
2061
                .await
9✔
2062
                .unwrap();
1✔
2063

2064
            let session = app_ctx
1✔
2065
                .login(UserCredentials {
1✔
2066
                    email: "foo@example.com".to_string(),
1✔
2067
                    password: "secret1234".to_string(),
1✔
2068
                })
1✔
2069
                .await
6✔
2070
                .unwrap();
1✔
2071

2072
            let admin_session = admin_login(&app_ctx).await;
2✔
2073

2074
            app_ctx
1✔
2075
                .session_context(admin_session.clone())
1✔
2076
                .db()
1✔
2077
                .update_quota_available_by_user(&user, 1)
1✔
2078
                .await
1✔
2079
                .unwrap();
1✔
2080

1✔
2081
            let quota = initialize_quota_tracking(
1✔
2082
                QuotaTrackingMode::Check,
1✔
2083
                app_ctx.session_context(admin_session).db(),
1✔
2084
                0,
1✔
2085
                60,
1✔
2086
            );
1✔
2087

1✔
2088
            let tracking = quota.create_quota_tracking(&session, ComputationContext::new());
1✔
2089

1✔
2090
            tracking.work_unit_done();
1✔
2091
            tracking.work_unit_done();
1✔
2092

1✔
2093
            let db = app_ctx.session_context(session).db();
1✔
2094

1✔
2095
            // wait for quota to be recorded
1✔
2096
            let mut success = false;
1✔
2097
            for _ in 0..10 {
1✔
2098
                let available = db.quota_available().await.unwrap();
2✔
2099
                tokio::time::sleep(std::time::Duration::from_millis(100)).await;
1✔
2100

2101
                if available == -1 {
1✔
2102
                    success = true;
1✔
2103
                    break;
1✔
2104
                }
×
2105
            }
2106

2107
            assert!(success);
1✔
2108
        })
1✔
2109
        .await;
10✔
2110
    }
2111

2112
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2113
    async fn it_updates_quota_in_postgres() {
1✔
2114
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2115
            let user = app_ctx
1✔
2116
                .register_user(UserRegistration {
1✔
2117
                    email: "foo@example.com".to_string(),
1✔
2118
                    password: "secret1234".to_string(),
1✔
2119
                    real_name: "Foo Bar".to_string(),
1✔
2120
                })
1✔
2121
                .await
11✔
2122
                .unwrap();
1✔
2123

2124
            let session = app_ctx
1✔
2125
                .login(UserCredentials {
1✔
2126
                    email: "foo@example.com".to_string(),
1✔
2127
                    password: "secret1234".to_string(),
1✔
2128
                })
1✔
2129
                .await
10✔
2130
                .unwrap();
1✔
2131

1✔
2132
            let db = app_ctx.session_context(session.clone()).db();
1✔
2133
            let admin_db = app_ctx.session_context(UserSession::admin_session()).db();
1✔
2134

2135
            assert_eq!(
1✔
2136
                db.quota_available().await.unwrap(),
3✔
2137
                crate::util::config::get_config_element::<crate::pro::util::config::Quota>()
1✔
2138
                    .unwrap()
1✔
2139
                    .default_available_quota
2140
            );
2141

2142
            assert_eq!(
1✔
2143
                admin_db.quota_available_by_user(&user).await.unwrap(),
3✔
2144
                crate::util::config::get_config_element::<crate::pro::util::config::Quota>()
1✔
2145
                    .unwrap()
1✔
2146
                    .default_available_quota
2147
            );
2148

2149
            admin_db
1✔
2150
                .update_quota_available_by_user(&user, 123)
1✔
2151
                .await
1✔
2152
                .unwrap();
1✔
2153

2154
            assert_eq!(db.quota_available().await.unwrap(), 123);
3✔
2155

2156
            assert_eq!(admin_db.quota_available_by_user(&user).await.unwrap(), 123);
3✔
2157
        })
1✔
2158
        .await;
10✔
2159
    }
2160

2161
    #[allow(clippy::too_many_lines)]
2162
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2163
    async fn it_removes_layer_collections() {
1✔
2164
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2165
            let session = admin_login(&app_ctx).await;
10✔
2166

2167
            let layer_db = app_ctx.session_context(session).db();
1✔
2168

1✔
2169
            let layer = AddLayer {
1✔
2170
                name: "layer".to_string(),
1✔
2171
                description: "description".to_string(),
1✔
2172
                workflow: Workflow {
1✔
2173
                    operator: TypedOperator::Vector(
1✔
2174
                        MockPointSource {
1✔
2175
                            params: MockPointSourceParams {
1✔
2176
                                points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2177
                            },
1✔
2178
                        }
1✔
2179
                        .boxed(),
1✔
2180
                    ),
1✔
2181
                },
1✔
2182
                symbology: None,
1✔
2183
                metadata: Default::default(),
1✔
2184
                properties: Default::default(),
1✔
2185
            };
1✔
2186

2187
            let root_collection = &layer_db.get_root_layer_collection_id().await.unwrap();
1✔
2188

1✔
2189
            let collection = AddLayerCollection {
1✔
2190
                name: "top collection".to_string(),
1✔
2191
                description: "description".to_string(),
1✔
2192
                properties: Default::default(),
1✔
2193
            };
1✔
2194

2195
            let top_c_id = layer_db
1✔
2196
                .add_layer_collection(collection, root_collection)
1✔
2197
                .await
20✔
2198
                .unwrap();
1✔
2199

2200
            let l_id = layer_db.add_layer(layer, &top_c_id).await.unwrap();
45✔
2201

1✔
2202
            let collection = AddLayerCollection {
1✔
2203
                name: "empty collection".to_string(),
1✔
2204
                description: "description".to_string(),
1✔
2205
                properties: Default::default(),
1✔
2206
            };
1✔
2207

2208
            let empty_c_id = layer_db
1✔
2209
                .add_layer_collection(collection, &top_c_id)
1✔
2210
                .await
12✔
2211
                .unwrap();
1✔
2212

2213
            let items = layer_db
1✔
2214
                .load_layer_collection(
1✔
2215
                    &top_c_id,
1✔
2216
                    LayerCollectionListOptions {
1✔
2217
                        offset: 0,
1✔
2218
                        limit: 20,
1✔
2219
                    },
1✔
2220
                )
1✔
2221
                .await
8✔
2222
                .unwrap();
1✔
2223

1✔
2224
            assert_eq!(
1✔
2225
                items,
1✔
2226
                LayerCollection {
1✔
2227
                    id: ProviderLayerCollectionId {
1✔
2228
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
2229
                        collection_id: top_c_id.clone(),
1✔
2230
                    },
1✔
2231
                    name: "top collection".to_string(),
1✔
2232
                    description: "description".to_string(),
1✔
2233
                    items: vec![
1✔
2234
                        CollectionItem::Collection(LayerCollectionListing {
1✔
2235
                            id: ProviderLayerCollectionId {
1✔
2236
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
2237
                                collection_id: empty_c_id.clone(),
1✔
2238
                            },
1✔
2239
                            name: "empty collection".to_string(),
1✔
2240
                            description: "description".to_string(),
1✔
2241
                            properties: Default::default(),
1✔
2242
                        }),
1✔
2243
                        CollectionItem::Layer(LayerListing {
1✔
2244
                            id: ProviderLayerId {
1✔
2245
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
2246
                                layer_id: l_id.clone(),
1✔
2247
                            },
1✔
2248
                            name: "layer".to_string(),
1✔
2249
                            description: "description".to_string(),
1✔
2250
                            properties: vec![],
1✔
2251
                        })
1✔
2252
                    ],
1✔
2253
                    entry_label: None,
1✔
2254
                    properties: vec![],
1✔
2255
                }
1✔
2256
            );
1✔
2257

2258
            // remove empty collection
2259
            layer_db.remove_layer_collection(&empty_c_id).await.unwrap();
12✔
2260

2261
            let items = layer_db
1✔
2262
                .load_layer_collection(
1✔
2263
                    &top_c_id,
1✔
2264
                    LayerCollectionListOptions {
1✔
2265
                        offset: 0,
1✔
2266
                        limit: 20,
1✔
2267
                    },
1✔
2268
                )
1✔
2269
                .await
8✔
2270
                .unwrap();
1✔
2271

1✔
2272
            assert_eq!(
1✔
2273
                items,
1✔
2274
                LayerCollection {
1✔
2275
                    id: ProviderLayerCollectionId {
1✔
2276
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
2277
                        collection_id: top_c_id.clone(),
1✔
2278
                    },
1✔
2279
                    name: "top collection".to_string(),
1✔
2280
                    description: "description".to_string(),
1✔
2281
                    items: vec![CollectionItem::Layer(LayerListing {
1✔
2282
                        id: ProviderLayerId {
1✔
2283
                            provider_id: INTERNAL_PROVIDER_ID,
1✔
2284
                            layer_id: l_id.clone(),
1✔
2285
                        },
1✔
2286
                        name: "layer".to_string(),
1✔
2287
                        description: "description".to_string(),
1✔
2288
                        properties: vec![],
1✔
2289
                    })],
1✔
2290
                    entry_label: None,
1✔
2291
                    properties: vec![],
1✔
2292
                }
1✔
2293
            );
1✔
2294

2295
            // remove top (not root) collection
2296
            layer_db.remove_layer_collection(&top_c_id).await.unwrap();
12✔
2297

1✔
2298
            layer_db
1✔
2299
                .load_layer_collection(
1✔
2300
                    &top_c_id,
1✔
2301
                    LayerCollectionListOptions {
1✔
2302
                        offset: 0,
1✔
2303
                        limit: 20,
1✔
2304
                    },
1✔
2305
                )
1✔
2306
                .await
3✔
2307
                .unwrap_err();
1✔
2308

1✔
2309
            // should be deleted automatically
1✔
2310
            layer_db.load_layer(&l_id).await.unwrap_err();
3✔
2311

1✔
2312
            // it is not allowed to remove the root collection
1✔
2313
            layer_db
1✔
2314
                .remove_layer_collection(root_collection)
1✔
2315
                .await
3✔
2316
                .unwrap_err();
1✔
2317
            layer_db
1✔
2318
                .load_layer_collection(
1✔
2319
                    root_collection,
1✔
2320
                    LayerCollectionListOptions {
1✔
2321
                        offset: 0,
1✔
2322
                        limit: 20,
1✔
2323
                    },
1✔
2324
                )
1✔
2325
                .await
8✔
2326
                .unwrap();
1✔
2327
        })
1✔
2328
        .await;
11✔
2329
    }
2330

2331
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2332
    #[allow(clippy::too_many_lines)]
2333
    async fn it_removes_collections_from_collections() {
1✔
2334
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2335
            let session = admin_login(&app_ctx).await;
11✔
2336

2337
            let db = app_ctx.session_context(session).db();
1✔
2338

2339
            let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
2340

2341
            let mid_collection_id = db
1✔
2342
                .add_layer_collection(
1✔
2343
                    AddLayerCollection {
1✔
2344
                        name: "mid collection".to_string(),
1✔
2345
                        description: "description".to_string(),
1✔
2346
                        properties: Default::default(),
1✔
2347
                    },
1✔
2348
                    root_collection_id,
1✔
2349
                )
1✔
2350
                .await
21✔
2351
                .unwrap();
1✔
2352

2353
            let bottom_collection_id = db
1✔
2354
                .add_layer_collection(
1✔
2355
                    AddLayerCollection {
1✔
2356
                        name: "bottom collection".to_string(),
1✔
2357
                        description: "description".to_string(),
1✔
2358
                        properties: Default::default(),
1✔
2359
                    },
1✔
2360
                    &mid_collection_id,
1✔
2361
                )
1✔
2362
                .await
12✔
2363
                .unwrap();
1✔
2364

2365
            let layer_id = db
1✔
2366
                .add_layer(
1✔
2367
                    AddLayer {
1✔
2368
                        name: "layer".to_string(),
1✔
2369
                        description: "description".to_string(),
1✔
2370
                        workflow: Workflow {
1✔
2371
                            operator: TypedOperator::Vector(
1✔
2372
                                MockPointSource {
1✔
2373
                                    params: MockPointSourceParams {
1✔
2374
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2375
                                    },
1✔
2376
                                }
1✔
2377
                                .boxed(),
1✔
2378
                            ),
1✔
2379
                        },
1✔
2380
                        symbology: None,
1✔
2381
                        metadata: Default::default(),
1✔
2382
                        properties: Default::default(),
1✔
2383
                    },
1✔
2384
                    &mid_collection_id,
1✔
2385
                )
1✔
2386
                .await
45✔
2387
                .unwrap();
1✔
2388

1✔
2389
            // removing the mid collection…
1✔
2390
            db.remove_layer_collection_from_parent(&mid_collection_id, root_collection_id)
1✔
2391
                .await
14✔
2392
                .unwrap();
1✔
2393

1✔
2394
            // …should remove itself
1✔
2395
            db.load_layer_collection(&mid_collection_id, LayerCollectionListOptions::default())
1✔
2396
                .await
3✔
2397
                .unwrap_err();
1✔
2398

1✔
2399
            // …should remove the bottom collection
1✔
2400
            db.load_layer_collection(&bottom_collection_id, LayerCollectionListOptions::default())
1✔
2401
                .await
3✔
2402
                .unwrap_err();
1✔
2403

1✔
2404
            // … and should remove the layer of the bottom collection
1✔
2405
            db.load_layer(&layer_id).await.unwrap_err();
3✔
2406

1✔
2407
            // the root collection is still there
1✔
2408
            db.load_layer_collection(root_collection_id, LayerCollectionListOptions::default())
1✔
2409
                .await
8✔
2410
                .unwrap();
1✔
2411
        })
1✔
2412
        .await;
12✔
2413
    }
2414

2415
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2416
    #[allow(clippy::too_many_lines)]
2417
    async fn it_removes_layers_from_collections() {
1✔
2418
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2419
            let session = admin_login(&app_ctx).await;
11✔
2420

2421
            let db = app_ctx.session_context(session).db();
1✔
2422

2423
            let root_collection = &db.get_root_layer_collection_id().await.unwrap();
1✔
2424

2425
            let another_collection = db
1✔
2426
                .add_layer_collection(
1✔
2427
                    AddLayerCollection {
1✔
2428
                        name: "top collection".to_string(),
1✔
2429
                        description: "description".to_string(),
1✔
2430
                        properties: Default::default(),
1✔
2431
                    },
1✔
2432
                    root_collection,
1✔
2433
                )
1✔
2434
                .await
21✔
2435
                .unwrap();
1✔
2436

2437
            let layer_in_one_collection = db
1✔
2438
                .add_layer(
1✔
2439
                    AddLayer {
1✔
2440
                        name: "layer 1".to_string(),
1✔
2441
                        description: "description".to_string(),
1✔
2442
                        workflow: Workflow {
1✔
2443
                            operator: TypedOperator::Vector(
1✔
2444
                                MockPointSource {
1✔
2445
                                    params: MockPointSourceParams {
1✔
2446
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2447
                                    },
1✔
2448
                                }
1✔
2449
                                .boxed(),
1✔
2450
                            ),
1✔
2451
                        },
1✔
2452
                        symbology: None,
1✔
2453
                        metadata: Default::default(),
1✔
2454
                        properties: Default::default(),
1✔
2455
                    },
1✔
2456
                    &another_collection,
1✔
2457
                )
1✔
2458
                .await
45✔
2459
                .unwrap();
1✔
2460

2461
            let layer_in_two_collections = db
1✔
2462
                .add_layer(
1✔
2463
                    AddLayer {
1✔
2464
                        name: "layer 2".to_string(),
1✔
2465
                        description: "description".to_string(),
1✔
2466
                        workflow: Workflow {
1✔
2467
                            operator: TypedOperator::Vector(
1✔
2468
                                MockPointSource {
1✔
2469
                                    params: MockPointSourceParams {
1✔
2470
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
2471
                                    },
1✔
2472
                                }
1✔
2473
                                .boxed(),
1✔
2474
                            ),
1✔
2475
                        },
1✔
2476
                        symbology: None,
1✔
2477
                        metadata: Default::default(),
1✔
2478
                        properties: Default::default(),
1✔
2479
                    },
1✔
2480
                    &another_collection,
1✔
2481
                )
1✔
2482
                .await
14✔
2483
                .unwrap();
1✔
2484

1✔
2485
            db.add_layer_to_collection(&layer_in_two_collections, root_collection)
1✔
2486
                .await
6✔
2487
                .unwrap();
1✔
2488

1✔
2489
            // remove first layer --> should be deleted entirely
1✔
2490

1✔
2491
            db.remove_layer_from_collection(&layer_in_one_collection, &another_collection)
1✔
2492
                .await
10✔
2493
                .unwrap();
1✔
2494

2495
            let number_of_layer_in_collection = db
1✔
2496
                .load_layer_collection(
1✔
2497
                    &another_collection,
1✔
2498
                    LayerCollectionListOptions {
1✔
2499
                        offset: 0,
1✔
2500
                        limit: 20,
1✔
2501
                    },
1✔
2502
                )
1✔
2503
                .await
8✔
2504
                .unwrap()
1✔
2505
                .items
1✔
2506
                .len();
1✔
2507
            assert_eq!(
1✔
2508
                number_of_layer_in_collection,
1✔
2509
                1 /* only the other collection should be here */
1✔
2510
            );
1✔
2511

2512
            db.load_layer(&layer_in_one_collection).await.unwrap_err();
3✔
2513

1✔
2514
            // remove second layer --> should only be gone in collection
1✔
2515

1✔
2516
            db.remove_layer_from_collection(&layer_in_two_collections, &another_collection)
1✔
2517
                .await
10✔
2518
                .unwrap();
1✔
2519

2520
            let number_of_layer_in_collection = db
1✔
2521
                .load_layer_collection(
1✔
2522
                    &another_collection,
1✔
2523
                    LayerCollectionListOptions {
1✔
2524
                        offset: 0,
1✔
2525
                        limit: 20,
1✔
2526
                    },
1✔
2527
                )
1✔
2528
                .await
8✔
2529
                .unwrap()
1✔
2530
                .items
1✔
2531
                .len();
1✔
2532
            assert_eq!(
1✔
2533
                number_of_layer_in_collection,
1✔
2534
                0 /* both layers were deleted */
1✔
2535
            );
1✔
2536

2537
            db.load_layer(&layer_in_two_collections).await.unwrap();
6✔
2538
        })
1✔
2539
        .await;
11✔
2540
    }
2541

2542
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2543
    #[allow(clippy::too_many_lines)]
2544
    async fn it_deletes_dataset() {
1✔
2545
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2546
            let loading_info = OgrSourceDataset {
1✔
2547
                file_name: PathBuf::from("test.csv"),
1✔
2548
                layer_name: "test.csv".to_owned(),
1✔
2549
                data_type: Some(VectorDataType::MultiPoint),
1✔
2550
                time: OgrSourceDatasetTimeType::Start {
1✔
2551
                    start_field: "start".to_owned(),
1✔
2552
                    start_format: OgrSourceTimeFormat::Auto,
1✔
2553
                    duration: OgrSourceDurationSpec::Zero,
1✔
2554
                },
1✔
2555
                default_geometry: None,
1✔
2556
                columns: Some(OgrSourceColumnSpec {
1✔
2557
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
2558
                        header: CsvHeader::Auto,
1✔
2559
                    }),
1✔
2560
                    x: "x".to_owned(),
1✔
2561
                    y: None,
1✔
2562
                    int: vec![],
1✔
2563
                    float: vec![],
1✔
2564
                    text: vec![],
1✔
2565
                    bool: vec![],
1✔
2566
                    datetime: vec![],
1✔
2567
                    rename: None,
1✔
2568
                }),
1✔
2569
                force_ogr_time_filter: false,
1✔
2570
                force_ogr_spatial_filter: false,
1✔
2571
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2572
                sql_query: None,
1✔
2573
                attribute_query: None,
1✔
2574
                cache_ttl: CacheTtlSeconds::default(),
1✔
2575
            };
1✔
2576

1✔
2577
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
2578
                OgrSourceDataset,
1✔
2579
                VectorResultDescriptor,
1✔
2580
                VectorQueryRectangle,
1✔
2581
            > {
1✔
2582
                loading_info: loading_info.clone(),
1✔
2583
                result_descriptor: VectorResultDescriptor {
1✔
2584
                    data_type: VectorDataType::MultiPoint,
1✔
2585
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
2586
                    columns: [(
1✔
2587
                        "foo".to_owned(),
1✔
2588
                        VectorColumnInfo {
1✔
2589
                            data_type: FeatureDataType::Float,
1✔
2590
                            measurement: Measurement::Unitless,
1✔
2591
                        },
1✔
2592
                    )]
1✔
2593
                    .into_iter()
1✔
2594
                    .collect(),
1✔
2595
                    time: None,
1✔
2596
                    bbox: None,
1✔
2597
                },
1✔
2598
                phantom: Default::default(),
1✔
2599
            });
1✔
2600

2601
            let session = app_ctx.create_anonymous_session().await.unwrap();
14✔
2602

1✔
2603
            let dataset_name = DatasetName::new(Some(session.user.id.to_string()), "my_dataset");
1✔
2604

1✔
2605
            let db = app_ctx.session_context(session.clone()).db();
1✔
2606
            let wrap = db.wrap_meta_data(meta_data);
1✔
2607
            let dataset_id = db
1✔
2608
                .add_dataset(
1✔
2609
                    AddDataset {
1✔
2610
                        name: Some(dataset_name),
1✔
2611
                        display_name: "Ogr Test".to_owned(),
1✔
2612
                        description: "desc".to_owned(),
1✔
2613
                        source_operator: "OgrSource".to_owned(),
1✔
2614
                        symbology: None,
1✔
2615
                        provenance: Some(vec![Provenance {
1✔
2616
                            citation: "citation".to_owned(),
1✔
2617
                            license: "license".to_owned(),
1✔
2618
                            uri: "uri".to_owned(),
1✔
2619
                        }]),
1✔
2620
                    },
1✔
2621
                    wrap,
1✔
2622
                )
1✔
2623
                .await
171✔
2624
                .unwrap()
1✔
2625
                .id;
2626

2627
            assert!(db.load_dataset(&dataset_id).await.is_ok());
4✔
2628

2629
            db.delete_dataset(dataset_id).await.unwrap();
11✔
2630

2631
            assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
2632
        })
1✔
2633
        .await;
12✔
2634
    }
2635

2636
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2637
    #[allow(clippy::too_many_lines)]
2638
    async fn it_deletes_admin_dataset() {
1✔
2639
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2640
            let dataset_name = DatasetName::new(None, "my_dataset");
1✔
2641

1✔
2642
            let loading_info = OgrSourceDataset {
1✔
2643
                file_name: PathBuf::from("test.csv"),
1✔
2644
                layer_name: "test.csv".to_owned(),
1✔
2645
                data_type: Some(VectorDataType::MultiPoint),
1✔
2646
                time: OgrSourceDatasetTimeType::Start {
1✔
2647
                    start_field: "start".to_owned(),
1✔
2648
                    start_format: OgrSourceTimeFormat::Auto,
1✔
2649
                    duration: OgrSourceDurationSpec::Zero,
1✔
2650
                },
1✔
2651
                default_geometry: None,
1✔
2652
                columns: Some(OgrSourceColumnSpec {
1✔
2653
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
2654
                        header: CsvHeader::Auto,
1✔
2655
                    }),
1✔
2656
                    x: "x".to_owned(),
1✔
2657
                    y: None,
1✔
2658
                    int: vec![],
1✔
2659
                    float: vec![],
1✔
2660
                    text: vec![],
1✔
2661
                    bool: vec![],
1✔
2662
                    datetime: vec![],
1✔
2663
                    rename: None,
1✔
2664
                }),
1✔
2665
                force_ogr_time_filter: false,
1✔
2666
                force_ogr_spatial_filter: false,
1✔
2667
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2668
                sql_query: None,
1✔
2669
                attribute_query: None,
1✔
2670
                cache_ttl: CacheTtlSeconds::default(),
1✔
2671
            };
1✔
2672

1✔
2673
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
2674
                OgrSourceDataset,
1✔
2675
                VectorResultDescriptor,
1✔
2676
                VectorQueryRectangle,
1✔
2677
            > {
1✔
2678
                loading_info: loading_info.clone(),
1✔
2679
                result_descriptor: VectorResultDescriptor {
1✔
2680
                    data_type: VectorDataType::MultiPoint,
1✔
2681
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
2682
                    columns: [(
1✔
2683
                        "foo".to_owned(),
1✔
2684
                        VectorColumnInfo {
1✔
2685
                            data_type: FeatureDataType::Float,
1✔
2686
                            measurement: Measurement::Unitless,
1✔
2687
                        },
1✔
2688
                    )]
1✔
2689
                    .into_iter()
1✔
2690
                    .collect(),
1✔
2691
                    time: None,
1✔
2692
                    bbox: None,
1✔
2693
                },
1✔
2694
                phantom: Default::default(),
1✔
2695
            });
1✔
2696

2697
            let session = admin_login(&app_ctx).await;
1✔
2698

2699
            let db = app_ctx.session_context(session).db();
1✔
2700
            let wrap = db.wrap_meta_data(meta_data);
1✔
2701
            let dataset_id = db
1✔
2702
                .add_dataset(
1✔
2703
                    AddDataset {
1✔
2704
                        name: Some(dataset_name),
1✔
2705
                        display_name: "Ogr Test".to_owned(),
1✔
2706
                        description: "desc".to_owned(),
1✔
2707
                        source_operator: "OgrSource".to_owned(),
1✔
2708
                        symbology: None,
1✔
2709
                        provenance: Some(vec![Provenance {
1✔
2710
                            citation: "citation".to_owned(),
1✔
2711
                            license: "license".to_owned(),
1✔
2712
                            uri: "uri".to_owned(),
1✔
2713
                        }]),
1✔
2714
                    },
1✔
2715
                    wrap,
1✔
2716
                )
1✔
2717
                .await
87✔
2718
                .unwrap()
1✔
2719
                .id;
2720

2721
            assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
2722

2723
            db.delete_dataset(dataset_id).await.unwrap();
11✔
2724

2725
            assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
2726
        })
1✔
2727
        .await;
11✔
2728
    }
2729

2730
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2731
    async fn test_missing_layer_dataset_in_collection_listing() {
1✔
2732
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2733
            let session = admin_login(&app_ctx).await;
11✔
2734
            let db = app_ctx.session_context(session).db();
1✔
2735

2736
            let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
2737

2738
            let top_collection_id = db
1✔
2739
                .add_layer_collection(
1✔
2740
                    AddLayerCollection {
1✔
2741
                        name: "top collection".to_string(),
1✔
2742
                        description: "description".to_string(),
1✔
2743
                        properties: Default::default(),
1✔
2744
                    },
1✔
2745
                    root_collection_id,
1✔
2746
                )
1✔
2747
                .await
18✔
2748
                .unwrap();
1✔
2749

1✔
2750
            let faux_layer = LayerId("faux".to_string());
1✔
2751

1✔
2752
            // this should fail
1✔
2753
            db.add_layer_to_collection(&faux_layer, &top_collection_id)
1✔
2754
                .await
2✔
2755
                .unwrap_err();
1✔
2756

2757
            let root_collection_layers = db
1✔
2758
                .load_layer_collection(
1✔
2759
                    &top_collection_id,
1✔
2760
                    LayerCollectionListOptions {
1✔
2761
                        offset: 0,
1✔
2762
                        limit: 20,
1✔
2763
                    },
1✔
2764
                )
1✔
2765
                .await
6✔
2766
                .unwrap();
1✔
2767

1✔
2768
            assert_eq!(
1✔
2769
                root_collection_layers,
1✔
2770
                LayerCollection {
1✔
2771
                    id: ProviderLayerCollectionId {
1✔
2772
                        provider_id: DataProviderId(
1✔
2773
                            "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74".try_into().unwrap()
1✔
2774
                        ),
1✔
2775
                        collection_id: top_collection_id.clone(),
1✔
2776
                    },
1✔
2777
                    name: "top collection".to_string(),
1✔
2778
                    description: "description".to_string(),
1✔
2779
                    items: vec![],
1✔
2780
                    entry_label: None,
1✔
2781
                    properties: vec![],
1✔
2782
                }
1✔
2783
            );
1✔
2784
        })
1✔
2785
        .await;
12✔
2786
    }
2787

2788
    #[allow(clippy::too_many_lines)]
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn it_restricts_layer_permissions() {
        // End-to-end check of the permission model on layer collections: a
        // collection created by the admin is invisible to a regular user until
        // `Read` is granted, writable only once `Owner` is granted, and
        // inaccessible again after both permissions have been revoked.
        with_pro_temp_context(|app_ctx, _| async move {
            let admin_session = admin_login(&app_ctx).await;
            let session1 = app_ctx.create_anonymous_session().await.unwrap();

            // two independent db handles: one per session, so each call runs
            // with that session's permissions
            let admin_db = app_ctx.session_context(admin_session.clone()).db();
            let db1 = app_ctx.session_context(session1.clone()).db();

            let root = admin_db.get_root_layer_collection_id().await.unwrap();

            // add new collection as admin
            let new_collection_id = admin_db
                .add_layer_collection(
                    AddLayerCollection {
                        name: "admin collection".to_string(),
                        description: String::new(),
                        properties: Default::default(),
                    },
                    &root,
                )
                .await
                .unwrap();

            // load as regular user, not visible (no read permission yet)
            let collection = db1
                .load_layer_collection(
                    &root,
                    LayerCollectionListOptions {
                        offset: 0,
                        limit: 10,
                    },
                )
                .await
                .unwrap();
            assert!(!collection.items.iter().any(|c| match c {
                CollectionItem::Collection(c) => c.id.collection_id == new_collection_id,
                CollectionItem::Layer(_) => false,
            }));

            // give user read permission
            admin_db
                .add_permission(
                    session1.user.id.into(),
                    new_collection_id.clone(),
                    Permission::Read,
                )
                .await
                .unwrap();

            // now visible in the same root listing
            let collection = db1
                .load_layer_collection(
                    &root,
                    LayerCollectionListOptions {
                        offset: 0,
                        limit: 10,
                    },
                )
                .await
                .unwrap();

            assert!(collection.items.iter().any(|c| match c {
                CollectionItem::Collection(c) => c.id.collection_id == new_collection_id,
                CollectionItem::Layer(_) => false,
            }));

            // add new layer in the collection as user, fails because only read permission
            let result = db1
                .add_layer_collection(
                    AddLayerCollection {
                        name: "user layer".to_string(),
                        description: String::new(),
                        properties: Default::default(),
                    },
                    &new_collection_id,
                )
                .await;

            assert!(result.is_err());

            // give user owner permission
            admin_db
                .add_permission(
                    session1.user.id.into(),
                    new_collection_id.clone(),
                    Permission::Owner,
                )
                .await
                .unwrap();

            // add now works
            db1.add_layer_collection(
                AddLayerCollection {
                    name: "user layer".to_string(),
                    description: String::new(),
                    properties: Default::default(),
                },
                &new_collection_id,
            )
            .await
            .unwrap();

            // remove permissions again
            admin_db
                .remove_permission(
                    session1.user.id.into(),
                    new_collection_id.clone(),
                    Permission::Read,
                )
                .await
                .unwrap();
            admin_db
                .remove_permission(
                    session1.user.id.into(),
                    new_collection_id.clone(),
                    Permission::Owner,
                )
                .await
                .unwrap();

            // access is gone now
            // NOTE(review): this add targets `&root`, which the user never had
            // write access to, so it would fail even before the revocation —
            // presumably `&new_collection_id` was intended; verify the intent.
            let result = db1
                .add_layer_collection(
                    AddLayerCollection {
                        name: "user layer".to_string(),
                        description: String::new(),
                        properties: Default::default(),
                    },
                    &root,
                )
                .await;

            assert!(result.is_err());

            // the admin collection is no longer listed for the user
            let collection = db1
                .load_layer_collection(
                    &root,
                    LayerCollectionListOptions {
                        offset: 0,
                        limit: 10,
                    },
                )
                .await
                .unwrap();

            assert!(!collection.items.iter().any(|c| match c {
                CollectionItem::Collection(c) => c.id.collection_id == new_collection_id,
                CollectionItem::Layer(_) => false,
            }));
        })
        .await;
    }
2942

2943
    #[allow(clippy::too_many_lines)]
2944
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2945
    async fn it_handles_user_roles() {
1✔
2946
        with_pro_temp_context(|app_ctx, _| async move {
1✔
2947
            let admin_session = admin_login(&app_ctx).await;
10✔
2948
            let user_id = app_ctx
1✔
2949
                .register_user(UserRegistration {
1✔
2950
                    email: "foo@example.com".to_string(),
1✔
2951
                    password: "secret123".to_string(),
1✔
2952
                    real_name: "Foo Bar".to_string(),
1✔
2953
                })
1✔
2954
                .await
9✔
2955
                .unwrap();
1✔
2956

1✔
2957
            let admin_db = app_ctx.session_context(admin_session.clone()).db();
1✔
2958

2959
            // create a new role
2960
            let role_id = admin_db.add_role("foo").await.unwrap();
3✔
2961

2962
            let user_session = app_ctx
1✔
2963
                .login(UserCredentials {
1✔
2964
                    email: "foo@example.com".to_string(),
1✔
2965
                    password: "secret123".to_string(),
1✔
2966
                })
1✔
2967
                .await
6✔
2968
                .unwrap();
1✔
2969

1✔
2970
            // user does not have the role yet
1✔
2971

1✔
2972
            assert!(!user_session.roles.contains(&role_id));
1✔
2973

2974
            //user can query their role descriptions (user role and registered user)
2975
            assert_eq!(user_session.roles.len(), 2);
1✔
2976

2977
            let expected_user_role_description = RoleDescription {
1✔
2978
                role: Role {
1✔
2979
                    id: RoleId::from(user_id),
1✔
2980
                    name: "foo@example.com".to_string(),
1✔
2981
                },
1✔
2982
                individual: true,
1✔
2983
            };
1✔
2984
            let expected_registered_role_description = RoleDescription {
1✔
2985
                role: Role {
1✔
2986
                    id: Role::registered_user_role_id(),
1✔
2987
                    name: "user".to_string(),
1✔
2988
                },
1✔
2989
                individual: false,
1✔
2990
            };
1✔
2991

2992
            let user_role_descriptions = app_ctx
1✔
2993
                .session_context(user_session.clone())
1✔
2994
                .db()
1✔
2995
                .get_role_descriptions(&user_id)
1✔
2996
                .await
×
2997
                .unwrap();
1✔
2998
            assert_eq!(
1✔
2999
                vec![
1✔
3000
                    expected_user_role_description.clone(),
1✔
3001
                    expected_registered_role_description.clone(),
1✔
3002
                ],
1✔
3003
                user_role_descriptions
1✔
3004
            );
1✔
3005

3006
            // we assign the role to the user
3007
            admin_db.assign_role(&role_id, &user_id).await.unwrap();
1✔
3008

3009
            let user_session = app_ctx
1✔
3010
                .login(UserCredentials {
1✔
3011
                    email: "foo@example.com".to_string(),
1✔
3012
                    password: "secret123".to_string(),
1✔
3013
                })
1✔
3014
                .await
7✔
3015
                .unwrap();
1✔
3016

1✔
3017
            // should be present now
1✔
3018
            assert!(user_session.roles.contains(&role_id));
1✔
3019

3020
            //user can query their role descriptions (now an additional foo role)
3021
            let expected_foo_role_description = RoleDescription {
1✔
3022
                role: Role {
1✔
3023
                    id: role_id,
1✔
3024
                    name: "foo".to_string(),
1✔
3025
                },
1✔
3026
                individual: false,
1✔
3027
            };
1✔
3028

3029
            let user_role_descriptions = app_ctx
1✔
3030
                .session_context(user_session.clone())
1✔
3031
                .db()
1✔
3032
                .get_role_descriptions(&user_id)
1✔
3033
                .await
×
3034
                .unwrap();
1✔
3035
            assert_eq!(
1✔
3036
                vec![
1✔
3037
                    expected_foo_role_description,
1✔
3038
                    expected_user_role_description.clone(),
1✔
3039
                    expected_registered_role_description.clone(),
1✔
3040
                ],
1✔
3041
                user_role_descriptions
1✔
3042
            );
1✔
3043

3044
            // we revoke it
3045
            admin_db.revoke_role(&role_id, &user_id).await.unwrap();
1✔
3046

3047
            let user_session = app_ctx
1✔
3048
                .login(UserCredentials {
1✔
3049
                    email: "foo@example.com".to_string(),
1✔
3050
                    password: "secret123".to_string(),
1✔
3051
                })
1✔
3052
                .await
6✔
3053
                .unwrap();
1✔
3054

1✔
3055
            // the role is gone now
1✔
3056
            assert!(!user_session.roles.contains(&role_id));
1✔
3057

3058
            //user can query their role descriptions (user role and registered user)
3059
            let user_role_descriptions = app_ctx
1✔
3060
                .session_context(user_session.clone())
1✔
3061
                .db()
1✔
3062
                .get_role_descriptions(&user_id)
1✔
3063
                .await
3✔
3064
                .unwrap();
1✔
3065
            assert_eq!(
1✔
3066
                vec![
1✔
3067
                    expected_user_role_description.clone(),
1✔
3068
                    expected_registered_role_description.clone(),
1✔
3069
                ],
1✔
3070
                user_role_descriptions
1✔
3071
            );
1✔
3072

3073
            // assign it again and then delete the whole role, should not be present at user
3074

3075
            admin_db.assign_role(&role_id, &user_id).await.unwrap();
3✔
3076

1✔
3077
            admin_db.remove_role(&role_id).await.unwrap();
3✔
3078

3079
            let user_session = app_ctx
1✔
3080
                .login(UserCredentials {
1✔
3081
                    email: "foo@example.com".to_string(),
1✔
3082
                    password: "secret123".to_string(),
1✔
3083
                })
1✔
3084
                .await
11✔
3085
                .unwrap();
1✔
3086

1✔
3087
            assert!(!user_session.roles.contains(&role_id));
1✔
3088

3089
            //user can query their role descriptions (user role and registered user)
3090
            let user_role_descriptions = app_ctx
1✔
3091
                .session_context(user_session.clone())
1✔
3092
                .db()
1✔
3093
                .get_role_descriptions(&user_id)
1✔
3094
                .await
2✔
3095
                .unwrap();
1✔
3096
            assert_eq!(
1✔
3097
                vec![
1✔
3098
                    expected_user_role_description,
1✔
3099
                    expected_registered_role_description.clone(),
1✔
3100
                ],
1✔
3101
                user_role_descriptions
1✔
3102
            );
1✔
3103
        })
1✔
3104
        .await;
9✔
3105
    }
3106

3107
    #[allow(clippy::too_many_lines)]
3108
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
3109
    async fn it_updates_project_layer_symbology() {
1✔
3110
        with_pro_temp_context(|app_ctx, _| async move {
1✔
3111
            let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
3112

3113
            let (_, workflow_id) = register_ndvi_workflow_helper(&app_ctx).await;
183✔
3114

3115
            let db = app_ctx.session_context(session.clone()).db();
1✔
3116

1✔
3117
            let create_project: CreateProject = serde_json::from_value(json!({
1✔
3118
                "name": "Default",
1✔
3119
                "description": "Default project",
1✔
3120
                "bounds": {
1✔
3121
                    "boundingBox": {
1✔
3122
                        "lowerLeftCoordinate": {
1✔
3123
                            "x": -180,
1✔
3124
                            "y": -90
1✔
3125
                        },
1✔
3126
                        "upperRightCoordinate": {
1✔
3127
                            "x": 180,
1✔
3128
                            "y": 90
1✔
3129
                        }
1✔
3130
                    },
1✔
3131
                    "spatialReference": "EPSG:4326",
1✔
3132
                    "timeInterval": {
1✔
3133
                        "start": 1_396_353_600_000i64,
1✔
3134
                        "end": 1_396_353_600_000i64
1✔
3135
                    }
1✔
3136
                },
1✔
3137
                "timeStep": {
1✔
3138
                    "step": 1,
1✔
3139
                    "granularity": "months"
1✔
3140
                }
1✔
3141
            }))
1✔
3142
            .unwrap();
1✔
3143

3144
            let project_id = db.create_project(create_project).await.unwrap();
13✔
3145

1✔
3146
            let update: UpdateProject = serde_json::from_value(json!({
1✔
3147
                "id": project_id.to_string(),
1✔
3148
                "layers": [{
1✔
3149
                    "name": "NDVI",
1✔
3150
                    "workflow": workflow_id.to_string(),
1✔
3151
                    "visibility": {
1✔
3152
                        "data": true,
1✔
3153
                        "legend": false
1✔
3154
                    },
1✔
3155
                    "symbology": {
1✔
3156
                        "type": "raster",
1✔
3157
                        "opacity": 1,
1✔
3158
                        "colorizer": {
1✔
3159
                            "type": "linearGradient",
1✔
3160
                            "breakpoints": [{
1✔
3161
                                "value": 1,
1✔
3162
                                "color": [0, 0, 0, 255]
1✔
3163
                            }, {
1✔
3164
                                "value": 255,
1✔
3165
                                "color": [255, 255, 255, 255]
1✔
3166
                            }],
1✔
3167
                            "noDataColor": [0, 0, 0, 0],
1✔
3168
                            "overColor": [255, 255, 255, 127],
1✔
3169
                            "underColor": [255, 255, 255, 127]
1✔
3170
                        }
1✔
3171
                    }
1✔
3172
                }]
1✔
3173
            }))
1✔
3174
            .unwrap();
1✔
3175

1✔
3176
            db.update_project(update).await.unwrap();
81✔
3177

1✔
3178
            let update: UpdateProject = serde_json::from_value(json!({
1✔
3179
                "id": project_id.to_string(),
1✔
3180
                "layers": [{
1✔
3181
                    "name": "NDVI",
1✔
3182
                    "workflow": workflow_id.to_string(),
1✔
3183
                    "visibility": {
1✔
3184
                        "data": true,
1✔
3185
                        "legend": false
1✔
3186
                    },
1✔
3187
                    "symbology": {
1✔
3188
                        "type": "raster",
1✔
3189
                        "opacity": 1,
1✔
3190
                        "colorizer": {
1✔
3191
                            "type": "linearGradient",
1✔
3192
                            "breakpoints": [{
1✔
3193
                                "value": 1,
1✔
3194
                                "color": [0, 0, 4, 255]
1✔
3195
                            }, {
1✔
3196
                                "value": 17.866_666_666_666_667,
1✔
3197
                                "color": [11, 9, 36, 255]
1✔
3198
                            }, {
1✔
3199
                                "value": 34.733_333_333_333_334,
1✔
3200
                                "color": [32, 17, 75, 255]
1✔
3201
                            }, {
1✔
3202
                                "value": 51.6,
1✔
3203
                                "color": [59, 15, 112, 255]
1✔
3204
                            }, {
1✔
3205
                                "value": 68.466_666_666_666_67,
1✔
3206
                                "color": [87, 21, 126, 255]
1✔
3207
                            }, {
1✔
3208
                                "value": 85.333_333_333_333_33,
1✔
3209
                                "color": [114, 31, 129, 255]
1✔
3210
                            }, {
1✔
3211
                                "value": 102.199_999_999_999_99,
1✔
3212
                                "color": [140, 41, 129, 255]
1✔
3213
                            }, {
1✔
3214
                                "value": 119.066_666_666_666_65,
1✔
3215
                                "color": [168, 50, 125, 255]
1✔
3216
                            }, {
1✔
3217
                                "value": 135.933_333_333_333_34,
1✔
3218
                                "color": [196, 60, 117, 255]
1✔
3219
                            }, {
1✔
3220
                                "value": 152.799_999_999_999_98,
1✔
3221
                                "color": [222, 73, 104, 255]
1✔
3222
                            }, {
1✔
3223
                                "value": 169.666_666_666_666_66,
1✔
3224
                                "color": [241, 96, 93, 255]
1✔
3225
                            }, {
1✔
3226
                                "value": 186.533_333_333_333_33,
1✔
3227
                                "color": [250, 127, 94, 255]
1✔
3228
                            }, {
1✔
3229
                                "value": 203.399_999_999_999_98,
1✔
3230
                                "color": [254, 159, 109, 255]
1✔
3231
                            }, {
1✔
3232
                                "value": 220.266_666_666_666_65,
1✔
3233
                                "color": [254, 191, 132, 255]
1✔
3234
                            }, {
1✔
3235
                                "value": 237.133_333_333_333_3,
1✔
3236
                                "color": [253, 222, 160, 255]
1✔
3237
                            }, {
1✔
3238
                                "value": 254,
1✔
3239
                                "color": [252, 253, 191, 255]
1✔
3240
                            }],
1✔
3241
                            "noDataColor": [0, 0, 0, 0],
1✔
3242
                            "overColor": [255, 255, 255, 127],
1✔
3243
                            "underColor": [255, 255, 255, 127]
1✔
3244
                        }
1✔
3245
                    }
1✔
3246
                }]
1✔
3247
            }))
1✔
3248
            .unwrap();
1✔
3249

1✔
3250
            db.update_project(update).await.unwrap();
69✔
3251

1✔
3252
            let update: UpdateProject = serde_json::from_value(json!({
1✔
3253
                "id": project_id.to_string(),
1✔
3254
                "layers": [{
1✔
3255
                    "name": "NDVI",
1✔
3256
                    "workflow": workflow_id.to_string(),
1✔
3257
                    "visibility": {
1✔
3258
                        "data": true,
1✔
3259
                        "legend": false
1✔
3260
                    },
1✔
3261
                    "symbology": {
1✔
3262
                        "type": "raster",
1✔
3263
                        "opacity": 1,
1✔
3264
                        "colorizer": {
1✔
3265
                            "type": "linearGradient",
1✔
3266
                            "breakpoints": [{
1✔
3267
                                "value": 1,
1✔
3268
                                "color": [0, 0, 4, 255]
1✔
3269
                            }, {
1✔
3270
                                "value": 17.866_666_666_666_667,
1✔
3271
                                "color": [11, 9, 36, 255]
1✔
3272
                            }, {
1✔
3273
                                "value": 34.733_333_333_333_334,
1✔
3274
                                "color": [32, 17, 75, 255]
1✔
3275
                            }, {
1✔
3276
                                "value": 51.6,
1✔
3277
                                "color": [59, 15, 112, 255]
1✔
3278
                            }, {
1✔
3279
                                "value": 68.466_666_666_666_67,
1✔
3280
                                "color": [87, 21, 126, 255]
1✔
3281
                            }, {
1✔
3282
                                "value": 85.333_333_333_333_33,
1✔
3283
                                "color": [114, 31, 129, 255]
1✔
3284
                            }, {
1✔
3285
                                "value": 102.199_999_999_999_99,
1✔
3286
                                "color": [140, 41, 129, 255]
1✔
3287
                            }, {
1✔
3288
                                "value": 119.066_666_666_666_65,
1✔
3289
                                "color": [168, 50, 125, 255]
1✔
3290
                            }, {
1✔
3291
                                "value": 135.933_333_333_333_34,
1✔
3292
                                "color": [196, 60, 117, 255]
1✔
3293
                            }, {
1✔
3294
                                "value": 152.799_999_999_999_98,
1✔
3295
                                "color": [222, 73, 104, 255]
1✔
3296
                            }, {
1✔
3297
                                "value": 169.666_666_666_666_66,
1✔
3298
                                "color": [241, 96, 93, 255]
1✔
3299
                            }, {
1✔
3300
                                "value": 186.533_333_333_333_33,
1✔
3301
                                "color": [250, 127, 94, 255]
1✔
3302
                            }, {
1✔
3303
                                "value": 203.399_999_999_999_98,
1✔
3304
                                "color": [254, 159, 109, 255]
1✔
3305
                            }, {
1✔
3306
                                "value": 220.266_666_666_666_65,
1✔
3307
                                "color": [254, 191, 132, 255]
1✔
3308
                            }, {
1✔
3309
                                "value": 237.133_333_333_333_3,
1✔
3310
                                "color": [253, 222, 160, 255]
1✔
3311
                            }, {
1✔
3312
                                "value": 254,
1✔
3313
                                "color": [252, 253, 191, 255]
1✔
3314
                            }],
1✔
3315
                            "noDataColor": [0, 0, 0, 0],
1✔
3316
                            "overColor": [255, 255, 255, 127],
1✔
3317
                            "underColor": [255, 255, 255, 127]
1✔
3318
                        }
1✔
3319
                    }
1✔
3320
                }]
1✔
3321
            }))
1✔
3322
            .unwrap();
1✔
3323

1✔
3324
            db.update_project(update).await.unwrap();
23✔
3325

1✔
3326
            let update: UpdateProject = serde_json::from_value(json!({
1✔
3327
                "id": project_id.to_string(),
1✔
3328
                "layers": [{
1✔
3329
                    "name": "NDVI",
1✔
3330
                    "workflow": workflow_id.to_string(),
1✔
3331
                    "visibility": {
1✔
3332
                        "data": true,
1✔
3333
                        "legend": false
1✔
3334
                    },
1✔
3335
                    "symbology": {
1✔
3336
                        "type": "raster",
1✔
3337
                        "opacity": 1,
1✔
3338
                        "colorizer": {
1✔
3339
                            "type": "linearGradient",
1✔
3340
                            "breakpoints": [{
1✔
3341
                                "value": 1,
1✔
3342
                                "color": [0, 0, 4, 255]
1✔
3343
                            }, {
1✔
3344
                                "value": 17.933_333_333_333_334,
1✔
3345
                                "color": [11, 9, 36, 255]
1✔
3346
                            }, {
1✔
3347
                                "value": 34.866_666_666_666_67,
1✔
3348
                                "color": [32, 17, 75, 255]
1✔
3349
                            }, {
1✔
3350
                                "value": 51.800_000_000_000_004,
1✔
3351
                                "color": [59, 15, 112, 255]
1✔
3352
                            }, {
1✔
3353
                                "value": 68.733_333_333_333_33,
1✔
3354
                                "color": [87, 21, 126, 255]
1✔
3355
                            }, {
1✔
3356
                                "value": 85.666_666_666_666_66,
1✔
3357
                                "color": [114, 31, 129, 255]
1✔
3358
                            }, {
1✔
3359
                                "value": 102.6,
1✔
3360
                                "color": [140, 41, 129, 255]
1✔
3361
                            }, {
1✔
3362
                                "value": 119.533_333_333_333_32,
1✔
3363
                                "color": [168, 50, 125, 255]
1✔
3364
                            }, {
1✔
3365
                                "value": 136.466_666_666_666_67,
1✔
3366
                                "color": [196, 60, 117, 255]
1✔
3367
                            }, {
1✔
3368
                                "value": 153.4,
1✔
3369
                                "color": [222, 73, 104, 255]
1✔
3370
                            }, {
1✔
3371
                                "value": 170.333_333_333_333_31,
1✔
3372
                                "color": [241, 96, 93, 255]
1✔
3373
                            }, {
1✔
3374
                                "value": 187.266_666_666_666_65,
1✔
3375
                                "color": [250, 127, 94, 255]
1✔
3376
                            }, {
1✔
3377
                                "value": 204.2,
1✔
3378
                                "color": [254, 159, 109, 255]
1✔
3379
                            }, {
1✔
3380
                                "value": 221.133_333_333_333_33,
1✔
3381
                                "color": [254, 191, 132, 255]
1✔
3382
                            }, {
1✔
3383
                                "value": 238.066_666_666_666_63,
1✔
3384
                                "color": [253, 222, 160, 255]
1✔
3385
                            }, {
1✔
3386
                                "value": 255,
1✔
3387
                                "color": [252, 253, 191, 255]
1✔
3388
                            }],
1✔
3389
                            "noDataColor": [0, 0, 0, 0],
1✔
3390
                            "overColor": [255, 255, 255, 127],
1✔
3391
                            "underColor": [255, 255, 255, 127]
1✔
3392
                        }
1✔
3393
                    }
1✔
3394
                }]
1✔
3395
            }))
1✔
3396
            .unwrap();
1✔
3397

1✔
3398
            let update = update;
1✔
3399

3400
            // run two updates concurrently
3401
            let (r0, r1) = join!(db.update_project(update.clone()), db.update_project(update));
1✔
3402

3403
            assert!(r0.is_ok());
1✔
3404
            assert!(r1.is_ok());
1✔
3405
        })
1✔
3406
        .await;
9✔
3407
    }
3408

3409
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
3410
    #[allow(clippy::too_many_lines)]
3411
    async fn it_resolves_dataset_names_to_ids() {
1✔
3412
        with_pro_temp_context(|app_ctx, _| async move {
1✔
3413
            let admin_session = UserSession::admin_session();
1✔
3414
            let db = app_ctx.session_context(admin_session.clone()).db();
1✔
3415

1✔
3416
            let loading_info = OgrSourceDataset {
1✔
3417
                file_name: PathBuf::from("test.csv"),
1✔
3418
                layer_name: "test.csv".to_owned(),
1✔
3419
                data_type: Some(VectorDataType::MultiPoint),
1✔
3420
                time: OgrSourceDatasetTimeType::Start {
1✔
3421
                    start_field: "start".to_owned(),
1✔
3422
                    start_format: OgrSourceTimeFormat::Auto,
1✔
3423
                    duration: OgrSourceDurationSpec::Zero,
1✔
3424
                },
1✔
3425
                default_geometry: None,
1✔
3426
                columns: Some(OgrSourceColumnSpec {
1✔
3427
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
3428
                        header: CsvHeader::Auto,
1✔
3429
                    }),
1✔
3430
                    x: "x".to_owned(),
1✔
3431
                    y: None,
1✔
3432
                    int: vec![],
1✔
3433
                    float: vec![],
1✔
3434
                    text: vec![],
1✔
3435
                    bool: vec![],
1✔
3436
                    datetime: vec![],
1✔
3437
                    rename: None,
1✔
3438
                }),
1✔
3439
                force_ogr_time_filter: false,
1✔
3440
                force_ogr_spatial_filter: false,
1✔
3441
                on_error: OgrSourceErrorSpec::Ignore,
1✔
3442
                sql_query: None,
1✔
3443
                attribute_query: None,
1✔
3444
                cache_ttl: CacheTtlSeconds::default(),
1✔
3445
            };
1✔
3446

1✔
3447
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
3448
                OgrSourceDataset,
1✔
3449
                VectorResultDescriptor,
1✔
3450
                VectorQueryRectangle,
1✔
3451
            > {
1✔
3452
                loading_info: loading_info.clone(),
1✔
3453
                result_descriptor: VectorResultDescriptor {
1✔
3454
                    data_type: VectorDataType::MultiPoint,
1✔
3455
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
3456
                    columns: [(
1✔
3457
                        "foo".to_owned(),
1✔
3458
                        VectorColumnInfo {
1✔
3459
                            data_type: FeatureDataType::Float,
1✔
3460
                            measurement: Measurement::Unitless,
1✔
3461
                        },
1✔
3462
                    )]
1✔
3463
                    .into_iter()
1✔
3464
                    .collect(),
1✔
3465
                    time: None,
1✔
3466
                    bbox: None,
1✔
3467
                },
1✔
3468
                phantom: Default::default(),
1✔
3469
            });
1✔
3470

3471
            let DatasetIdAndName {
3472
                id: dataset_id1,
1✔
3473
                name: dataset_name1,
1✔
3474
            } = db
1✔
3475
                .add_dataset(
1✔
3476
                    AddDataset {
1✔
3477
                        name: Some(DatasetName::new(None, "my_dataset".to_owned())),
1✔
3478
                        display_name: "Ogr Test".to_owned(),
1✔
3479
                        description: "desc".to_owned(),
1✔
3480
                        source_operator: "OgrSource".to_owned(),
1✔
3481
                        symbology: None,
1✔
3482
                        provenance: Some(vec![Provenance {
1✔
3483
                            citation: "citation".to_owned(),
1✔
3484
                            license: "license".to_owned(),
1✔
3485
                            uri: "uri".to_owned(),
1✔
3486
                        }]),
1✔
3487
                    },
1✔
3488
                    db.wrap_meta_data(meta_data.clone()),
1✔
3489
                )
1✔
3490
                .await
167✔
3491
                .unwrap();
1✔
3492

3493
            let DatasetIdAndName {
3494
                id: dataset_id2,
1✔
3495
                name: dataset_name2,
1✔
3496
            } = db
1✔
3497
                .add_dataset(
1✔
3498
                    AddDataset {
1✔
3499
                        name: Some(DatasetName::new(
1✔
3500
                            Some(admin_session.user.id.to_string()),
1✔
3501
                            "my_dataset".to_owned(),
1✔
3502
                        )),
1✔
3503
                        display_name: "Ogr Test".to_owned(),
1✔
3504
                        description: "desc".to_owned(),
1✔
3505
                        source_operator: "OgrSource".to_owned(),
1✔
3506
                        symbology: None,
1✔
3507
                        provenance: Some(vec![Provenance {
1✔
3508
                            citation: "citation".to_owned(),
1✔
3509
                            license: "license".to_owned(),
1✔
3510
                            uri: "uri".to_owned(),
1✔
3511
                        }]),
1✔
3512
                    },
1✔
3513
                    db.wrap_meta_data(meta_data),
1✔
3514
                )
1✔
3515
                .await
7✔
3516
                .unwrap();
1✔
3517

3518
            assert_eq!(
1✔
3519
                db.resolve_dataset_name_to_id(&dataset_name1).await.unwrap(),
3✔
3520
                dataset_id1
3521
            );
3522
            assert_eq!(
1✔
3523
                db.resolve_dataset_name_to_id(&dataset_name2).await.unwrap(),
3✔
3524
                dataset_id2
3525
            );
3526
        })
1✔
3527
        .await;
11✔
3528
    }
3529

3530
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
3531
    #[allow(clippy::too_many_lines)]
3532
    async fn it_bulk_updates_quota() {
1✔
3533
        with_pro_temp_context(|app_ctx, _| async move {
1✔
3534
            let admin_session = UserSession::admin_session();
1✔
3535
            let db = app_ctx.session_context(admin_session.clone()).db();
1✔
3536

3537
            let user1 = app_ctx
1✔
3538
                .register_user(UserRegistration {
1✔
3539
                    email: "user1@example.com".into(),
1✔
3540
                    password: "12345678".into(),
1✔
3541
                    real_name: "User1".into(),
1✔
3542
                })
1✔
3543
                .await
10✔
3544
                .unwrap();
1✔
3545

3546
            let user2 = app_ctx
1✔
3547
                .register_user(UserRegistration {
1✔
3548
                    email: "user2@example.com".into(),
1✔
3549
                    password: "12345678".into(),
1✔
3550
                    real_name: "User2".into(),
1✔
3551
                })
1✔
3552
                .await
9✔
3553
                .unwrap();
1✔
3554

1✔
3555
            // single item in bulk
1✔
3556
            db.bulk_increment_quota_used([(user1, 1)]).await.unwrap();
2✔
3557

3558
            assert_eq!(db.quota_used_by_user(&user1).await.unwrap(), 1);
3✔
3559

3560
            // multiple items in bulk
3561
            db.bulk_increment_quota_used([(user1, 1), (user2, 3)])
1✔
3562
                .await
3✔
3563
                .unwrap();
1✔
3564

3565
            assert_eq!(db.quota_used_by_user(&user1).await.unwrap(), 2);
3✔
3566
            assert_eq!(db.quota_used_by_user(&user2).await.unwrap(), 3);
3✔
3567
        })
1✔
3568
        .await;
11✔
3569
    }
3570
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc