• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

geo-engine / geoengine / 5751943360

03 Aug 2023 02:19PM UTC coverage: 89.422% (+0.4%) from 88.974%
5751943360

push

github

web-flow
Merge pull request #840 from geo-engine/remove_in_memory

Remove in memory contexts and dbs

5338 of 5338 new or added lines in 37 files covered. (100.0%)

103772 of 116048 relevant lines covered (89.42%)

62390.21 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

97.21
/services/src/contexts/postgres.rs
1
use crate::api::model::datatypes::DatasetName;
2
use crate::contexts::{ApplicationContext, QueryContextImpl, SessionId, SimpleSession};
3
use crate::contexts::{GeoEngineDb, SessionContext};
4
use crate::datasets::add_from_directory::{
5
    add_datasets_from_directory, add_providers_from_directory,
6
};
7
use crate::datasets::upload::{Volume, Volumes};
8
use crate::error::{self, Error, Result};
9
use crate::layers::add_from_directory::{
10
    add_layer_collections_from_directory, add_layers_from_directory, UNSORTED_COLLECTION_ID,
11
};
12
use crate::layers::storage::INTERNAL_LAYER_DB_ROOT_COLLECTION_ID;
13

14
use crate::projects::{ProjectId, STRectangle};
15
use crate::tasks::{SimpleTaskManager, SimpleTaskManagerBackend, SimpleTaskManagerContext};
16
use crate::util::config::get_config_element;
17
use async_trait::async_trait;
18
use bb8_postgres::{
19
    bb8::Pool,
20
    bb8::PooledConnection,
21
    tokio_postgres::{error::SqlState, tls::MakeTlsConnect, tls::TlsConnect, Config, Socket},
22
    PostgresConnectionManager,
23
};
24
use geoengine_datatypes::raster::TilingSpecification;
25
use geoengine_operators::engine::ChunkByteSize;
26
use geoengine_operators::util::create_rayon_thread_pool;
27
use log::{debug, info};
28
use rayon::ThreadPool;
29
use std::path::PathBuf;
30
use std::sync::Arc;
31

32
use super::{ExecutionContextImpl, Session, SimpleApplicationContext};
33

34
// TODO: distinguish user-facing errors from system-facing error messages
35

36
/// A context with references to Postgres backends of the database.
#[derive(Clone)]
pub struct PostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // id of the single default session created (or loaded) during construction
    default_session_id: SessionId,
    // rayon pool shared by query and execution contexts
    thread_pool: Arc<ThreadPool>,
    // tiling specification handed to every `ExecutionContextImpl`
    exe_ctx_tiling_spec: TilingSpecification,
    // chunk byte size handed to every `QueryContextImpl`
    query_ctx_chunk_size: ChunkByteSize,
    // in-memory task backend; shared across session contexts (not persisted)
    task_manager: Arc<SimpleTaskManagerBackend>,
    // bb8 connection pool to the Postgres database
    pool: Pool<PostgresConnectionManager<Tls>>,
    volumes: Volumes,
}
53

54
/// Startup state of the database schema as determined by `check_schema_status`.
enum DatabaseStatus {
    // Schema does not exist yet (the `geoengine` table is missing).
    // NOTE(review): typo — should be `Uninitialized`; renaming requires touching all match sites.
    Unitialized,
    // Schema exists and was created with `clear_database_on_start = true`.
    InitializedClearDatabase,
    // Schema exists and was created with `clear_database_on_start = false`.
    InitializedKeepDatabase,
}
59

60
impl<Tls> PostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Creates a new Postgres-backed context.
    ///
    /// Builds the connection pool, creates the database schema if it does not
    /// exist yet, and either creates a fresh default session (new schema) or
    /// loads the existing one from the `sessions` table.
    ///
    /// # Errors
    /// Fails if the pool cannot be built or any of the database calls fail.
    pub async fn new_with_context_spec(
        config: Config,
        tls: Tls,
        exe_ctx_tiling_spec: TilingSpecification,
        query_ctx_chunk_size: ChunkByteSize,
    ) -> Result<Self> {
        let pg_mgr = PostgresConnectionManager::new(config, tls);

        let pool = Pool::builder().build(pg_mgr).await?;
        let created_schema = Self::create_schema(pool.get().await?).await?;

        // fresh schema -> create a default session; otherwise reuse the stored one
        let session = if created_schema {
            let session = SimpleSession::default();
            Self::create_default_session(pool.get().await?, session.id()).await?;
            session
        } else {
            Self::load_default_session(pool.get().await?).await?
        };

        Ok(PostgresContext {
            default_session_id: session.id(),
            task_manager: Default::default(),
            thread_pool: create_rayon_thread_pool(0),
            exe_ctx_tiling_spec,
            query_ctx_chunk_size,
            pool,
            volumes: Default::default(),
        })
    }

    /// Like [`Self::new_with_context_spec`], but additionally populates a
    /// freshly created schema with datasets, providers, layers, and layer
    /// collections read from the given definition directories. If the schema
    /// already existed, the directories are ignored.
    ///
    /// NOTE(review): the pool/schema/session setup duplicates
    /// `new_with_context_spec`; consider delegating to it.
    // TODO: check if the datasets exist already and don't output warnings when skipping them
    #[allow(clippy::too_many_arguments)]
    pub async fn new_with_data(
        config: Config,
        tls: Tls,
        dataset_defs_path: PathBuf,
        provider_defs_path: PathBuf,
        layer_defs_path: PathBuf,
        layer_collection_defs_path: PathBuf,
        exe_ctx_tiling_spec: TilingSpecification,
        query_ctx_chunk_size: ChunkByteSize,
    ) -> Result<Self> {
        let pg_mgr = PostgresConnectionManager::new(config, tls);

        let pool = Pool::builder().build(pg_mgr).await?;
        let created_schema = Self::create_schema(pool.get().await?).await?;

        let session = if created_schema {
            let session = SimpleSession::default();
            Self::create_default_session(pool.get().await?, session.id()).await?;
            session
        } else {
            Self::load_default_session(pool.get().await?).await?
        };

        let app_ctx = PostgresContext {
            default_session_id: session.id(),
            task_manager: Default::default(),
            thread_pool: create_rayon_thread_pool(0),
            exe_ctx_tiling_spec,
            query_ctx_chunk_size,
            pool,
            volumes: Default::default(),
        };

        // only seed initial data into a brand-new schema
        if created_schema {
            info!("Populating database with initial data...");

            let ctx = app_ctx.session_context(session);

            let mut db = ctx.db();
            add_layers_from_directory(&mut db, layer_defs_path).await;
            add_layer_collections_from_directory(&mut db, layer_collection_defs_path).await;

            add_datasets_from_directory(&mut db, dataset_defs_path).await;

            add_providers_from_directory(&mut db, provider_defs_path, &[]).await;
        }

        Ok(app_ctx)
    }

    /// Probes the schema state by querying the `geoengine` bookkeeping table.
    ///
    /// An `UNDEFINED_TABLE` error means the schema was never created; any
    /// other database error is propagated. Otherwise the stored
    /// `clear_database_on_start` flag decides between the two initialized
    /// states.
    async fn check_schema_status(
        conn: &PooledConnection<'_, PostgresConnectionManager<Tls>>,
    ) -> Result<DatabaseStatus> {
        let stmt = match conn
            .prepare("SELECT clear_database_on_start from geoengine;")
            .await
        {
            Ok(stmt) => stmt,
            Err(e) => {
                if let Some(code) = e.code() {
                    if *code == SqlState::UNDEFINED_TABLE {
                        info!("Initializing schema.");
                        return Ok(DatabaseStatus::Unitialized);
                    }
                }
                return Err(error::Error::TokioPostgres { source: e });
            }
        };

        let row = conn.query_one(&stmt, &[]).await?;

        if row.get(0) {
            Ok(DatabaseStatus::InitializedClearDatabase)
        } else {
            Ok(DatabaseStatus::InitializedKeepDatabase)
        }
    }

    #[allow(clippy::too_many_lines)]
    /// Creates the database schema. Returns true if the schema was created, false if it already existed.
    ///
    /// Behavior depends on the stored state and the configured
    /// `clear_database_on_start` flag:
    /// - schema previously created with the clear flag and flag still set: drop and recreate the schema;
    /// - schema previously created without it but flag now set: refuse with `ClearDatabaseOnStartupNotAllowed`;
    /// - schema exists otherwise: no-op, returns `Ok(false)`;
    /// - schema missing: run `schema.sql` and seed the root/unsorted layer collections in one transaction.
    pub(crate) async fn create_schema(
        mut conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
    ) -> Result<bool> {
        let postgres_config = get_config_element::<crate::util::config::Postgres>()?;

        let database_status = Self::check_schema_status(&conn).await?;

        match database_status {
            DatabaseStatus::InitializedClearDatabase if postgres_config.clear_database_on_start => {
                let schema_name = postgres_config.schema;
                info!("Clearing schema {}.", schema_name);
                conn.batch_execute(&format!(
                    "DROP SCHEMA {schema_name} CASCADE; CREATE SCHEMA {schema_name};"
                ))
                .await?;
            }
            DatabaseStatus::InitializedKeepDatabase if postgres_config.clear_database_on_start => {
                return Err(Error::ClearDatabaseOnStartupNotAllowed)
            }
            DatabaseStatus::InitializedClearDatabase | DatabaseStatus::InitializedKeepDatabase => {
                return Ok(false)
            }
            DatabaseStatus::Unitialized => (),
        };

        // everything below runs in a single transaction so a partial schema
        // is never left behind
        let tx = conn.build_transaction().start().await?;

        tx.batch_execute(include_str!("schema.sql")).await?;

        // remember the clear flag so later startups can detect a mismatch
        let stmt = tx
            .prepare(
                "
            INSERT INTO geoengine (clear_database_on_start) VALUES ($1);",
            )
            .await?;

        tx.execute(&stmt, &[&postgres_config.clear_database_on_start])
            .await?;

        // seed the internal root layer collection
        let stmt = tx
            .prepare(
                r#"
            INSERT INTO layer_collections (
                id,
                name,
                description,
                properties
            ) VALUES (
                $1,
                'Layers',
                'All available Geo Engine layers',
                ARRAY[]::"PropertyType"[]
            );"#,
            )
            .await?;

        tx.execute(&stmt, &[&INTERNAL_LAYER_DB_ROOT_COLLECTION_ID])
            .await?;

        // seed the "Unsorted" collection
        let stmt = tx
            .prepare(
                r#"INSERT INTO layer_collections (
                id,
                name,
                description,
                properties
            ) VALUES (
                $1,
                'Unsorted',
                'Unsorted Layers',
                ARRAY[]::"PropertyType"[]
            );"#,
            )
            .await?;

        tx.execute(&stmt, &[&UNSORTED_COLLECTION_ID]).await?;

        // link "Unsorted" under the root collection
        let stmt = tx
            .prepare(
                r#"
            INSERT INTO collection_children (parent, child) 
            VALUES ($1, $2);"#,
            )
            .await?;

        tx.execute(
            &stmt,
            &[
                &INTERNAL_LAYER_DB_ROOT_COLLECTION_ID,
                &UNSORTED_COLLECTION_ID,
            ],
        )
        .await?;

        tx.commit().await?;

        debug!("Created database schema");

        Ok(true)
    }

    /// Inserts the default session row (no project, no view) with the given id.
    async fn create_default_session(
        conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
        session_id: SessionId,
    ) -> Result<()> {
        let stmt = conn
            .prepare("INSERT INTO sessions (id, project_id, view) VALUES ($1, NULL ,NULL);")
            .await?;

        conn.execute(&stmt, &[&session_id]).await?;

        Ok(())
    }

    /// Loads the default session from the `sessions` table.
    ///
    /// Uses `LIMIT 1`, i.e. it assumes there is exactly one (the default)
    /// session row; errors if the table is empty.
    async fn load_default_session(
        conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
    ) -> Result<SimpleSession> {
        let stmt = conn
            .prepare("SELECT id, project_id, view FROM sessions LIMIT 1;")
            .await?;

        let row = conn.query_one(&stmt, &[]).await?;

        Ok(SimpleSession::new(row.get(0), row.get(1), row.get(2)))
    }
}
304

305
#[async_trait]
impl<Tls> SimpleApplicationContext for PostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Returns the id of the default session cached at construction time.
    async fn default_session_id(&self) -> SessionId {
        self.default_session_id
    }

    /// Re-reads the default session from the database (it may have been
    /// updated via the `update_default_session_*` methods).
    async fn default_session(&self) -> Result<SimpleSession> {
        Self::load_default_session(self.pool.get().await?).await
    }

    /// Sets the default session's current project.
    async fn update_default_session_project(&self, project: ProjectId) -> Result<()> {
        let conn = self.pool.get().await?;

        let stmt = conn
            .prepare("UPDATE sessions SET project_id = $1 WHERE id = $2;")
            .await?;

        conn.execute(&stmt, &[&project, &self.default_session_id])
            .await?;

        Ok(())
    }

    /// Sets the default session's current view rectangle.
    async fn update_default_session_view(&self, view: STRectangle) -> Result<()> {
        let conn = self.pool.get().await?;

        let stmt = conn
            .prepare("UPDATE sessions SET view = $1 WHERE id = $2;")
            .await?;

        conn.execute(&stmt, &[&view, &self.default_session_id])
            .await?;

        Ok(())
    }

    /// Builds a session context for the (freshly loaded) default session.
    async fn default_session_context(&self) -> Result<Self::SessionContext> {
        Ok(self.session_context(self.session_by_id(self.default_session_id).await?))
    }
}
351

352
#[async_trait]
impl<Tls> ApplicationContext for PostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    type SessionContext = PostgresSessionContext<Tls>;
    type Session = SimpleSession;

    /// Pairs the given session with a clone of this (cheaply clonable)
    /// application context.
    fn session_context(&self, session: Self::Session) -> Self::SessionContext {
        PostgresSessionContext {
            session,
            context: self.clone(),
        }
    }

    /// Looks up a session by id.
    ///
    /// Any query failure (including "no such row") is mapped to
    /// `Error::InvalidSession` so callers cannot distinguish a missing
    /// session from a database error here.
    async fn session_by_id(&self, session_id: SessionId) -> Result<Self::Session> {
        let mut conn = self.pool.get().await?;

        let tx = conn.build_transaction().start().await?;

        let stmt = tx
            .prepare(
                "
            SELECT           
                project_id,
                view
            FROM sessions
            WHERE id = $1;",
            )
            .await?;

        let row = tx
            .query_one(&stmt, &[&session_id])
            .await
            .map_err(|_error| error::Error::InvalidSession)?;

        Ok(SimpleSession::new(session_id, row.get(0), row.get(1)))
    }
}
394

395
/// A [`SessionContext`] tying a [`SimpleSession`] to a [`PostgresContext`].
#[derive(Clone)]
pub struct PostgresSessionContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // the session this context acts on behalf of
    session: SimpleSession,
    // clone of the application context (shares pool, thread pool, task backend)
    context: PostgresContext<Tls>,
}
406

407
#[async_trait]
408
impl<Tls> SessionContext for PostgresSessionContext<Tls>
409
where
410
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
411
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
412
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
413
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
414
{
415
    type Session = SimpleSession;
416
    type GeoEngineDB = PostgresDb<Tls>;
417

418
    type TaskContext = SimpleTaskManagerContext;
419
    type TaskManager = SimpleTaskManager; // this does not persist across restarts
420
    type QueryContext = QueryContextImpl;
421
    type ExecutionContext = ExecutionContextImpl<Self::GeoEngineDB>;
422

423
    fn db(&self) -> Self::GeoEngineDB {
388✔
424
        PostgresDb::new(self.context.pool.clone())
388✔
425
    }
388✔
426

427
    fn tasks(&self) -> Self::TaskManager {
36✔
428
        SimpleTaskManager::new(self.context.task_manager.clone())
36✔
429
    }
36✔
430

431
    fn query_context(&self) -> Result<Self::QueryContext> {
27✔
432
        Ok(QueryContextImpl::new(
27✔
433
            self.context.query_ctx_chunk_size,
27✔
434
            self.context.thread_pool.clone(),
27✔
435
        ))
27✔
436
    }
27✔
437

438
    fn execution_context(&self) -> Result<Self::ExecutionContext> {
50✔
439
        Ok(ExecutionContextImpl::<PostgresDb<Tls>>::new(
50✔
440
            self.db(),
50✔
441
            self.context.thread_pool.clone(),
50✔
442
            self.context.exe_ctx_tiling_spec,
50✔
443
        ))
50✔
444
    }
50✔
445

446
    fn volumes(&self) -> Result<Vec<Volume>> {
×
447
        Ok(self.context.volumes.volumes.clone())
×
448
    }
×
449

450
    fn session(&self) -> &Self::Session {
110✔
451
        &self.session
110✔
452
    }
110✔
453
}
454

455
/// The Postgres-backed implementation of [`GeoEngineDb`]; a thin wrapper
/// around a bb8 connection pool.
pub struct PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // shared connection pool; crate-visible so sibling modules can run queries
    pub(crate) conn_pool: Pool<PostgresConnectionManager<Tls>>,
}
464

465
impl<Tls> PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    /// Creates a new db handle from the given connection pool.
    pub fn new(conn_pool: Pool<PostgresConnectionManager<Tls>>) -> Self {
        Self { conn_pool }
    }

    /// Check whether the namespace of the given dataset is allowed for insertion
    ///
    /// # Errors
    /// Returns `Error::InvalidDatasetIdNamespace` if the name carries a namespace.
    pub(crate) fn check_namespace(id: &DatasetName) -> Result<()> {
        // due to a lack of users, etc., we only allow one namespace for now
        if id.namespace.is_none() {
            Ok(())
        } else {
            Err(Error::InvalidDatasetIdNamespace)
        }
    }
}
487

488
// Marker impl: `GeoEngineDb` aggregates the individual storage traits
// (implemented for `PostgresDb` in sibling modules), so no methods are needed here.
impl<Tls> GeoEngineDb for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
496

497
#[cfg(test)]
498
mod tests {
499
    use std::str::FromStr;
500

501
    use super::*;
502
    use crate::api::model::datatypes::{
503
        Breakpoint, ClassificationMeasurement, Colorizer, ContinuousMeasurement, DataProviderId,
504
        DatasetName, DefaultColors, LayerId, LinearGradient, LogarithmicGradient, Measurement,
505
        NotNanF64, OverUnderColors, Palette, RgbaColor, SpatialPartition2D,
506
    };
507
    use crate::api::model::operators::PlotResultDescriptor;
508
    use crate::api::model::responses::datasets::DatasetIdAndName;
509
    use crate::api::model::services::AddDataset;
510
    use crate::api::model::ColorizerTypeDbType;
511
    use crate::datasets::external::mock::{MockCollection, MockExternalLayerProviderDefinition};
512
    use crate::datasets::listing::{DatasetListOptions, DatasetListing, ProvenanceOutput};
513
    use crate::datasets::listing::{DatasetProvider, Provenance};
514
    use crate::datasets::storage::{DatasetStore, MetaDataDefinition};
515
    use crate::datasets::upload::{FileId, UploadId};
516
    use crate::datasets::upload::{FileUpload, Upload, UploadDb};
517
    use crate::layers::layer::{
518
        AddLayer, AddLayerCollection, CollectionItem, LayerCollection, LayerCollectionListOptions,
519
        LayerCollectionListing, LayerListing, ProviderLayerCollectionId, ProviderLayerId,
520
    };
521
    use crate::layers::listing::{LayerCollectionId, LayerCollectionProvider};
522
    use crate::layers::storage::{
523
        LayerDb, LayerProviderDb, LayerProviderListing, LayerProviderListingOptions,
524
        INTERNAL_PROVIDER_ID,
525
    };
526
    use crate::projects::{
527
        ColorParam, CreateProject, DerivedColor, DerivedNumber, LayerUpdate, LineSymbology,
528
        LoadVersion, NumberParam, OrderBy, Plot, PlotUpdate, PointSymbology, PolygonSymbology,
529
        ProjectDb, ProjectFilter, ProjectId, ProjectLayer, ProjectListOptions, ProjectListing,
530
        RasterSymbology, STRectangle, StrokeParam, Symbology, TextSymbology, UpdateProject,
531
    };
532
    use crate::util::tests::register_ndvi_workflow_helper;
533
    use crate::util::tests::with_temp_context;
534
    use crate::workflows::registry::WorkflowRegistry;
535
    use crate::workflows::workflow::Workflow;
536
    use bb8_postgres::tokio_postgres::NoTls;
537
    use futures::join;
538
    use geoengine_datatypes::collections::VectorDataType;
539
    use geoengine_datatypes::primitives::CacheTtlSeconds;
540
    use geoengine_datatypes::primitives::{
541
        BoundingBox2D, Coordinate2D, FeatureDataType, RasterQueryRectangle, SpatialResolution,
542
        TimeGranularity, TimeInstance, TimeInterval, TimeStep, VectorQueryRectangle,
543
    };
544
    use geoengine_datatypes::raster::RasterDataType;
545
    use geoengine_datatypes::spatial_reference::{SpatialReference, SpatialReferenceOption};
546
    use geoengine_operators::engine::{
547
        MetaData, MetaDataProvider, MultipleRasterOrSingleVectorSource, PlotOperator,
548
        RasterResultDescriptor, StaticMetaData, TypedOperator, TypedResultDescriptor,
549
        VectorColumnInfo, VectorOperator, VectorResultDescriptor,
550
    };
551
    use geoengine_operators::mock::{MockPointSource, MockPointSourceParams};
552
    use geoengine_operators::plot::{Statistics, StatisticsParams};
553
    use geoengine_operators::source::{
554
        CsvHeader, FileNotFoundHandling, FormatSpecifics, GdalDatasetGeoTransform,
555
        GdalDatasetParameters, GdalLoadingInfo, GdalMetaDataList, GdalMetaDataRegular,
556
        GdalMetaDataStatic, GdalMetadataNetCdfCf, OgrSourceColumnSpec, OgrSourceDataset,
557
        OgrSourceDatasetTimeType, OgrSourceDurationSpec, OgrSourceErrorSpec, OgrSourceTimeFormat,
558
    };
559
    use geoengine_operators::util::input::MultiRasterOrVectorOperator::Raster;
560
    use ordered_float::NotNan;
561
    use serde_json::json;
562

563
    /// End-to-end project lifecycle against a temporary Postgres schema:
    /// create -> list -> update -> delete.
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn test() {
        with_temp_context(|app_ctx, _| async move {
            let session = app_ctx.default_session().await.unwrap();

            create_projects(&app_ctx, &session).await;

            let projects = list_projects(&app_ctx, &session).await;

            // `list_projects` orders by name descending, so [0] is "Test9"
            let project_id = projects[0].id;

            update_projects(&app_ctx, &session, project_id).await;

            delete_project(&app_ctx, &session, project_id).await;
        })
        .await;
    }
580

581
    /// Deletes `project_id` and asserts it can no longer be loaded.
    async fn delete_project(
        app_ctx: &PostgresContext<NoTls>,
        session: &SimpleSession,
        project_id: ProjectId,
    ) {
        let db = app_ctx.session_context(session.clone()).db();

        db.delete_project(project_id).await.unwrap();

        assert!(db.load_project(project_id).await.is_err());
    }
592

593
    /// Applies three successive updates to `project_id` (add a plot, add a
    /// second plot, delete all plots) and checks that each update creates a
    /// new project version.
    #[allow(clippy::too_many_lines)]
    async fn update_projects(
        app_ctx: &PostgresContext<NoTls>,
        session: &SimpleSession,
        project_id: ProjectId,
    ) {
        let db = app_ctx.session_context(session.clone()).db();

        let project = db
            .load_project_version(project_id, LoadVersion::Latest)
            .await
            .unwrap();

        // register a vector workflow to use as a project layer
        let layer_workflow_id = db
            .register_workflow(Workflow {
                operator: TypedOperator::Vector(
                    MockPointSource {
                        params: MockPointSourceParams {
                            points: vec![Coordinate2D::new(1., 2.); 3],
                        },
                    }
                    .boxed(),
                ),
            })
            .await
            .unwrap();

        assert!(db.load_workflow(&layer_workflow_id).await.is_ok());

        // register a statistics workflow to use as a project plot
        let plot_workflow_id = db
            .register_workflow(Workflow {
                operator: Statistics {
                    params: StatisticsParams {
                        column_names: vec![],
                    },
                    sources: MultipleRasterOrSingleVectorSource {
                        source: Raster(vec![]),
                    },
                }
                .boxed()
                .into(),
            })
            .await
            .unwrap();

        assert!(db.load_workflow(&plot_workflow_id).await.is_ok());

        // add a plot
        let update = UpdateProject {
            id: project.id,
            name: Some("Test9 Updated".into()),
            description: None,
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
                workflow: layer_workflow_id,
                name: "TestLayer".into(),
                symbology: PointSymbology::default().into(),
                visibility: Default::default(),
            })]),
            plots: Some(vec![PlotUpdate::UpdateOrInsert(Plot {
                workflow: plot_workflow_id,
                name: "Test Plot".into(),
            })]),
            bounds: None,
            time_step: None,
        };
        db.update_project(update).await.unwrap();

        // each update appends a project version
        let versions = db.list_project_versions(project_id).await.unwrap();
        assert_eq!(versions.len(), 2);

        // add second plot
        let update = UpdateProject {
            id: project.id,
            name: Some("Test9 Updated".into()),
            description: None,
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
                workflow: layer_workflow_id,
                name: "TestLayer".into(),
                symbology: PointSymbology::default().into(),
                visibility: Default::default(),
            })]),
            plots: Some(vec![
                PlotUpdate::UpdateOrInsert(Plot {
                    workflow: plot_workflow_id,
                    name: "Test Plot".into(),
                }),
                PlotUpdate::UpdateOrInsert(Plot {
                    workflow: plot_workflow_id,
                    name: "Test Plot".into(),
                }),
            ]),
            bounds: None,
            time_step: None,
        };
        db.update_project(update).await.unwrap();

        let versions = db.list_project_versions(project_id).await.unwrap();
        assert_eq!(versions.len(), 3);

        // delete plots
        let update = UpdateProject {
            id: project.id,
            name: None,
            description: None,
            layers: None,
            plots: Some(vec![]),
            bounds: None,
            time_step: None,
        };
        db.update_project(update).await.unwrap();

        let versions = db.list_project_versions(project_id).await.unwrap();
        assert_eq!(versions.len(), 4);
    }
707

708
    /// Lists projects ordered by name descending with a page size of 2 and
    /// asserts the first page is ["Test9", "Test8"] (relies on the ten
    /// projects created by `create_projects`).
    async fn list_projects(
        app_ctx: &PostgresContext<NoTls>,
        session: &SimpleSession,
    ) -> Vec<ProjectListing> {
        let options = ProjectListOptions {
            filter: ProjectFilter::None,
            order: OrderBy::NameDesc,
            offset: 0,
            limit: 2,
        };

        let db = app_ctx.session_context(session.clone()).db();

        let projects = db.list_projects(options).await.unwrap();

        assert_eq!(projects.len(), 2);
        assert_eq!(projects[0].name, "Test9");
        assert_eq!(projects[1].name, "Test8");
        projects
    }
728

729
    /// Creates ten projects named "Test0".."Test9", each with a unit bounding
    /// rectangle in an unreferenced spatial reference.
    async fn create_projects(app_ctx: &PostgresContext<NoTls>, session: &SimpleSession) {
        let db = app_ctx.session_context(session.clone()).db();

        for i in 0..10 {
            let create = CreateProject {
                name: format!("Test{i}"),
                // descriptions run in reverse: "Test10" down to "Test1"
                description: format!("Test{}", 10 - i),
                bounds: STRectangle::new(
                    SpatialReferenceOption::Unreferenced,
                    0.,
                    0.,
                    1.,
                    1.,
                    0,
                    1,
                )
                .unwrap(),
                time_step: None,
            };
            db.create_project(create).await.unwrap();
        }
    }
751

752
    /// Registers a workflow, drops the session context, and verifies the
    /// workflow can still be loaded through the db handle and serializes to
    /// the expected JSON — i.e. workflows are persisted in the database, not
    /// held by the context.
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn it_persists_workflows() {
        with_temp_context(|app_ctx, _pg_config| async move {
            let workflow = Workflow {
                operator: TypedOperator::Vector(
                    MockPointSource {
                        params: MockPointSourceParams {
                            points: vec![Coordinate2D::new(1., 2.); 3],
                        },
                    }
                    .boxed(),
                ),
            };

            let session = app_ctx.default_session().await.unwrap();
            let ctx = app_ctx.session_context(session);

            let db = ctx.db();
            let id = db.register_workflow(workflow).await.unwrap();

            // the context is gone, but the workflow must survive in the db
            drop(ctx);

            let workflow = db.load_workflow(&id).await.unwrap();

            let json = serde_json::to_string(&workflow).unwrap();
            assert_eq!(json, r#"{"type":"Vector","operator":{"type":"MockPointSource","params":{"points":[{"x":1.0,"y":2.0},{"x":1.0,"y":2.0},{"x":1.0,"y":2.0}]}}}"#);
        })
        .await;
    }
785

786
    #[allow(clippy::too_many_lines)]
787
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
788
    async fn it_persists_datasets() {
1✔
789
        with_temp_context(|app_ctx, _| async move {
1✔
790
            let loading_info = OgrSourceDataset {
1✔
791
                file_name: PathBuf::from("test.csv"),
1✔
792
                layer_name: "test.csv".to_owned(),
1✔
793
                data_type: Some(VectorDataType::MultiPoint),
1✔
794
                time: OgrSourceDatasetTimeType::Start {
1✔
795
                    start_field: "start".to_owned(),
1✔
796
                    start_format: OgrSourceTimeFormat::Auto,
1✔
797
                    duration: OgrSourceDurationSpec::Zero,
1✔
798
                },
1✔
799
                default_geometry: None,
1✔
800
                columns: Some(OgrSourceColumnSpec {
1✔
801
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
802
                        header: CsvHeader::Auto,
1✔
803
                    }),
1✔
804
                    x: "x".to_owned(),
1✔
805
                    y: None,
1✔
806
                    int: vec![],
1✔
807
                    float: vec![],
1✔
808
                    text: vec![],
1✔
809
                    bool: vec![],
1✔
810
                    datetime: vec![],
1✔
811
                    rename: None,
1✔
812
                }),
1✔
813
                force_ogr_time_filter: false,
1✔
814
                force_ogr_spatial_filter: false,
1✔
815
                on_error: OgrSourceErrorSpec::Ignore,
1✔
816
                sql_query: None,
1✔
817
                attribute_query: None,
1✔
818
                cache_ttl: CacheTtlSeconds::default(),
1✔
819
            };
1✔
820

1✔
821
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
822
                OgrSourceDataset,
1✔
823
                VectorResultDescriptor,
1✔
824
                VectorQueryRectangle,
1✔
825
            > {
1✔
826
                loading_info: loading_info.clone(),
1✔
827
                result_descriptor: VectorResultDescriptor {
1✔
828
                    data_type: VectorDataType::MultiPoint,
1✔
829
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
830
                    columns: [(
1✔
831
                        "foo".to_owned(),
1✔
832
                        VectorColumnInfo {
1✔
833
                            data_type: FeatureDataType::Float,
1✔
834
                            measurement: Measurement::Unitless.into(),
1✔
835
                        },
1✔
836
                    )]
1✔
837
                    .into_iter()
1✔
838
                    .collect(),
1✔
839
                    time: None,
1✔
840
                    bbox: None,
1✔
841
                },
1✔
842
                phantom: Default::default(),
1✔
843
            });
1✔
844

845
            let session = app_ctx.default_session().await.unwrap();
18✔
846

1✔
847
            let dataset_name = DatasetName::new(None, "my_dataset");
1✔
848

1✔
849
            let db = app_ctx.session_context(session.clone()).db();
1✔
850
            let wrap = db.wrap_meta_data(meta_data);
1✔
851
            let DatasetIdAndName {
852
                id: dataset_id,
1✔
853
                name: dataset_name,
1✔
854
            } = db
1✔
855
                .add_dataset(
1✔
856
                    AddDataset {
1✔
857
                        name: Some(dataset_name.clone()),
1✔
858
                        display_name: "Ogr Test".to_owned(),
1✔
859
                        description: "desc".to_owned(),
1✔
860
                        source_operator: "OgrSource".to_owned(),
1✔
861
                        symbology: None,
1✔
862
                        provenance: Some(vec![Provenance {
1✔
863
                            citation: "citation".to_owned(),
1✔
864
                            license: "license".to_owned(),
1✔
865
                            uri: "uri".to_owned(),
1✔
866
                        }]),
1✔
867
                    },
1✔
868
                    wrap,
1✔
869
                )
1✔
870
                .await
67✔
871
                .unwrap();
1✔
872

873
            let datasets = db
1✔
874
                .list_datasets(DatasetListOptions {
1✔
875
                    filter: None,
1✔
876
                    order: crate::datasets::listing::OrderBy::NameAsc,
1✔
877
                    offset: 0,
1✔
878
                    limit: 10,
1✔
879
                })
1✔
880
                .await
3✔
881
                .unwrap();
1✔
882

1✔
883
            assert_eq!(datasets.len(), 1);
1✔
884

885
            assert_eq!(
1✔
886
                datasets[0],
1✔
887
                DatasetListing {
1✔
888
                    id: dataset_id,
1✔
889
                    name: dataset_name,
1✔
890
                    display_name: "Ogr Test".to_owned(),
1✔
891
                    description: "desc".to_owned(),
1✔
892
                    source_operator: "OgrSource".to_owned(),
1✔
893
                    symbology: None,
1✔
894
                    tags: vec![],
1✔
895
                    result_descriptor: TypedResultDescriptor::Vector(VectorResultDescriptor {
1✔
896
                        data_type: VectorDataType::MultiPoint,
1✔
897
                        spatial_reference: SpatialReference::epsg_4326().into(),
1✔
898
                        columns: [(
1✔
899
                            "foo".to_owned(),
1✔
900
                            VectorColumnInfo {
1✔
901
                                data_type: FeatureDataType::Float,
1✔
902
                                measurement: Measurement::Unitless.into()
1✔
903
                            }
1✔
904
                        )]
1✔
905
                        .into_iter()
1✔
906
                        .collect(),
1✔
907
                        time: None,
1✔
908
                        bbox: None,
1✔
909
                    })
1✔
910
                    .into(),
1✔
911
                },
1✔
912
            );
1✔
913

914
            let provenance = db.load_provenance(&dataset_id).await.unwrap();
3✔
915

1✔
916
            assert_eq!(
1✔
917
                provenance,
1✔
918
                ProvenanceOutput {
1✔
919
                    data: dataset_id.into(),
1✔
920
                    provenance: Some(vec![Provenance {
1✔
921
                        citation: "citation".to_owned(),
1✔
922
                        license: "license".to_owned(),
1✔
923
                        uri: "uri".to_owned(),
1✔
924
                    }])
1✔
925
                }
1✔
926
            );
1✔
927

928
            let meta_data: Box<dyn MetaData<OgrSourceDataset, _, _>> =
1✔
929
                db.meta_data(&dataset_id.into()).await.unwrap();
3✔
930

931
            assert_eq!(
1✔
932
                meta_data
1✔
933
                    .loading_info(VectorQueryRectangle {
1✔
934
                        spatial_bounds: BoundingBox2D::new_unchecked(
1✔
935
                            (-180., -90.).into(),
1✔
936
                            (180., 90.).into()
1✔
937
                        ),
1✔
938
                        time_interval: TimeInterval::default(),
1✔
939
                        spatial_resolution: SpatialResolution::zero_point_one(),
1✔
940
                    })
1✔
941
                    .await
×
942
                    .unwrap(),
1✔
943
                loading_info
944
            );
945
        })
1✔
946
        .await;
10✔
947
    }
948

949
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
950
    async fn it_persists_uploads() {
1✔
951
        with_temp_context(|app_ctx, _| async move {
1✔
952
            let id = UploadId::from_str("2de18cd8-4a38-4111-a445-e3734bc18a80").unwrap();
1✔
953
            let input = Upload {
1✔
954
                id,
1✔
955
                files: vec![FileUpload {
1✔
956
                    id: FileId::from_str("e80afab0-831d-4d40-95d6-1e4dfd277e72").unwrap(),
1✔
957
                    name: "test.csv".to_owned(),
1✔
958
                    byte_size: 1337,
1✔
959
                }],
1✔
960
            };
1✔
961

962
            let session = app_ctx.default_session().await.unwrap();
18✔
963

1✔
964
            let db = app_ctx.session_context(session.clone()).db();
1✔
965

1✔
966
            db.create_upload(input.clone()).await.unwrap();
6✔
967

968
            let upload = db.load_upload(id).await.unwrap();
3✔
969

1✔
970
            assert_eq!(upload, input);
1✔
971
        })
1✔
972
        .await;
12✔
973
    }
974

975
    #[allow(clippy::too_many_lines)]
976
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
977
    async fn it_persists_layer_providers() {
1✔
978
        with_temp_context(|app_ctx, _| async move {
1✔
979
            let db = app_ctx.default_session_context().await.unwrap().db();
19✔
980

1✔
981
            let provider_id =
1✔
982
                DataProviderId::from_str("7b20c8d7-d754-4f8f-ad44-dddd25df22d2").unwrap();
1✔
983

1✔
984
            let loading_info = OgrSourceDataset {
1✔
985
                file_name: PathBuf::from("test.csv"),
1✔
986
                layer_name: "test.csv".to_owned(),
1✔
987
                data_type: Some(VectorDataType::MultiPoint),
1✔
988
                time: OgrSourceDatasetTimeType::Start {
1✔
989
                    start_field: "start".to_owned(),
1✔
990
                    start_format: OgrSourceTimeFormat::Auto,
1✔
991
                    duration: OgrSourceDurationSpec::Zero,
1✔
992
                },
1✔
993
                default_geometry: None,
1✔
994
                columns: Some(OgrSourceColumnSpec {
1✔
995
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
996
                        header: CsvHeader::Auto,
1✔
997
                    }),
1✔
998
                    x: "x".to_owned(),
1✔
999
                    y: None,
1✔
1000
                    int: vec![],
1✔
1001
                    float: vec![],
1✔
1002
                    text: vec![],
1✔
1003
                    bool: vec![],
1✔
1004
                    datetime: vec![],
1✔
1005
                    rename: None,
1✔
1006
                }),
1✔
1007
                force_ogr_time_filter: false,
1✔
1008
                force_ogr_spatial_filter: false,
1✔
1009
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1010
                sql_query: None,
1✔
1011
                attribute_query: None,
1✔
1012
                cache_ttl: CacheTtlSeconds::default(),
1✔
1013
            };
1✔
1014

1✔
1015
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1016
                OgrSourceDataset,
1✔
1017
                VectorResultDescriptor,
1✔
1018
                VectorQueryRectangle,
1✔
1019
            > {
1✔
1020
                loading_info: loading_info.clone(),
1✔
1021
                result_descriptor: VectorResultDescriptor {
1✔
1022
                    data_type: VectorDataType::MultiPoint,
1✔
1023
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1024
                    columns: [(
1✔
1025
                        "foo".to_owned(),
1✔
1026
                        VectorColumnInfo {
1✔
1027
                            data_type: FeatureDataType::Float,
1✔
1028
                            measurement: Measurement::Unitless.into(),
1✔
1029
                        },
1✔
1030
                    )]
1✔
1031
                    .into_iter()
1✔
1032
                    .collect(),
1✔
1033
                    time: None,
1✔
1034
                    bbox: None,
1✔
1035
                },
1✔
1036
                phantom: Default::default(),
1✔
1037
            });
1✔
1038

1✔
1039
            let provider = MockExternalLayerProviderDefinition {
1✔
1040
                id: provider_id,
1✔
1041
                root_collection: MockCollection {
1✔
1042
                    id: LayerCollectionId("b5f82c7c-9133-4ac1-b4ae-8faac3b9a6df".to_owned()),
1✔
1043
                    name: "Mock Collection A".to_owned(),
1✔
1044
                    description: "Some description".to_owned(),
1✔
1045
                    collections: vec![MockCollection {
1✔
1046
                        id: LayerCollectionId("21466897-37a1-4666-913a-50b5244699ad".to_owned()),
1✔
1047
                        name: "Mock Collection B".to_owned(),
1✔
1048
                        description: "Some description".to_owned(),
1✔
1049
                        collections: vec![],
1✔
1050
                        layers: vec![],
1✔
1051
                    }],
1✔
1052
                    layers: vec![],
1✔
1053
                },
1✔
1054
                data: [("myData".to_owned(), meta_data)].into_iter().collect(),
1✔
1055
            };
1✔
1056

1✔
1057
            db.add_layer_provider(Box::new(provider)).await.unwrap();
3✔
1058

1059
            let providers = db
1✔
1060
                .list_layer_providers(LayerProviderListingOptions {
1✔
1061
                    offset: 0,
1✔
1062
                    limit: 10,
1✔
1063
                })
1✔
1064
                .await
3✔
1065
                .unwrap();
1✔
1066

1✔
1067
            assert_eq!(providers.len(), 1);
1✔
1068

1069
            assert_eq!(
1✔
1070
                providers[0],
1✔
1071
                LayerProviderListing {
1✔
1072
                    id: provider_id,
1✔
1073
                    name: "MockName".to_owned(),
1✔
1074
                    description: "MockType".to_owned(),
1✔
1075
                }
1✔
1076
            );
1✔
1077

1078
            let provider = db.load_layer_provider(provider_id).await.unwrap();
3✔
1079

1080
            let datasets = provider
1✔
1081
                .load_layer_collection(
1082
                    &provider.get_root_layer_collection_id().await.unwrap(),
1✔
1083
                    LayerCollectionListOptions {
1✔
1084
                        offset: 0,
1✔
1085
                        limit: 10,
1✔
1086
                    },
1✔
1087
                )
1088
                .await
×
1089
                .unwrap();
1✔
1090

1✔
1091
            assert_eq!(datasets.items.len(), 1);
1✔
1092
        })
1✔
1093
        .await;
9✔
1094
    }
1095

1096
    #[allow(clippy::too_many_lines)]
1097
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1098
    async fn it_loads_all_meta_data_types() {
1✔
1099
        with_temp_context(|app_ctx, _| async move {
1✔
1100
            let session = app_ctx.default_session().await.unwrap();
18✔
1101

1✔
1102
            let db = app_ctx.session_context(session.clone()).db();
1✔
1103

1✔
1104
            let vector_descriptor = VectorResultDescriptor {
1✔
1105
                data_type: VectorDataType::Data,
1✔
1106
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1107
                columns: Default::default(),
1✔
1108
                time: None,
1✔
1109
                bbox: None,
1✔
1110
            };
1✔
1111

1✔
1112
            let raster_descriptor = RasterResultDescriptor {
1✔
1113
                data_type: RasterDataType::U8,
1✔
1114
                spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1115
                measurement: Default::default(),
1✔
1116
                time: None,
1✔
1117
                bbox: None,
1✔
1118
                resolution: None,
1✔
1119
            };
1✔
1120

1✔
1121
            let vector_ds = AddDataset {
1✔
1122
                name: None,
1✔
1123
                display_name: "OgrDataset".to_string(),
1✔
1124
                description: "My Ogr dataset".to_string(),
1✔
1125
                source_operator: "OgrSource".to_string(),
1✔
1126
                symbology: None,
1✔
1127
                provenance: None,
1✔
1128
            };
1✔
1129

1✔
1130
            let raster_ds = AddDataset {
1✔
1131
                name: None,
1✔
1132
                display_name: "GdalDataset".to_string(),
1✔
1133
                description: "My Gdal dataset".to_string(),
1✔
1134
                source_operator: "GdalSource".to_string(),
1✔
1135
                symbology: None,
1✔
1136
                provenance: None,
1✔
1137
            };
1✔
1138

1✔
1139
            let gdal_params = GdalDatasetParameters {
1✔
1140
                file_path: Default::default(),
1✔
1141
                rasterband_channel: 0,
1✔
1142
                geo_transform: GdalDatasetGeoTransform {
1✔
1143
                    origin_coordinate: Default::default(),
1✔
1144
                    x_pixel_size: 0.0,
1✔
1145
                    y_pixel_size: 0.0,
1✔
1146
                },
1✔
1147
                width: 0,
1✔
1148
                height: 0,
1✔
1149
                file_not_found_handling: FileNotFoundHandling::NoData,
1✔
1150
                no_data_value: None,
1✔
1151
                properties_mapping: None,
1✔
1152
                gdal_open_options: None,
1✔
1153
                gdal_config_options: None,
1✔
1154
                allow_alphaband_as_mask: false,
1✔
1155
                retry: None,
1✔
1156
            };
1✔
1157

1✔
1158
            let meta = StaticMetaData {
1✔
1159
                loading_info: OgrSourceDataset {
1✔
1160
                    file_name: Default::default(),
1✔
1161
                    layer_name: String::new(),
1✔
1162
                    data_type: None,
1✔
1163
                    time: Default::default(),
1✔
1164
                    default_geometry: None,
1✔
1165
                    columns: None,
1✔
1166
                    force_ogr_time_filter: false,
1✔
1167
                    force_ogr_spatial_filter: false,
1✔
1168
                    on_error: OgrSourceErrorSpec::Ignore,
1✔
1169
                    sql_query: None,
1✔
1170
                    attribute_query: None,
1✔
1171
                    cache_ttl: CacheTtlSeconds::default(),
1✔
1172
                },
1✔
1173
                result_descriptor: vector_descriptor.clone(),
1✔
1174
                phantom: Default::default(),
1✔
1175
            };
1✔
1176

1✔
1177
            let meta = db.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));
1✔
1178

1179
            let id = db.add_dataset(vector_ds, meta).await.unwrap().id;
67✔
1180

1181
            let meta: geoengine_operators::util::Result<
1✔
1182
                Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
1✔
1183
            > = db.meta_data(&id.into()).await;
3✔
1184

1185
            assert!(meta.is_ok());
1✔
1186

1187
            let meta = GdalMetaDataRegular {
1✔
1188
                result_descriptor: raster_descriptor.clone(),
1✔
1189
                params: gdal_params.clone(),
1✔
1190
                time_placeholders: Default::default(),
1✔
1191
                data_time: Default::default(),
1✔
1192
                step: TimeStep {
1✔
1193
                    granularity: TimeGranularity::Millis,
1✔
1194
                    step: 0,
1✔
1195
                },
1✔
1196
                cache_ttl: CacheTtlSeconds::default(),
1✔
1197
            };
1✔
1198

1✔
1199
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataRegular(meta));
1✔
1200

1201
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
3✔
1202

1203
            let meta: geoengine_operators::util::Result<
1✔
1204
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1205
            > = db.meta_data(&id.into()).await;
3✔
1206

1207
            assert!(meta.is_ok());
1✔
1208

1209
            let meta = GdalMetaDataStatic {
1✔
1210
                time: None,
1✔
1211
                params: gdal_params.clone(),
1✔
1212
                result_descriptor: raster_descriptor.clone(),
1✔
1213
                cache_ttl: CacheTtlSeconds::default(),
1✔
1214
            };
1✔
1215

1✔
1216
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalStatic(meta));
1✔
1217

1218
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
3✔
1219

1220
            let meta: geoengine_operators::util::Result<
1✔
1221
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1222
            > = db.meta_data(&id.into()).await;
3✔
1223

1224
            assert!(meta.is_ok());
1✔
1225

1226
            let meta = GdalMetaDataList {
1✔
1227
                result_descriptor: raster_descriptor.clone(),
1✔
1228
                params: vec![],
1✔
1229
            };
1✔
1230

1✔
1231
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataList(meta));
1✔
1232

1233
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
3✔
1234

1235
            let meta: geoengine_operators::util::Result<
1✔
1236
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1237
            > = db.meta_data(&id.into()).await;
3✔
1238

1239
            assert!(meta.is_ok());
1✔
1240

1241
            let meta = GdalMetadataNetCdfCf {
1✔
1242
                result_descriptor: raster_descriptor.clone(),
1✔
1243
                params: gdal_params.clone(),
1✔
1244
                start: TimeInstance::MIN,
1✔
1245
                end: TimeInstance::MAX,
1✔
1246
                step: TimeStep {
1✔
1247
                    granularity: TimeGranularity::Millis,
1✔
1248
                    step: 0,
1✔
1249
                },
1✔
1250
                band_offset: 0,
1✔
1251
                cache_ttl: CacheTtlSeconds::default(),
1✔
1252
            };
1✔
1253

1✔
1254
            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetadataNetCdfCf(meta));
1✔
1255

1256
            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;
3✔
1257

1258
            let meta: geoengine_operators::util::Result<
1✔
1259
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
1✔
1260
            > = db.meta_data(&id.into()).await;
3✔
1261

1262
            assert!(meta.is_ok());
1✔
1263
        })
1✔
1264
        .await;
12✔
1265
    }
1266

1267
    #[allow(clippy::too_many_lines)]
1268
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1269
    async fn it_collects_layers() {
1✔
1270
        with_temp_context(|app_ctx, _| async move {
1✔
1271
            let session = app_ctx.default_session().await.unwrap();
18✔
1272

1✔
1273
            let layer_db = app_ctx.session_context(session).db();
1✔
1274

1✔
1275
            let workflow = Workflow {
1✔
1276
                operator: TypedOperator::Vector(
1✔
1277
                    MockPointSource {
1✔
1278
                        params: MockPointSourceParams {
1✔
1279
                            points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1280
                        },
1✔
1281
                    }
1✔
1282
                    .boxed(),
1✔
1283
                ),
1✔
1284
            };
1✔
1285

1286
            let root_collection_id = layer_db.get_root_layer_collection_id().await.unwrap();
1✔
1287

1288
            let layer1 = layer_db
1✔
1289
                .add_layer(
1✔
1290
                    AddLayer {
1✔
1291
                        name: "Layer1".to_string(),
1✔
1292
                        description: "Layer 1".to_string(),
1✔
1293
                        symbology: None,
1✔
1294
                        workflow: workflow.clone(),
1✔
1295
                        metadata: [("meta".to_string(), "datum".to_string())].into(),
1✔
1296
                        properties: vec![("proper".to_string(), "tee".to_string()).into()],
1✔
1297
                    },
1✔
1298
                    &root_collection_id,
1✔
1299
                )
1✔
1300
                .await
38✔
1301
                .unwrap();
1✔
1302

1303
            assert_eq!(
1✔
1304
                layer_db.load_layer(&layer1).await.unwrap(),
3✔
1305
                crate::layers::layer::Layer {
1✔
1306
                    id: ProviderLayerId {
1✔
1307
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1308
                        layer_id: layer1.clone(),
1✔
1309
                    },
1✔
1310
                    name: "Layer1".to_string(),
1✔
1311
                    description: "Layer 1".to_string(),
1✔
1312
                    symbology: None,
1✔
1313
                    workflow: workflow.clone(),
1✔
1314
                    metadata: [("meta".to_string(), "datum".to_string())].into(),
1✔
1315
                    properties: vec![("proper".to_string(), "tee".to_string()).into()],
1✔
1316
                }
1✔
1317
            );
1318

1319
            let collection1_id = layer_db
1✔
1320
                .add_layer_collection(
1✔
1321
                    AddLayerCollection {
1✔
1322
                        name: "Collection1".to_string(),
1✔
1323
                        description: "Collection 1".to_string(),
1✔
1324
                        properties: Default::default(),
1✔
1325
                    },
1✔
1326
                    &root_collection_id,
1✔
1327
                )
1✔
1328
                .await
8✔
1329
                .unwrap();
1✔
1330

1331
            let layer2 = layer_db
1✔
1332
                .add_layer(
1✔
1333
                    AddLayer {
1✔
1334
                        name: "Layer2".to_string(),
1✔
1335
                        description: "Layer 2".to_string(),
1✔
1336
                        symbology: None,
1✔
1337
                        workflow: workflow.clone(),
1✔
1338
                        metadata: Default::default(),
1✔
1339
                        properties: Default::default(),
1✔
1340
                    },
1✔
1341
                    &collection1_id,
1✔
1342
                )
1✔
1343
                .await
7✔
1344
                .unwrap();
1✔
1345

1346
            let collection2_id = layer_db
1✔
1347
                .add_layer_collection(
1✔
1348
                    AddLayerCollection {
1✔
1349
                        name: "Collection2".to_string(),
1✔
1350
                        description: "Collection 2".to_string(),
1✔
1351
                        properties: Default::default(),
1✔
1352
                    },
1✔
1353
                    &collection1_id,
1✔
1354
                )
1✔
1355
                .await
7✔
1356
                .unwrap();
1✔
1357

1✔
1358
            layer_db
1✔
1359
                .add_collection_to_parent(&collection2_id, &collection1_id)
1✔
1360
                .await
3✔
1361
                .unwrap();
1✔
1362

1363
            let root_collection = layer_db
1✔
1364
                .load_layer_collection(
1✔
1365
                    &root_collection_id,
1✔
1366
                    LayerCollectionListOptions {
1✔
1367
                        offset: 0,
1✔
1368
                        limit: 20,
1✔
1369
                    },
1✔
1370
                )
1✔
1371
                .await
5✔
1372
                .unwrap();
1✔
1373

1✔
1374
            assert_eq!(
1✔
1375
                root_collection,
1✔
1376
                LayerCollection {
1✔
1377
                    id: ProviderLayerCollectionId {
1✔
1378
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1379
                        collection_id: root_collection_id,
1✔
1380
                    },
1✔
1381
                    name: "Layers".to_string(),
1✔
1382
                    description: "All available Geo Engine layers".to_string(),
1✔
1383
                    items: vec![
1✔
1384
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1385
                            id: ProviderLayerCollectionId {
1✔
1386
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1387
                                collection_id: collection1_id.clone(),
1✔
1388
                            },
1✔
1389
                            name: "Collection1".to_string(),
1✔
1390
                            description: "Collection 1".to_string(),
1✔
1391
                            properties: Default::default(),
1✔
1392
                        }),
1✔
1393
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1394
                            id: ProviderLayerCollectionId {
1✔
1395
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1396
                                collection_id: LayerCollectionId(
1✔
1397
                                    UNSORTED_COLLECTION_ID.to_string()
1✔
1398
                                ),
1✔
1399
                            },
1✔
1400
                            name: "Unsorted".to_string(),
1✔
1401
                            description: "Unsorted Layers".to_string(),
1✔
1402
                            properties: Default::default(),
1✔
1403
                        }),
1✔
1404
                        CollectionItem::Layer(LayerListing {
1✔
1405
                            id: ProviderLayerId {
1✔
1406
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1407
                                layer_id: layer1,
1✔
1408
                            },
1✔
1409
                            name: "Layer1".to_string(),
1✔
1410
                            description: "Layer 1".to_string(),
1✔
1411
                            properties: vec![("proper".to_string(), "tee".to_string()).into()],
1✔
1412
                        })
1✔
1413
                    ],
1✔
1414
                    entry_label: None,
1✔
1415
                    properties: vec![],
1✔
1416
                }
1✔
1417
            );
1✔
1418

1419
            let collection1 = layer_db
1✔
1420
                .load_layer_collection(
1✔
1421
                    &collection1_id,
1✔
1422
                    LayerCollectionListOptions {
1✔
1423
                        offset: 0,
1✔
1424
                        limit: 20,
1✔
1425
                    },
1✔
1426
                )
1✔
1427
                .await
5✔
1428
                .unwrap();
1✔
1429

1✔
1430
            assert_eq!(
1✔
1431
                collection1,
1✔
1432
                LayerCollection {
1✔
1433
                    id: ProviderLayerCollectionId {
1✔
1434
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1435
                        collection_id: collection1_id,
1✔
1436
                    },
1✔
1437
                    name: "Collection1".to_string(),
1✔
1438
                    description: "Collection 1".to_string(),
1✔
1439
                    items: vec![
1✔
1440
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1441
                            id: ProviderLayerCollectionId {
1✔
1442
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1443
                                collection_id: collection2_id,
1✔
1444
                            },
1✔
1445
                            name: "Collection2".to_string(),
1✔
1446
                            description: "Collection 2".to_string(),
1✔
1447
                            properties: Default::default(),
1✔
1448
                        }),
1✔
1449
                        CollectionItem::Layer(LayerListing {
1✔
1450
                            id: ProviderLayerId {
1✔
1451
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1452
                                layer_id: layer2,
1✔
1453
                            },
1✔
1454
                            name: "Layer2".to_string(),
1✔
1455
                            description: "Layer 2".to_string(),
1✔
1456
                            properties: vec![],
1✔
1457
                        })
1✔
1458
                    ],
1✔
1459
                    entry_label: None,
1✔
1460
                    properties: vec![],
1✔
1461
                }
1✔
1462
            );
1✔
1463
        })
1✔
1464
        .await;
12✔
1465
    }
1466

1467
    #[allow(clippy::too_many_lines)]
1468
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1469
    async fn it_removes_layer_collections() {
1✔
1470
        with_temp_context(|app_ctx, _| async move {
1✔
1471
            let session = app_ctx.default_session().await.unwrap();
18✔
1472

1✔
1473
            let layer_db = app_ctx.session_context(session).db();
1✔
1474

1✔
1475
            let layer = AddLayer {
1✔
1476
                name: "layer".to_string(),
1✔
1477
                description: "description".to_string(),
1✔
1478
                workflow: Workflow {
1✔
1479
                    operator: TypedOperator::Vector(
1✔
1480
                        MockPointSource {
1✔
1481
                            params: MockPointSourceParams {
1✔
1482
                                points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1483
                            },
1✔
1484
                        }
1✔
1485
                        .boxed(),
1✔
1486
                    ),
1✔
1487
                },
1✔
1488
                symbology: None,
1✔
1489
                metadata: Default::default(),
1✔
1490
                properties: Default::default(),
1✔
1491
            };
1✔
1492

1493
            let root_collection = &layer_db.get_root_layer_collection_id().await.unwrap();
1✔
1494

1✔
1495
            let collection = AddLayerCollection {
1✔
1496
                name: "top collection".to_string(),
1✔
1497
                description: "description".to_string(),
1✔
1498
                properties: Default::default(),
1✔
1499
            };
1✔
1500

1501
            let top_c_id = layer_db
1✔
1502
                .add_layer_collection(collection, root_collection)
1✔
1503
                .await
10✔
1504
                .unwrap();
1✔
1505

1506
            let l_id = layer_db.add_layer(layer, &top_c_id).await.unwrap();
35✔
1507

1✔
1508
            let collection = AddLayerCollection {
1✔
1509
                name: "empty collection".to_string(),
1✔
1510
                description: "description".to_string(),
1✔
1511
                properties: Default::default(),
1✔
1512
            };
1✔
1513

1514
            let empty_c_id = layer_db
1✔
1515
                .add_layer_collection(collection, &top_c_id)
1✔
1516
                .await
7✔
1517
                .unwrap();
1✔
1518

1519
            let items = layer_db
1✔
1520
                .load_layer_collection(
1✔
1521
                    &top_c_id,
1✔
1522
                    LayerCollectionListOptions {
1✔
1523
                        offset: 0,
1✔
1524
                        limit: 20,
1✔
1525
                    },
1✔
1526
                )
1✔
1527
                .await
5✔
1528
                .unwrap();
1✔
1529

1✔
1530
            assert_eq!(
1✔
1531
                items,
1✔
1532
                LayerCollection {
1✔
1533
                    id: ProviderLayerCollectionId {
1✔
1534
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1535
                        collection_id: top_c_id.clone(),
1✔
1536
                    },
1✔
1537
                    name: "top collection".to_string(),
1✔
1538
                    description: "description".to_string(),
1✔
1539
                    items: vec![
1✔
1540
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1541
                            id: ProviderLayerCollectionId {
1✔
1542
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1543
                                collection_id: empty_c_id.clone(),
1✔
1544
                            },
1✔
1545
                            name: "empty collection".to_string(),
1✔
1546
                            description: "description".to_string(),
1✔
1547
                            properties: Default::default(),
1✔
1548
                        }),
1✔
1549
                        CollectionItem::Layer(LayerListing {
1✔
1550
                            id: ProviderLayerId {
1✔
1551
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1552
                                layer_id: l_id.clone(),
1✔
1553
                            },
1✔
1554
                            name: "layer".to_string(),
1✔
1555
                            description: "description".to_string(),
1✔
1556
                            properties: vec![],
1✔
1557
                        })
1✔
1558
                    ],
1✔
1559
                    entry_label: None,
1✔
1560
                    properties: vec![],
1✔
1561
                }
1✔
1562
            );
1✔
1563

1564
            // remove empty collection
1565
            layer_db.remove_layer_collection(&empty_c_id).await.unwrap();
9✔
1566

1567
            let items = layer_db
1✔
1568
                .load_layer_collection(
1✔
1569
                    &top_c_id,
1✔
1570
                    LayerCollectionListOptions {
1✔
1571
                        offset: 0,
1✔
1572
                        limit: 20,
1✔
1573
                    },
1✔
1574
                )
1✔
1575
                .await
5✔
1576
                .unwrap();
1✔
1577

1✔
1578
            assert_eq!(
1✔
1579
                items,
1✔
1580
                LayerCollection {
1✔
1581
                    id: ProviderLayerCollectionId {
1✔
1582
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1583
                        collection_id: top_c_id.clone(),
1✔
1584
                    },
1✔
1585
                    name: "top collection".to_string(),
1✔
1586
                    description: "description".to_string(),
1✔
1587
                    items: vec![CollectionItem::Layer(LayerListing {
1✔
1588
                        id: ProviderLayerId {
1✔
1589
                            provider_id: INTERNAL_PROVIDER_ID,
1✔
1590
                            layer_id: l_id.clone(),
1✔
1591
                        },
1✔
1592
                        name: "layer".to_string(),
1✔
1593
                        description: "description".to_string(),
1✔
1594
                        properties: vec![],
1✔
1595
                    })],
1✔
1596
                    entry_label: None,
1✔
1597
                    properties: vec![],
1✔
1598
                }
1✔
1599
            );
1✔
1600

1601
            // remove top (not root) collection
1602
            layer_db.remove_layer_collection(&top_c_id).await.unwrap();
9✔
1603

1✔
1604
            layer_db
1✔
1605
                .load_layer_collection(
1✔
1606
                    &top_c_id,
1✔
1607
                    LayerCollectionListOptions {
1✔
1608
                        offset: 0,
1✔
1609
                        limit: 20,
1✔
1610
                    },
1✔
1611
                )
1✔
1612
                .await
3✔
1613
                .unwrap_err();
1✔
1614

1✔
1615
            // should be deleted automatically
1✔
1616
            layer_db.load_layer(&l_id).await.unwrap_err();
3✔
1617

1✔
1618
            // it is not allowed to remove the root collection
1✔
1619
            layer_db
1✔
1620
                .remove_layer_collection(root_collection)
1✔
1621
                .await
×
1622
                .unwrap_err();
1✔
1623
            layer_db
1✔
1624
                .load_layer_collection(
1✔
1625
                    root_collection,
1✔
1626
                    LayerCollectionListOptions {
1✔
1627
                        offset: 0,
1✔
1628
                        limit: 20,
1✔
1629
                    },
1✔
1630
                )
1✔
1631
                .await
5✔
1632
                .unwrap();
1✔
1633
        })
1✔
1634
        .await;
11✔
1635
    }
1636

1637
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1638
    #[allow(clippy::too_many_lines)]
1639
    async fn it_removes_collections_from_collections() {
1✔
1640
        with_temp_context(|app_ctx, _| async move {
1✔
1641
            let session = app_ctx.default_session().await.unwrap();
18✔
1642

1✔
1643
            let db = app_ctx.session_context(session).db();
1✔
1644

1645
            let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
1646

1647
            let mid_collection_id = db
1✔
1648
                .add_layer_collection(
1✔
1649
                    AddLayerCollection {
1✔
1650
                        name: "mid collection".to_string(),
1✔
1651
                        description: "description".to_string(),
1✔
1652
                        properties: Default::default(),
1✔
1653
                    },
1✔
1654
                    root_collection_id,
1✔
1655
                )
1✔
1656
                .await
10✔
1657
                .unwrap();
1✔
1658

1659
            let bottom_collection_id = db
1✔
1660
                .add_layer_collection(
1✔
1661
                    AddLayerCollection {
1✔
1662
                        name: "bottom collection".to_string(),
1✔
1663
                        description: "description".to_string(),
1✔
1664
                        properties: Default::default(),
1✔
1665
                    },
1✔
1666
                    &mid_collection_id,
1✔
1667
                )
1✔
1668
                .await
7✔
1669
                .unwrap();
1✔
1670

1671
            let layer_id = db
1✔
1672
                .add_layer(
1✔
1673
                    AddLayer {
1✔
1674
                        name: "layer".to_string(),
1✔
1675
                        description: "description".to_string(),
1✔
1676
                        workflow: Workflow {
1✔
1677
                            operator: TypedOperator::Vector(
1✔
1678
                                MockPointSource {
1✔
1679
                                    params: MockPointSourceParams {
1✔
1680
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1681
                                    },
1✔
1682
                                }
1✔
1683
                                .boxed(),
1✔
1684
                            ),
1✔
1685
                        },
1✔
1686
                        symbology: None,
1✔
1687
                        metadata: Default::default(),
1✔
1688
                        properties: Default::default(),
1✔
1689
                    },
1✔
1690
                    &mid_collection_id,
1✔
1691
                )
1✔
1692
                .await
35✔
1693
                .unwrap();
1✔
1694

1✔
1695
            // removing the mid collection…
1✔
1696
            db.remove_layer_collection_from_parent(&mid_collection_id, root_collection_id)
1✔
1697
                .await
12✔
1698
                .unwrap();
1✔
1699

1✔
1700
            // …should remove itself
1✔
1701
            db.load_layer_collection(&mid_collection_id, LayerCollectionListOptions::default())
1✔
1702
                .await
3✔
1703
                .unwrap_err();
1✔
1704

1✔
1705
            // …should remove the bottom collection
1✔
1706
            db.load_layer_collection(&bottom_collection_id, LayerCollectionListOptions::default())
1✔
1707
                .await
3✔
1708
                .unwrap_err();
1✔
1709

1✔
1710
            // … and should remove the layer of the bottom collection
1✔
1711
            db.load_layer(&layer_id).await.unwrap_err();
3✔
1712

1✔
1713
            // the root collection is still there
1✔
1714
            db.load_layer_collection(root_collection_id, LayerCollectionListOptions::default())
1✔
1715
                .await
5✔
1716
                .unwrap();
1✔
1717
        })
1✔
1718
        .await;
11✔
1719
    }
1720

1721
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1722
    #[allow(clippy::too_many_lines)]
1723
    async fn it_removes_layers_from_collections() {
1✔
1724
        with_temp_context(|app_ctx, _| async move {
1✔
1725
            let session = app_ctx.default_session().await.unwrap();
18✔
1726

1✔
1727
            let db = app_ctx.session_context(session).db();
1✔
1728

1729
            let root_collection = &db.get_root_layer_collection_id().await.unwrap();
1✔
1730

1731
            let another_collection = db
1✔
1732
                .add_layer_collection(
1✔
1733
                    AddLayerCollection {
1✔
1734
                        name: "top collection".to_string(),
1✔
1735
                        description: "description".to_string(),
1✔
1736
                        properties: Default::default(),
1✔
1737
                    },
1✔
1738
                    root_collection,
1✔
1739
                )
1✔
1740
                .await
10✔
1741
                .unwrap();
1✔
1742

1743
            let layer_in_one_collection = db
1✔
1744
                .add_layer(
1✔
1745
                    AddLayer {
1✔
1746
                        name: "layer 1".to_string(),
1✔
1747
                        description: "description".to_string(),
1✔
1748
                        workflow: Workflow {
1✔
1749
                            operator: TypedOperator::Vector(
1✔
1750
                                MockPointSource {
1✔
1751
                                    params: MockPointSourceParams {
1✔
1752
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1753
                                    },
1✔
1754
                                }
1✔
1755
                                .boxed(),
1✔
1756
                            ),
1✔
1757
                        },
1✔
1758
                        symbology: None,
1✔
1759
                        metadata: Default::default(),
1✔
1760
                        properties: Default::default(),
1✔
1761
                    },
1✔
1762
                    &another_collection,
1✔
1763
                )
1✔
1764
                .await
35✔
1765
                .unwrap();
1✔
1766

1767
            let layer_in_two_collections = db
1✔
1768
                .add_layer(
1✔
1769
                    AddLayer {
1✔
1770
                        name: "layer 2".to_string(),
1✔
1771
                        description: "description".to_string(),
1✔
1772
                        workflow: Workflow {
1✔
1773
                            operator: TypedOperator::Vector(
1✔
1774
                                MockPointSource {
1✔
1775
                                    params: MockPointSourceParams {
1✔
1776
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1777
                                    },
1✔
1778
                                }
1✔
1779
                                .boxed(),
1✔
1780
                            ),
1✔
1781
                        },
1✔
1782
                        symbology: None,
1✔
1783
                        metadata: Default::default(),
1✔
1784
                        properties: Default::default(),
1✔
1785
                    },
1✔
1786
                    &another_collection,
1✔
1787
                )
1✔
1788
                .await
7✔
1789
                .unwrap();
1✔
1790

1✔
1791
            db.add_layer_to_collection(&layer_in_two_collections, root_collection)
1✔
1792
                .await
3✔
1793
                .unwrap();
1✔
1794

1✔
1795
            // remove first layer --> should be deleted entirely
1✔
1796

1✔
1797
            db.remove_layer_from_collection(&layer_in_one_collection, &another_collection)
1✔
1798
                .await
7✔
1799
                .unwrap();
1✔
1800

1801
            let number_of_layer_in_collection = db
1✔
1802
                .load_layer_collection(
1✔
1803
                    &another_collection,
1✔
1804
                    LayerCollectionListOptions {
1✔
1805
                        offset: 0,
1✔
1806
                        limit: 20,
1✔
1807
                    },
1✔
1808
                )
1✔
1809
                .await
5✔
1810
                .unwrap()
1✔
1811
                .items
1✔
1812
                .len();
1✔
1813
            assert_eq!(
1✔
1814
                number_of_layer_in_collection,
1✔
1815
                1 /* only the other collection should be here */
1✔
1816
            );
1✔
1817

1818
            db.load_layer(&layer_in_one_collection).await.unwrap_err();
3✔
1819

1✔
1820
            // remove second layer --> should only be gone in collection
1✔
1821

1✔
1822
            db.remove_layer_from_collection(&layer_in_two_collections, &another_collection)
1✔
1823
                .await
7✔
1824
                .unwrap();
1✔
1825

1826
            let number_of_layer_in_collection = db
1✔
1827
                .load_layer_collection(
1✔
1828
                    &another_collection,
1✔
1829
                    LayerCollectionListOptions {
1✔
1830
                        offset: 0,
1✔
1831
                        limit: 20,
1✔
1832
                    },
1✔
1833
                )
1✔
1834
                .await
5✔
1835
                .unwrap()
1✔
1836
                .items
1✔
1837
                .len();
1✔
1838
            assert_eq!(
1✔
1839
                number_of_layer_in_collection,
1✔
1840
                0 /* both layers were deleted */
1✔
1841
            );
1✔
1842

1843
            db.load_layer(&layer_in_two_collections).await.unwrap();
3✔
1844
        })
1✔
1845
        .await;
12✔
1846
    }
1847

1848
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1849
    #[allow(clippy::too_many_lines)]
1850
    async fn it_deletes_dataset() {
1✔
1851
        with_temp_context(|app_ctx, _| async move {
1✔
1852
            let loading_info = OgrSourceDataset {
1✔
1853
                file_name: PathBuf::from("test.csv"),
1✔
1854
                layer_name: "test.csv".to_owned(),
1✔
1855
                data_type: Some(VectorDataType::MultiPoint),
1✔
1856
                time: OgrSourceDatasetTimeType::Start {
1✔
1857
                    start_field: "start".to_owned(),
1✔
1858
                    start_format: OgrSourceTimeFormat::Auto,
1✔
1859
                    duration: OgrSourceDurationSpec::Zero,
1✔
1860
                },
1✔
1861
                default_geometry: None,
1✔
1862
                columns: Some(OgrSourceColumnSpec {
1✔
1863
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
1864
                        header: CsvHeader::Auto,
1✔
1865
                    }),
1✔
1866
                    x: "x".to_owned(),
1✔
1867
                    y: None,
1✔
1868
                    int: vec![],
1✔
1869
                    float: vec![],
1✔
1870
                    text: vec![],
1✔
1871
                    bool: vec![],
1✔
1872
                    datetime: vec![],
1✔
1873
                    rename: None,
1✔
1874
                }),
1✔
1875
                force_ogr_time_filter: false,
1✔
1876
                force_ogr_spatial_filter: false,
1✔
1877
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1878
                sql_query: None,
1✔
1879
                attribute_query: None,
1✔
1880
                cache_ttl: CacheTtlSeconds::default(),
1✔
1881
            };
1✔
1882

1✔
1883
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1884
                OgrSourceDataset,
1✔
1885
                VectorResultDescriptor,
1✔
1886
                VectorQueryRectangle,
1✔
1887
            > {
1✔
1888
                loading_info: loading_info.clone(),
1✔
1889
                result_descriptor: VectorResultDescriptor {
1✔
1890
                    data_type: VectorDataType::MultiPoint,
1✔
1891
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1892
                    columns: [(
1✔
1893
                        "foo".to_owned(),
1✔
1894
                        VectorColumnInfo {
1✔
1895
                            data_type: FeatureDataType::Float,
1✔
1896
                            measurement: Measurement::Unitless.into(),
1✔
1897
                        },
1✔
1898
                    )]
1✔
1899
                    .into_iter()
1✔
1900
                    .collect(),
1✔
1901
                    time: None,
1✔
1902
                    bbox: None,
1✔
1903
                },
1✔
1904
                phantom: Default::default(),
1✔
1905
            });
1✔
1906

1907
            let session = app_ctx.default_session().await.unwrap();
18✔
1908

1✔
1909
            let dataset_name = DatasetName::new(None, "my_dataset");
1✔
1910

1✔
1911
            let db = app_ctx.session_context(session.clone()).db();
1✔
1912
            let wrap = db.wrap_meta_data(meta_data);
1✔
1913
            let dataset_id = db
1✔
1914
                .add_dataset(
1✔
1915
                    AddDataset {
1✔
1916
                        name: Some(dataset_name),
1✔
1917
                        display_name: "Ogr Test".to_owned(),
1✔
1918
                        description: "desc".to_owned(),
1✔
1919
                        source_operator: "OgrSource".to_owned(),
1✔
1920
                        symbology: None,
1✔
1921
                        provenance: Some(vec![Provenance {
1✔
1922
                            citation: "citation".to_owned(),
1✔
1923
                            license: "license".to_owned(),
1✔
1924
                            uri: "uri".to_owned(),
1✔
1925
                        }]),
1✔
1926
                    },
1✔
1927
                    wrap,
1✔
1928
                )
1✔
1929
                .await
67✔
1930
                .unwrap()
1✔
1931
                .id;
1932

1933
            assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
1934

1935
            db.delete_dataset(dataset_id).await.unwrap();
3✔
1936

1937
            assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
1938
        })
1✔
1939
        .await;
10✔
1940
    }
1941

1942
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1943
    #[allow(clippy::too_many_lines)]
1944
    async fn it_deletes_admin_dataset() {
1✔
1945
        with_temp_context(|app_ctx, _| async move {
1✔
1946
            let dataset_name = DatasetName::new(None, "my_dataset");
1✔
1947

1✔
1948
            let loading_info = OgrSourceDataset {
1✔
1949
                file_name: PathBuf::from("test.csv"),
1✔
1950
                layer_name: "test.csv".to_owned(),
1✔
1951
                data_type: Some(VectorDataType::MultiPoint),
1✔
1952
                time: OgrSourceDatasetTimeType::Start {
1✔
1953
                    start_field: "start".to_owned(),
1✔
1954
                    start_format: OgrSourceTimeFormat::Auto,
1✔
1955
                    duration: OgrSourceDurationSpec::Zero,
1✔
1956
                },
1✔
1957
                default_geometry: None,
1✔
1958
                columns: Some(OgrSourceColumnSpec {
1✔
1959
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
1960
                        header: CsvHeader::Auto,
1✔
1961
                    }),
1✔
1962
                    x: "x".to_owned(),
1✔
1963
                    y: None,
1✔
1964
                    int: vec![],
1✔
1965
                    float: vec![],
1✔
1966
                    text: vec![],
1✔
1967
                    bool: vec![],
1✔
1968
                    datetime: vec![],
1✔
1969
                    rename: None,
1✔
1970
                }),
1✔
1971
                force_ogr_time_filter: false,
1✔
1972
                force_ogr_spatial_filter: false,
1✔
1973
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1974
                sql_query: None,
1✔
1975
                attribute_query: None,
1✔
1976
                cache_ttl: CacheTtlSeconds::default(),
1✔
1977
            };
1✔
1978

1✔
1979
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1980
                OgrSourceDataset,
1✔
1981
                VectorResultDescriptor,
1✔
1982
                VectorQueryRectangle,
1✔
1983
            > {
1✔
1984
                loading_info: loading_info.clone(),
1✔
1985
                result_descriptor: VectorResultDescriptor {
1✔
1986
                    data_type: VectorDataType::MultiPoint,
1✔
1987
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1988
                    columns: [(
1✔
1989
                        "foo".to_owned(),
1✔
1990
                        VectorColumnInfo {
1✔
1991
                            data_type: FeatureDataType::Float,
1✔
1992
                            measurement: Measurement::Unitless.into(),
1✔
1993
                        },
1✔
1994
                    )]
1✔
1995
                    .into_iter()
1✔
1996
                    .collect(),
1✔
1997
                    time: None,
1✔
1998
                    bbox: None,
1✔
1999
                },
1✔
2000
                phantom: Default::default(),
1✔
2001
            });
1✔
2002

2003
            let session = app_ctx.default_session().await.unwrap();
18✔
2004

1✔
2005
            let db = app_ctx.session_context(session).db();
1✔
2006
            let wrap = db.wrap_meta_data(meta_data);
1✔
2007
            let dataset_id = db
1✔
2008
                .add_dataset(
1✔
2009
                    AddDataset {
1✔
2010
                        name: Some(dataset_name),
1✔
2011
                        display_name: "Ogr Test".to_owned(),
1✔
2012
                        description: "desc".to_owned(),
1✔
2013
                        source_operator: "OgrSource".to_owned(),
1✔
2014
                        symbology: None,
1✔
2015
                        provenance: Some(vec![Provenance {
1✔
2016
                            citation: "citation".to_owned(),
1✔
2017
                            license: "license".to_owned(),
1✔
2018
                            uri: "uri".to_owned(),
1✔
2019
                        }]),
1✔
2020
                    },
1✔
2021
                    wrap,
1✔
2022
                )
1✔
2023
                .await
66✔
2024
                .unwrap()
1✔
2025
                .id;
2026

2027
            assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
2028

2029
            db.delete_dataset(dataset_id).await.unwrap();
3✔
2030

2031
            assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
2032
        })
1✔
2033
        .await;
8✔
2034
    }
2035

2036
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2037
    async fn test_missing_layer_dataset_in_collection_listing() {
1✔
2038
        with_temp_context(|app_ctx, _| async move {
1✔
2039
            let session = app_ctx.default_session().await.unwrap();
18✔
2040
            let db = app_ctx.session_context(session).db();
1✔
2041

2042
            let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
2043

2044
            let top_collection_id = db
1✔
2045
                .add_layer_collection(
1✔
2046
                    AddLayerCollection {
1✔
2047
                        name: "top collection".to_string(),
1✔
2048
                        description: "description".to_string(),
1✔
2049
                        properties: Default::default(),
1✔
2050
                    },
1✔
2051
                    root_collection_id,
1✔
2052
                )
1✔
2053
                .await
10✔
2054
                .unwrap();
1✔
2055

1✔
2056
            let faux_layer = LayerId("faux".to_string());
1✔
2057

1✔
2058
            // this should fail
1✔
2059
            db.add_layer_to_collection(&faux_layer, &top_collection_id)
1✔
2060
                .await
×
2061
                .unwrap_err();
1✔
2062

2063
            let root_collection_layers = db
1✔
2064
                .load_layer_collection(
1✔
2065
                    &top_collection_id,
1✔
2066
                    LayerCollectionListOptions {
1✔
2067
                        offset: 0,
1✔
2068
                        limit: 20,
1✔
2069
                    },
1✔
2070
                )
1✔
2071
                .await
5✔
2072
                .unwrap();
1✔
2073

1✔
2074
            assert_eq!(
1✔
2075
                root_collection_layers,
1✔
2076
                LayerCollection {
1✔
2077
                    id: ProviderLayerCollectionId {
1✔
2078
                        provider_id: DataProviderId(
1✔
2079
                            "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74".try_into().unwrap()
1✔
2080
                        ),
1✔
2081
                        collection_id: top_collection_id.clone(),
1✔
2082
                    },
1✔
2083
                    name: "top collection".to_string(),
1✔
2084
                    description: "description".to_string(),
1✔
2085
                    items: vec![],
1✔
2086
                    entry_label: None,
1✔
2087
                    properties: vec![],
1✔
2088
                }
1✔
2089
            );
1✔
2090
        })
1✔
2091
        .await;
10✔
2092
    }
2093

2094
    #[allow(clippy::too_many_lines)]
2095
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2096
    async fn it_updates_project_layer_symbology() {
1✔
2097
        with_temp_context(|app_ctx, _| async move {
1✔
2098
            let session = app_ctx.default_session().await.unwrap();
18✔
2099

2100
            let (_, workflow_id) = register_ndvi_workflow_helper(&app_ctx).await;
77✔
2101

2102
            let db = app_ctx.session_context(session.clone()).db();
1✔
2103

1✔
2104
            let create_project: CreateProject = serde_json::from_value(json!({
1✔
2105
                "name": "Default",
1✔
2106
                "description": "Default project",
1✔
2107
                "bounds": {
1✔
2108
                    "boundingBox": {
1✔
2109
                        "lowerLeftCoordinate": {
1✔
2110
                            "x": -180,
1✔
2111
                            "y": -90
1✔
2112
                        },
1✔
2113
                        "upperRightCoordinate": {
1✔
2114
                            "x": 180,
1✔
2115
                            "y": 90
1✔
2116
                        }
1✔
2117
                    },
1✔
2118
                    "spatialReference": "EPSG:4326",
1✔
2119
                    "timeInterval": {
1✔
2120
                        "start": 1_396_353_600_000i64,
1✔
2121
                        "end": 1_396_353_600_000i64
1✔
2122
                    }
1✔
2123
                },
1✔
2124
                "timeStep": {
1✔
2125
                    "step": 1,
1✔
2126
                    "granularity": "months"
1✔
2127
                }
1✔
2128
            }))
1✔
2129
            .unwrap();
1✔
2130

2131
            let project_id = db.create_project(create_project).await.unwrap();
11✔
2132

1✔
2133
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2134
                "id": project_id.to_string(),
1✔
2135
                "layers": [{
1✔
2136
                    "name": "NDVI",
1✔
2137
                    "workflow": workflow_id.to_string(),
1✔
2138
                    "visibility": {
1✔
2139
                        "data": true,
1✔
2140
                        "legend": false
1✔
2141
                    },
1✔
2142
                    "symbology": {
1✔
2143
                        "type": "raster",
1✔
2144
                        "opacity": 1,
1✔
2145
                        "colorizer": {
1✔
2146
                            "type": "linearGradient",
1✔
2147
                            "breakpoints": [{
1✔
2148
                                "value": 1,
1✔
2149
                                "color": [0, 0, 0, 255]
1✔
2150
                            }, {
1✔
2151
                                "value": 255,
1✔
2152
                                "color": [255, 255, 255, 255]
1✔
2153
                            }],
1✔
2154
                            "noDataColor": [0, 0, 0, 0],
1✔
2155
                            "overColor": [255, 255, 255, 127],
1✔
2156
                            "underColor": [255, 255, 255, 127]
1✔
2157
                        }
1✔
2158
                    }
1✔
2159
                }]
1✔
2160
            }))
1✔
2161
            .unwrap();
1✔
2162

1✔
2163
            db.update_project(update).await.unwrap();
65✔
2164

1✔
2165
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2166
                "id": project_id.to_string(),
1✔
2167
                "layers": [{
1✔
2168
                    "name": "NDVI",
1✔
2169
                    "workflow": workflow_id.to_string(),
1✔
2170
                    "visibility": {
1✔
2171
                        "data": true,
1✔
2172
                        "legend": false
1✔
2173
                    },
1✔
2174
                    "symbology": {
1✔
2175
                        "type": "raster",
1✔
2176
                        "opacity": 1,
1✔
2177
                        "colorizer": {
1✔
2178
                            "type": "linearGradient",
1✔
2179
                            "breakpoints": [{
1✔
2180
                                "value": 1,
1✔
2181
                                "color": [0, 0, 4, 255]
1✔
2182
                            }, {
1✔
2183
                                "value": 17.866_666_666_666_667,
1✔
2184
                                "color": [11, 9, 36, 255]
1✔
2185
                            }, {
1✔
2186
                                "value": 34.733_333_333_333_334,
1✔
2187
                                "color": [32, 17, 75, 255]
1✔
2188
                            }, {
1✔
2189
                                "value": 51.6,
1✔
2190
                                "color": [59, 15, 112, 255]
1✔
2191
                            }, {
1✔
2192
                                "value": 68.466_666_666_666_67,
1✔
2193
                                "color": [87, 21, 126, 255]
1✔
2194
                            }, {
1✔
2195
                                "value": 85.333_333_333_333_33,
1✔
2196
                                "color": [114, 31, 129, 255]
1✔
2197
                            }, {
1✔
2198
                                "value": 102.199_999_999_999_99,
1✔
2199
                                "color": [140, 41, 129, 255]
1✔
2200
                            }, {
1✔
2201
                                "value": 119.066_666_666_666_65,
1✔
2202
                                "color": [168, 50, 125, 255]
1✔
2203
                            }, {
1✔
2204
                                "value": 135.933_333_333_333_34,
1✔
2205
                                "color": [196, 60, 117, 255]
1✔
2206
                            }, {
1✔
2207
                                "value": 152.799_999_999_999_98,
1✔
2208
                                "color": [222, 73, 104, 255]
1✔
2209
                            }, {
1✔
2210
                                "value": 169.666_666_666_666_66,
1✔
2211
                                "color": [241, 96, 93, 255]
1✔
2212
                            }, {
1✔
2213
                                "value": 186.533_333_333_333_33,
1✔
2214
                                "color": [250, 127, 94, 255]
1✔
2215
                            }, {
1✔
2216
                                "value": 203.399_999_999_999_98,
1✔
2217
                                "color": [254, 159, 109, 255]
1✔
2218
                            }, {
1✔
2219
                                "value": 220.266_666_666_666_65,
1✔
2220
                                "color": [254, 191, 132, 255]
1✔
2221
                            }, {
1✔
2222
                                "value": 237.133_333_333_333_3,
1✔
2223
                                "color": [253, 222, 160, 255]
1✔
2224
                            }, {
1✔
2225
                                "value": 254,
1✔
2226
                                "color": [252, 253, 191, 255]
1✔
2227
                            }],
1✔
2228
                            "noDataColor": [0, 0, 0, 0],
1✔
2229
                            "overColor": [255, 255, 255, 127],
1✔
2230
                            "underColor": [255, 255, 255, 127]
1✔
2231
                        }
1✔
2232
                    }
1✔
2233
                }]
1✔
2234
            }))
1✔
2235
            .unwrap();
1✔
2236

1✔
2237
            db.update_project(update).await.unwrap();
14✔
2238

1✔
2239
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2240
                "id": project_id.to_string(),
1✔
2241
                "layers": [{
1✔
2242
                    "name": "NDVI",
1✔
2243
                    "workflow": workflow_id.to_string(),
1✔
2244
                    "visibility": {
1✔
2245
                        "data": true,
1✔
2246
                        "legend": false
1✔
2247
                    },
1✔
2248
                    "symbology": {
1✔
2249
                        "type": "raster",
1✔
2250
                        "opacity": 1,
1✔
2251
                        "colorizer": {
1✔
2252
                            "type": "linearGradient",
1✔
2253
                            "breakpoints": [{
1✔
2254
                                "value": 1,
1✔
2255
                                "color": [0, 0, 4, 255]
1✔
2256
                            }, {
1✔
2257
                                "value": 17.866_666_666_666_667,
1✔
2258
                                "color": [11, 9, 36, 255]
1✔
2259
                            }, {
1✔
2260
                                "value": 34.733_333_333_333_334,
1✔
2261
                                "color": [32, 17, 75, 255]
1✔
2262
                            }, {
1✔
2263
                                "value": 51.6,
1✔
2264
                                "color": [59, 15, 112, 255]
1✔
2265
                            }, {
1✔
2266
                                "value": 68.466_666_666_666_67,
1✔
2267
                                "color": [87, 21, 126, 255]
1✔
2268
                            }, {
1✔
2269
                                "value": 85.333_333_333_333_33,
1✔
2270
                                "color": [114, 31, 129, 255]
1✔
2271
                            }, {
1✔
2272
                                "value": 102.199_999_999_999_99,
1✔
2273
                                "color": [140, 41, 129, 255]
1✔
2274
                            }, {
1✔
2275
                                "value": 119.066_666_666_666_65,
1✔
2276
                                "color": [168, 50, 125, 255]
1✔
2277
                            }, {
1✔
2278
                                "value": 135.933_333_333_333_34,
1✔
2279
                                "color": [196, 60, 117, 255]
1✔
2280
                            }, {
1✔
2281
                                "value": 152.799_999_999_999_98,
1✔
2282
                                "color": [222, 73, 104, 255]
1✔
2283
                            }, {
1✔
2284
                                "value": 169.666_666_666_666_66,
1✔
2285
                                "color": [241, 96, 93, 255]
1✔
2286
                            }, {
1✔
2287
                                "value": 186.533_333_333_333_33,
1✔
2288
                                "color": [250, 127, 94, 255]
1✔
2289
                            }, {
1✔
2290
                                "value": 203.399_999_999_999_98,
1✔
2291
                                "color": [254, 159, 109, 255]
1✔
2292
                            }, {
1✔
2293
                                "value": 220.266_666_666_666_65,
1✔
2294
                                "color": [254, 191, 132, 255]
1✔
2295
                            }, {
1✔
2296
                                "value": 237.133_333_333_333_3,
1✔
2297
                                "color": [253, 222, 160, 255]
1✔
2298
                            }, {
1✔
2299
                                "value": 254,
1✔
2300
                                "color": [252, 253, 191, 255]
1✔
2301
                            }],
1✔
2302
                            "noDataColor": [0, 0, 0, 0],
1✔
2303
                            "overColor": [255, 255, 255, 127],
1✔
2304
                            "underColor": [255, 255, 255, 127]
1✔
2305
                        }
1✔
2306
                    }
1✔
2307
                }]
1✔
2308
            }))
1✔
2309
            .unwrap();
1✔
2310

1✔
2311
            db.update_project(update).await.unwrap();
14✔
2312

1✔
2313
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2314
                "id": project_id.to_string(),
1✔
2315
                "layers": [{
1✔
2316
                    "name": "NDVI",
1✔
2317
                    "workflow": workflow_id.to_string(),
1✔
2318
                    "visibility": {
1✔
2319
                        "data": true,
1✔
2320
                        "legend": false
1✔
2321
                    },
1✔
2322
                    "symbology": {
1✔
2323
                        "type": "raster",
1✔
2324
                        "opacity": 1,
1✔
2325
                        "colorizer": {
1✔
2326
                            "type": "linearGradient",
1✔
2327
                            "breakpoints": [{
1✔
2328
                                "value": 1,
1✔
2329
                                "color": [0, 0, 4, 255]
1✔
2330
                            }, {
1✔
2331
                                "value": 17.933_333_333_333_334,
1✔
2332
                                "color": [11, 9, 36, 255]
1✔
2333
                            }, {
1✔
2334
                                "value": 34.866_666_666_666_67,
1✔
2335
                                "color": [32, 17, 75, 255]
1✔
2336
                            }, {
1✔
2337
                                "value": 51.800_000_000_000_004,
1✔
2338
                                "color": [59, 15, 112, 255]
1✔
2339
                            }, {
1✔
2340
                                "value": 68.733_333_333_333_33,
1✔
2341
                                "color": [87, 21, 126, 255]
1✔
2342
                            }, {
1✔
2343
                                "value": 85.666_666_666_666_66,
1✔
2344
                                "color": [114, 31, 129, 255]
1✔
2345
                            }, {
1✔
2346
                                "value": 102.6,
1✔
2347
                                "color": [140, 41, 129, 255]
1✔
2348
                            }, {
1✔
2349
                                "value": 119.533_333_333_333_32,
1✔
2350
                                "color": [168, 50, 125, 255]
1✔
2351
                            }, {
1✔
2352
                                "value": 136.466_666_666_666_67,
1✔
2353
                                "color": [196, 60, 117, 255]
1✔
2354
                            }, {
1✔
2355
                                "value": 153.4,
1✔
2356
                                "color": [222, 73, 104, 255]
1✔
2357
                            }, {
1✔
2358
                                "value": 170.333_333_333_333_31,
1✔
2359
                                "color": [241, 96, 93, 255]
1✔
2360
                            }, {
1✔
2361
                                "value": 187.266_666_666_666_65,
1✔
2362
                                "color": [250, 127, 94, 255]
1✔
2363
                            }, {
1✔
2364
                                "value": 204.2,
1✔
2365
                                "color": [254, 159, 109, 255]
1✔
2366
                            }, {
1✔
2367
                                "value": 221.133_333_333_333_33,
1✔
2368
                                "color": [254, 191, 132, 255]
1✔
2369
                            }, {
1✔
2370
                                "value": 238.066_666_666_666_63,
1✔
2371
                                "color": [253, 222, 160, 255]
1✔
2372
                            }, {
1✔
2373
                                "value": 255,
1✔
2374
                                "color": [252, 253, 191, 255]
1✔
2375
                            }],
1✔
2376
                            "noDataColor": [0, 0, 0, 0],
1✔
2377
                            "overColor": [255, 255, 255, 127],
1✔
2378
                            "underColor": [255, 255, 255, 127]
1✔
2379
                        }
1✔
2380
                    }
1✔
2381
                }]
1✔
2382
            }))
1✔
2383
            .unwrap();
1✔
2384

1✔
2385
            let update = update;
1✔
2386

2387
            // run two updates concurrently
2388
            let (r0, r1) = join!(db.update_project(update.clone()), db.update_project(update));
1✔
2389

2390
            assert!(r0.is_ok());
1✔
2391
            assert!(r1.is_ok());
1✔
2392
        })
1✔
2393
        .await;
11✔
2394
    }
2395

2396
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    #[allow(clippy::too_many_lines)]
    async fn it_resolves_dataset_names_to_ids() {
        // Verifies that a dataset added under an explicit `DatasetName` can be
        // resolved back to its generated `DatasetId` via the db.
        with_temp_context(|app_ctx, _| async move {
            let session = app_ctx.default_session().await.unwrap();
            let db = app_ctx.session_context(session.clone()).db();

            // Minimal OGR loading info for a CSV point source; most optional
            // column/filter settings are left empty — only the time start field
            // and the x coordinate column are configured.
            let loading_info = OgrSourceDataset {
                file_name: PathBuf::from("test.csv"),
                layer_name: "test.csv".to_owned(),
                data_type: Some(VectorDataType::MultiPoint),
                time: OgrSourceDatasetTimeType::Start {
                    start_field: "start".to_owned(),
                    start_format: OgrSourceTimeFormat::Auto,
                    duration: OgrSourceDurationSpec::Zero,
                },
                default_geometry: None,
                columns: Some(OgrSourceColumnSpec {
                    format_specifics: Some(FormatSpecifics::Csv {
                        header: CsvHeader::Auto,
                    }),
                    x: "x".to_owned(),
                    y: None,
                    int: vec![],
                    float: vec![],
                    text: vec![],
                    bool: vec![],
                    datetime: vec![],
                    rename: None,
                }),
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            };

            // Static metadata pairing the loading info with a result descriptor
            // that declares a single unitless float column "foo" in EPSG:4326.
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
                OgrSourceDataset,
                VectorResultDescriptor,
                VectorQueryRectangle,
            > {
                loading_info: loading_info.clone(),
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [(
                        "foo".to_owned(),
                        VectorColumnInfo {
                            data_type: FeatureDataType::Float,
                            measurement: Measurement::Unitless.into(),
                        },
                    )]
                    .into_iter()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            });

            // Add a dataset with an explicit (non-generated) name so that name
            // resolution has something deterministic to look up.
            let DatasetIdAndName {
                id: dataset_id1,
                name: dataset_name1,
            } = db
                .add_dataset(
                    AddDataset {
                        name: Some(DatasetName::new(None, "my_dataset".to_owned())),
                        display_name: "Ogr Test".to_owned(),
                        description: "desc".to_owned(),
                        source_operator: "OgrSource".to_owned(),
                        symbology: None,
                        provenance: Some(vec![Provenance {
                            citation: "citation".to_owned(),
                            license: "license".to_owned(),
                            uri: "uri".to_owned(),
                        }]),
                    },
                    db.wrap_meta_data(meta_data.clone()),
                )
                .await
                .unwrap();

            // The returned name must resolve to exactly the id that was issued
            // when the dataset was added.
            assert_eq!(
                db.resolve_dataset_name_to_id(&dataset_name1).await.unwrap(),
                dataset_id1
            );
        })
        .await;
    }
2487

2488
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2489
    #[allow(clippy::too_many_lines)]
2490
    async fn test_postgres_type_serialization() {
1✔
2491
        pub async fn test_type<T>(
25✔
2492
            conn: &PooledConnection<'_, PostgresConnectionManager<tokio_postgres::NoTls>>,
25✔
2493
            sql_type: &str,
25✔
2494
            checks: impl IntoIterator<Item = T>,
25✔
2495
        ) where
25✔
2496
            T: PartialEq + postgres_types::FromSqlOwned + postgres_types::ToSql + Sync,
25✔
2497
        {
25✔
2498
            // don't quote built-in types
1✔
2499
            let quote = if sql_type == "double precision" {
25✔
2500
                ""
1✔
2501
            } else {
1✔
2502
                "\""
24✔
2503
            };
1✔
2504

1✔
2505
            for value in checks {
84✔
2506
                let stmt = conn
59✔
2507
                    .prepare(&format!("SELECT $1::{quote}{sql_type}{quote}"))
59✔
2508
                    .await
130✔
2509
                    .unwrap();
59✔
2510
                let result: T = conn.query_one(&stmt, &[&value]).await.unwrap().get(0);
59✔
2511

59✔
2512
                assert_eq!(value, result);
59✔
2513
            }
1✔
2514
        }
25✔
2515

1✔
2516
        with_temp_context(|app_ctx, _| async move {
1✔
2517
            let pool = app_ctx.pool.get().await.unwrap();
1✔
2518

1✔
2519
            test_type(&pool, "RgbaColor", [RgbaColor([0, 1, 2, 3])]).await;
4✔
2520

2521
            test_type(
1✔
2522
                &pool,
1✔
2523
                "double precision",
1✔
2524
                [NotNanF64::from(NotNan::<f64>::new(1.0).unwrap())],
1✔
2525
            )
1✔
2526
            .await;
2✔
2527

2528
            test_type(
1✔
2529
                &pool,
1✔
2530
                "Breakpoint",
1✔
2531
                [Breakpoint {
1✔
2532
                    value: NotNan::<f64>::new(1.0).unwrap().into(),
1✔
2533
                    color: RgbaColor([0, 0, 0, 0]),
1✔
2534
                }],
1✔
2535
            )
1✔
2536
            .await;
5✔
2537

2538
            test_type(
1✔
2539
                &pool,
1✔
2540
                "DefaultColors",
1✔
2541
                [
1✔
2542
                    DefaultColors::DefaultColor {
1✔
2543
                        default_color: RgbaColor([0, 10, 20, 30]),
1✔
2544
                    },
1✔
2545
                    DefaultColors::OverUnder(OverUnderColors {
1✔
2546
                        over_color: RgbaColor([1, 2, 3, 4]),
1✔
2547
                        under_color: RgbaColor([5, 6, 7, 8]),
1✔
2548
                    }),
1✔
2549
                ],
1✔
2550
            )
1✔
2551
            .await;
6✔
2552

2553
            test_type(
1✔
2554
                &pool,
1✔
2555
                "ColorizerType",
1✔
2556
                [
1✔
2557
                    ColorizerTypeDbType::LinearGradient,
1✔
2558
                    ColorizerTypeDbType::LogarithmicGradient,
1✔
2559
                    ColorizerTypeDbType::Palette,
1✔
2560
                    ColorizerTypeDbType::Rgba,
1✔
2561
                ],
1✔
2562
            )
1✔
2563
            .await;
11✔
2564

2565
            test_type(
1✔
2566
                &pool,
1✔
2567
                "Colorizer",
1✔
2568
                [
1✔
2569
                    Colorizer::LinearGradient(LinearGradient {
1✔
2570
                        breakpoints: vec![
1✔
2571
                            Breakpoint {
1✔
2572
                                value: NotNan::<f64>::new(-10.0).unwrap().into(),
1✔
2573
                                color: RgbaColor([0, 0, 0, 0]),
1✔
2574
                            },
1✔
2575
                            Breakpoint {
1✔
2576
                                value: NotNan::<f64>::new(2.0).unwrap().into(),
1✔
2577
                                color: RgbaColor([255, 0, 0, 255]),
1✔
2578
                            },
1✔
2579
                        ],
1✔
2580
                        no_data_color: RgbaColor([0, 10, 20, 30]),
1✔
2581
                        color_fields: DefaultColors::OverUnder(OverUnderColors {
1✔
2582
                            over_color: RgbaColor([1, 2, 3, 4]),
1✔
2583
                            under_color: RgbaColor([5, 6, 7, 8]),
1✔
2584
                        }),
1✔
2585
                    }),
1✔
2586
                    Colorizer::LogarithmicGradient(LogarithmicGradient {
1✔
2587
                        breakpoints: vec![
1✔
2588
                            Breakpoint {
1✔
2589
                                value: NotNan::<f64>::new(1.0).unwrap().into(),
1✔
2590
                                color: RgbaColor([0, 0, 0, 0]),
1✔
2591
                            },
1✔
2592
                            Breakpoint {
1✔
2593
                                value: NotNan::<f64>::new(2.0).unwrap().into(),
1✔
2594
                                color: RgbaColor([255, 0, 0, 255]),
1✔
2595
                            },
1✔
2596
                        ],
1✔
2597
                        no_data_color: RgbaColor([0, 10, 20, 30]),
1✔
2598
                        color_fields: DefaultColors::OverUnder(OverUnderColors {
1✔
2599
                            over_color: RgbaColor([1, 2, 3, 4]),
1✔
2600
                            under_color: RgbaColor([5, 6, 7, 8]),
1✔
2601
                        }),
1✔
2602
                    }),
1✔
2603
                    Colorizer::Palette {
1✔
2604
                        colors: Palette(
1✔
2605
                            [
1✔
2606
                                (NotNan::<f64>::new(1.0).unwrap(), RgbaColor([0, 0, 0, 0])),
1✔
2607
                                (
1✔
2608
                                    NotNan::<f64>::new(2.0).unwrap(),
1✔
2609
                                    RgbaColor([255, 0, 0, 255]),
1✔
2610
                                ),
1✔
2611
                                (NotNan::<f64>::new(3.0).unwrap(), RgbaColor([0, 10, 20, 30])),
1✔
2612
                            ]
1✔
2613
                            .into(),
1✔
2614
                        ),
1✔
2615
                        no_data_color: RgbaColor([1, 2, 3, 4]),
1✔
2616
                        default_color: RgbaColor([5, 6, 7, 8]),
1✔
2617
                    },
1✔
2618
                    Colorizer::Rgba,
1✔
2619
                ],
1✔
2620
            )
1✔
2621
            .await;
11✔
2622

2623
            test_type(
1✔
2624
                &pool,
1✔
2625
                "ColorParam",
1✔
2626
                [
1✔
2627
                    ColorParam::Static {
1✔
2628
                        color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2629
                    },
1✔
2630
                    ColorParam::Derived(DerivedColor {
1✔
2631
                        attribute: "foobar".to_string(),
1✔
2632
                        colorizer: Colorizer::Rgba,
1✔
2633
                    }),
1✔
2634
                ],
1✔
2635
            )
1✔
2636
            .await;
6✔
2637

2638
            test_type(
1✔
2639
                &pool,
1✔
2640
                "NumberParam",
1✔
2641
                [
1✔
2642
                    NumberParam::Static { value: 42 },
1✔
2643
                    NumberParam::Derived(DerivedNumber {
1✔
2644
                        attribute: "foobar".to_string(),
1✔
2645
                        factor: 1.0,
1✔
2646
                        default_value: 42.,
1✔
2647
                    }),
1✔
2648
                ],
1✔
2649
            )
1✔
2650
            .await;
6✔
2651

2652
            test_type(
1✔
2653
                &pool,
1✔
2654
                "StrokeParam",
1✔
2655
                [StrokeParam {
1✔
2656
                    width: NumberParam::Static { value: 42 },
1✔
2657
                    color: ColorParam::Static {
1✔
2658
                        color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2659
                    },
1✔
2660
                }],
1✔
2661
            )
1✔
2662
            .await;
4✔
2663

2664
            test_type(
1✔
2665
                &pool,
1✔
2666
                "TextSymbology",
1✔
2667
                [TextSymbology {
1✔
2668
                    attribute: "attribute".to_string(),
1✔
2669
                    fill_color: ColorParam::Static {
1✔
2670
                        color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2671
                    },
1✔
2672
                    stroke: StrokeParam {
1✔
2673
                        width: NumberParam::Static { value: 42 },
1✔
2674
                        color: ColorParam::Static {
1✔
2675
                            color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2676
                        },
1✔
2677
                    },
1✔
2678
                }],
1✔
2679
            )
1✔
2680
            .await;
4✔
2681

2682
            test_type(
1✔
2683
                &pool,
1✔
2684
                "Symbology",
1✔
2685
                [
1✔
2686
                    Symbology::Point(PointSymbology {
1✔
2687
                        fill_color: ColorParam::Static {
1✔
2688
                            color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2689
                        },
1✔
2690
                        stroke: StrokeParam {
1✔
2691
                            width: NumberParam::Static { value: 42 },
1✔
2692
                            color: ColorParam::Static {
1✔
2693
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2694
                            },
1✔
2695
                        },
1✔
2696
                        radius: NumberParam::Static { value: 42 },
1✔
2697
                        text: Some(TextSymbology {
1✔
2698
                            attribute: "attribute".to_string(),
1✔
2699
                            fill_color: ColorParam::Static {
1✔
2700
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2701
                            },
1✔
2702
                            stroke: StrokeParam {
1✔
2703
                                width: NumberParam::Static { value: 42 },
1✔
2704
                                color: ColorParam::Static {
1✔
2705
                                    color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2706
                                },
1✔
2707
                            },
1✔
2708
                        }),
1✔
2709
                    }),
1✔
2710
                    Symbology::Line(LineSymbology {
1✔
2711
                        stroke: StrokeParam {
1✔
2712
                            width: NumberParam::Static { value: 42 },
1✔
2713
                            color: ColorParam::Static {
1✔
2714
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2715
                            },
1✔
2716
                        },
1✔
2717
                        text: Some(TextSymbology {
1✔
2718
                            attribute: "attribute".to_string(),
1✔
2719
                            fill_color: ColorParam::Static {
1✔
2720
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2721
                            },
1✔
2722
                            stroke: StrokeParam {
1✔
2723
                                width: NumberParam::Static { value: 42 },
1✔
2724
                                color: ColorParam::Static {
1✔
2725
                                    color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2726
                                },
1✔
2727
                            },
1✔
2728
                        }),
1✔
2729
                        auto_simplified: true,
1✔
2730
                    }),
1✔
2731
                    Symbology::Polygon(PolygonSymbology {
1✔
2732
                        fill_color: ColorParam::Static {
1✔
2733
                            color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2734
                        },
1✔
2735
                        stroke: StrokeParam {
1✔
2736
                            width: NumberParam::Static { value: 42 },
1✔
2737
                            color: ColorParam::Static {
1✔
2738
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2739
                            },
1✔
2740
                        },
1✔
2741
                        text: Some(TextSymbology {
1✔
2742
                            attribute: "attribute".to_string(),
1✔
2743
                            fill_color: ColorParam::Static {
1✔
2744
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2745
                            },
1✔
2746
                            stroke: StrokeParam {
1✔
2747
                                width: NumberParam::Static { value: 42 },
1✔
2748
                                color: ColorParam::Static {
1✔
2749
                                    color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2750
                                },
1✔
2751
                            },
1✔
2752
                        }),
1✔
2753
                        auto_simplified: true,
1✔
2754
                    }),
1✔
2755
                    Symbology::Raster(RasterSymbology {
1✔
2756
                        opacity: 1.0,
1✔
2757
                        colorizer: Colorizer::LinearGradient(LinearGradient {
1✔
2758
                            breakpoints: vec![
1✔
2759
                                Breakpoint {
1✔
2760
                                    value: NotNan::<f64>::new(-10.0).unwrap().into(),
1✔
2761
                                    color: RgbaColor([0, 0, 0, 0]),
1✔
2762
                                },
1✔
2763
                                Breakpoint {
1✔
2764
                                    value: NotNan::<f64>::new(2.0).unwrap().into(),
1✔
2765
                                    color: RgbaColor([255, 0, 0, 255]),
1✔
2766
                                },
1✔
2767
                            ],
1✔
2768
                            no_data_color: RgbaColor([0, 10, 20, 30]),
1✔
2769
                            color_fields: DefaultColors::OverUnder(OverUnderColors {
1✔
2770
                                over_color: RgbaColor([1, 2, 3, 4]),
1✔
2771
                                under_color: RgbaColor([5, 6, 7, 8]),
1✔
2772
                            }),
1✔
2773
                        }),
1✔
2774
                    }),
1✔
2775
                ],
1✔
2776
            )
1✔
2777
            .await;
18✔
2778

2779
            test_type(
1✔
2780
                &pool,
1✔
2781
                "RasterDataType",
1✔
2782
                [
1✔
2783
                    crate::api::model::datatypes::RasterDataType::U8,
1✔
2784
                    crate::api::model::datatypes::RasterDataType::U16,
1✔
2785
                    crate::api::model::datatypes::RasterDataType::U32,
1✔
2786
                    crate::api::model::datatypes::RasterDataType::U64,
1✔
2787
                    crate::api::model::datatypes::RasterDataType::I8,
1✔
2788
                    crate::api::model::datatypes::RasterDataType::I16,
1✔
2789
                    crate::api::model::datatypes::RasterDataType::I32,
1✔
2790
                    crate::api::model::datatypes::RasterDataType::I64,
1✔
2791
                    crate::api::model::datatypes::RasterDataType::F32,
1✔
2792
                    crate::api::model::datatypes::RasterDataType::F64,
1✔
2793
                ],
1✔
2794
            )
1✔
2795
            .await;
22✔
2796

2797
            test_type(
1✔
2798
                &pool,
1✔
2799
                "Measurement",
1✔
2800
                [
1✔
2801
                    Measurement::Unitless,
1✔
2802
                    Measurement::Continuous(ContinuousMeasurement {
1✔
2803
                        measurement: "Temperature".to_string(),
1✔
2804
                        unit: Some("°C".to_string()),
1✔
2805
                    }),
1✔
2806
                    Measurement::Classification(ClassificationMeasurement {
1✔
2807
                        measurement: "Color".to_string(),
1✔
2808
                        classes: [(1, "Grayscale".to_string()), (2, "Colorful".to_string())].into(),
1✔
2809
                    }),
1✔
2810
                ],
1✔
2811
            )
1✔
2812
            .await;
15✔
2813

2814
            test_type(
1✔
2815
                &pool,
1✔
2816
                "Coordinate2D",
1✔
2817
                [crate::api::model::datatypes::Coordinate2D::from(
1✔
2818
                    Coordinate2D::new(0.0f64, 1.),
1✔
2819
                )],
1✔
2820
            )
1✔
2821
            .await;
4✔
2822

2823
            test_type(
1✔
2824
                &pool,
1✔
2825
                "SpatialPartition2D",
1✔
2826
                [crate::api::model::datatypes::SpatialPartition2D {
1✔
2827
                    upper_left_coordinate: Coordinate2D::new(0.0f64, 1.).into(),
1✔
2828
                    lower_right_coordinate: Coordinate2D::new(2., 0.5).into(),
1✔
2829
                }],
1✔
2830
            )
1✔
2831
            .await;
4✔
2832

2833
            test_type(
1✔
2834
                &pool,
1✔
2835
                "BoundingBox2D",
1✔
2836
                [crate::api::model::datatypes::BoundingBox2D {
1✔
2837
                    lower_left_coordinate: Coordinate2D::new(0.0f64, 0.5).into(),
1✔
2838
                    upper_right_coordinate: Coordinate2D::new(2., 1.0).into(),
1✔
2839
                }],
1✔
2840
            )
1✔
2841
            .await;
4✔
2842

2843
            test_type(
1✔
2844
                &pool,
1✔
2845
                "SpatialResolution",
1✔
2846
                [crate::api::model::datatypes::SpatialResolution { x: 1.2, y: 2.3 }],
1✔
2847
            )
1✔
2848
            .await;
4✔
2849

2850
            test_type(
1✔
2851
                &pool,
1✔
2852
                "VectorDataType",
1✔
2853
                [
1✔
2854
                    crate::api::model::datatypes::VectorDataType::Data,
1✔
2855
                    crate::api::model::datatypes::VectorDataType::MultiPoint,
1✔
2856
                    crate::api::model::datatypes::VectorDataType::MultiLineString,
1✔
2857
                    crate::api::model::datatypes::VectorDataType::MultiPolygon,
1✔
2858
                ],
1✔
2859
            )
1✔
2860
            .await;
10✔
2861

2862
            test_type(
1✔
2863
                &pool,
1✔
2864
                "FeatureDataType",
1✔
2865
                [
1✔
2866
                    crate::api::model::datatypes::FeatureDataType::Category,
1✔
2867
                    crate::api::model::datatypes::FeatureDataType::Int,
1✔
2868
                    crate::api::model::datatypes::FeatureDataType::Float,
1✔
2869
                    crate::api::model::datatypes::FeatureDataType::Text,
1✔
2870
                    crate::api::model::datatypes::FeatureDataType::Bool,
1✔
2871
                    crate::api::model::datatypes::FeatureDataType::DateTime,
1✔
2872
                ],
1✔
2873
            )
1✔
2874
            .await;
14✔
2875

2876
            test_type(
1✔
2877
                &pool,
1✔
2878
                "TimeInterval",
1✔
2879
                [crate::api::model::datatypes::TimeInterval::from(
1✔
2880
                    TimeInterval::default(),
1✔
2881
                )],
1✔
2882
            )
1✔
2883
            .await;
4✔
2884

2885
            test_type(
1✔
2886
                &pool,
1✔
2887
                "SpatialReference",
1✔
2888
                [
1✔
2889
                    crate::api::model::datatypes::SpatialReferenceOption::Unreferenced,
1✔
2890
                    crate::api::model::datatypes::SpatialReferenceOption::SpatialReference(
1✔
2891
                        SpatialReference::epsg_4326().into(),
1✔
2892
                    ),
1✔
2893
                ],
1✔
2894
            )
1✔
2895
            .await;
8✔
2896

2897
            test_type(
1✔
2898
                &pool,
1✔
2899
                "PlotResultDescriptor",
1✔
2900
                [PlotResultDescriptor {
1✔
2901
                    spatial_reference: SpatialReferenceOption::Unreferenced.into(),
1✔
2902
                    time: None,
1✔
2903
                    bbox: None,
1✔
2904
                }],
1✔
2905
            )
1✔
2906
            .await;
4✔
2907

2908
            test_type(
1✔
2909
                &pool,
1✔
2910
                "VectorResultDescriptor",
1✔
2911
                [crate::api::model::operators::VectorResultDescriptor {
1✔
2912
                    data_type: VectorDataType::MultiPoint.into(),
1✔
2913
                    spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2914
                        SpatialReference::epsg_4326(),
1✔
2915
                    )
1✔
2916
                    .into(),
1✔
2917
                    columns: [(
1✔
2918
                        "foo".to_string(),
1✔
2919
                        VectorColumnInfo {
1✔
2920
                            data_type: FeatureDataType::Int,
1✔
2921
                            measurement: Measurement::Unitless.into(),
1✔
2922
                        }
1✔
2923
                        .into(),
1✔
2924
                    )]
1✔
2925
                    .into(),
1✔
2926
                    time: Some(TimeInterval::default().into()),
1✔
2927
                    bbox: Some(
1✔
2928
                        BoundingBox2D::new(
1✔
2929
                            Coordinate2D::new(0.0f64, 0.5),
1✔
2930
                            Coordinate2D::new(2., 1.0),
1✔
2931
                        )
1✔
2932
                        .unwrap()
1✔
2933
                        .into(),
1✔
2934
                    ),
1✔
2935
                }],
1✔
2936
            )
1✔
2937
            .await;
7✔
2938

2939
            test_type(
1✔
2940
                &pool,
1✔
2941
                "RasterResultDescriptor",
1✔
2942
                [crate::api::model::operators::RasterResultDescriptor {
1✔
2943
                    data_type: RasterDataType::U8.into(),
1✔
2944
                    spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2945
                        SpatialReference::epsg_4326(),
1✔
2946
                    )
1✔
2947
                    .into(),
1✔
2948
                    measurement: Measurement::Unitless,
1✔
2949
                    time: Some(TimeInterval::default().into()),
1✔
2950
                    bbox: Some(SpatialPartition2D {
1✔
2951
                        upper_left_coordinate: Coordinate2D::new(0.0f64, 1.).into(),
1✔
2952
                        lower_right_coordinate: Coordinate2D::new(2., 0.5).into(),
1✔
2953
                    }),
1✔
2954
                    resolution: Some(SpatialResolution { x: 1.2, y: 2.3 }.into()),
1✔
2955
                }],
1✔
2956
            )
1✔
2957
            .await;
4✔
2958

2959
            test_type(
1✔
2960
                &pool,
1✔
2961
                "ResultDescriptor",
1✔
2962
                [
1✔
2963
                    crate::api::model::operators::TypedResultDescriptor::Vector(
1✔
2964
                        VectorResultDescriptor {
1✔
2965
                            data_type: VectorDataType::MultiPoint,
1✔
2966
                            spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2967
                                SpatialReference::epsg_4326(),
1✔
2968
                            ),
1✔
2969
                            columns: [(
1✔
2970
                                "foo".to_string(),
1✔
2971
                                VectorColumnInfo {
1✔
2972
                                    data_type: FeatureDataType::Int,
1✔
2973
                                    measurement: Measurement::Unitless.into(),
1✔
2974
                                },
1✔
2975
                            )]
1✔
2976
                            .into(),
1✔
2977
                            time: Some(TimeInterval::default()),
1✔
2978
                            bbox: Some(
1✔
2979
                                BoundingBox2D::new(
1✔
2980
                                    Coordinate2D::new(0.0f64, 0.5),
1✔
2981
                                    Coordinate2D::new(2., 1.0),
1✔
2982
                                )
1✔
2983
                                .unwrap(),
1✔
2984
                            ),
1✔
2985
                        }
1✔
2986
                        .into(),
1✔
2987
                    ),
1✔
2988
                    crate::api::model::operators::TypedResultDescriptor::Raster(
1✔
2989
                        crate::api::model::operators::RasterResultDescriptor {
1✔
2990
                            data_type: RasterDataType::U8.into(),
1✔
2991
                            spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2992
                                SpatialReference::epsg_4326(),
1✔
2993
                            )
1✔
2994
                            .into(),
1✔
2995
                            measurement: Measurement::Unitless,
1✔
2996
                            time: Some(TimeInterval::default().into()),
1✔
2997
                            bbox: Some(SpatialPartition2D {
1✔
2998
                                upper_left_coordinate: Coordinate2D::new(0.0f64, 1.).into(),
1✔
2999
                                lower_right_coordinate: Coordinate2D::new(2., 0.5).into(),
1✔
3000
                            }),
1✔
3001
                            resolution: Some(SpatialResolution { x: 1.2, y: 2.3 }.into()),
1✔
3002
                        },
1✔
3003
                    ),
1✔
3004
                    crate::api::model::operators::TypedResultDescriptor::Plot(
1✔
3005
                        PlotResultDescriptor {
1✔
3006
                            spatial_reference: SpatialReferenceOption::Unreferenced.into(),
1✔
3007
                            time: None,
1✔
3008
                            bbox: None,
1✔
3009
                        },
1✔
3010
                    ),
1✔
3011
                ],
1✔
3012
            )
1✔
3013
            .await;
8✔
3014
        })
1✔
3015
        .await;
11✔
3016
    }
3017
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc