• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

geo-engine / geoengine / 5868226649

15 Aug 2023 01:33PM UTC coverage: 89.482% (-0.01%) from 89.494%
5868226649

push

github

web-flow
Merge pull request #852 from geo-engine/fix-async-file-logger

fix async file logger and more complex filters

27 of 27 new or added lines in 1 file covered. (100.0%)

104055 of 116286 relevant lines covered (89.48%)

62262.38 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

97.18
/services/src/contexts/postgres.rs
1
use crate::api::model::datatypes::DatasetName;
2
use crate::contexts::{ApplicationContext, QueryContextImpl, SessionId, SimpleSession};
3
use crate::contexts::{GeoEngineDb, SessionContext};
4
use crate::datasets::add_from_directory::{
5
    add_datasets_from_directory, add_providers_from_directory,
6
};
7
use crate::datasets::upload::{Volume, Volumes};
8
use crate::error::{self, Error, Result};
9
use crate::layers::add_from_directory::{
10
    add_layer_collections_from_directory, add_layers_from_directory, UNSORTED_COLLECTION_ID,
11
};
12
use crate::layers::storage::INTERNAL_LAYER_DB_ROOT_COLLECTION_ID;
13

14
use crate::projects::{ProjectId, STRectangle};
15
use crate::tasks::{SimpleTaskManager, SimpleTaskManagerBackend, SimpleTaskManagerContext};
16
use crate::util::config::get_config_element;
17
use async_trait::async_trait;
18
use bb8_postgres::{
19
    bb8::Pool,
20
    bb8::PooledConnection,
21
    tokio_postgres::{error::SqlState, tls::MakeTlsConnect, tls::TlsConnect, Config, Socket},
22
    PostgresConnectionManager,
23
};
24
use geoengine_datatypes::raster::TilingSpecification;
25
use geoengine_operators::engine::ChunkByteSize;
26
use geoengine_operators::util::create_rayon_thread_pool;
27
use log::{debug, info};
28
use rayon::ThreadPool;
29
use std::path::PathBuf;
30
use std::sync::Arc;
31

32
use super::{ExecutionContextImpl, Session, SimpleApplicationContext};
33

34
// TODO: distinguish user-facing errors from system-facing error messages

/// A context with references to Postgres backends of the database.
///
/// Cloning is cheap: all heavyweight members are behind `Arc`s or are
/// internally reference-counted (the bb8 `Pool`).
#[derive(Clone)]
pub struct PostgresContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // Id of the single default session (created or loaded at construction time).
    default_session_id: SessionId,
    // Rayon pool shared by query and execution contexts.
    thread_pool: Arc<ThreadPool>,
    // Tiling specification handed to every `ExecutionContextImpl`.
    exe_ctx_tiling_spec: TilingSpecification,
    // Chunk size handed to every `QueryContextImpl`.
    query_ctx_chunk_size: ChunkByteSize,
    // In-memory task manager backend; NOTE: does not persist across restarts.
    task_manager: Arc<SimpleTaskManagerBackend>,
    // bb8 connection pool to the Postgres database.
    pool: Pool<PostgresConnectionManager<Tls>>,
    // Configured upload volumes.
    volumes: Volumes,
}
53

54
/// Outcome of probing the `geoengine` bookkeeping table at startup.
// NOTE(review): `Unitialized` is a typo for `Uninitialized`; renaming requires
// updating all match sites in this file, so the name is kept as-is here.
enum DatabaseStatus {
    // The bookkeeping table does not exist yet: the schema must be created.
    Unitialized,
    // Schema exists and was created with `clear_database_on_start = true`.
    InitializedClearDatabase,
    // Schema exists and was created with `clear_database_on_start = false`.
    InitializedKeepDatabase,
}
59

60
impl<Tls> PostgresContext<Tls>
61
where
62
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
63
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
64
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
65
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
66
{
67
    pub async fn new_with_context_spec(
228✔
68
        config: Config,
228✔
69
        tls: Tls,
228✔
70
        exe_ctx_tiling_spec: TilingSpecification,
228✔
71
        query_ctx_chunk_size: ChunkByteSize,
228✔
72
    ) -> Result<Self> {
228✔
73
        let pg_mgr = PostgresConnectionManager::new(config, tls);
228✔
74

75
        let pool = Pool::builder().build(pg_mgr).await?;
228✔
76
        let created_schema = Self::create_schema(pool.get().await?).await?;
2,736✔
77

78
        let session = if created_schema {
228✔
79
            let session = SimpleSession::default();
228✔
80
            Self::create_default_session(pool.get().await?, session.id()).await?;
456✔
81
            session
228✔
82
        } else {
83
            Self::load_default_session(pool.get().await?).await?
×
84
        };
85

86
        Ok(PostgresContext {
228✔
87
            default_session_id: session.id(),
228✔
88
            task_manager: Default::default(),
228✔
89
            thread_pool: create_rayon_thread_pool(0),
228✔
90
            exe_ctx_tiling_spec,
228✔
91
            query_ctx_chunk_size,
228✔
92
            pool,
228✔
93
            volumes: Default::default(),
228✔
94
        })
228✔
95
    }
228✔
96

97
    // TODO: check if the datasets exist already and don't output warnings when skipping them
98
    #[allow(clippy::too_many_arguments)]
99
    pub async fn new_with_data(
×
100
        config: Config,
×
101
        tls: Tls,
×
102
        dataset_defs_path: PathBuf,
×
103
        provider_defs_path: PathBuf,
×
104
        layer_defs_path: PathBuf,
×
105
        layer_collection_defs_path: PathBuf,
×
106
        exe_ctx_tiling_spec: TilingSpecification,
×
107
        query_ctx_chunk_size: ChunkByteSize,
×
108
    ) -> Result<Self> {
×
109
        let pg_mgr = PostgresConnectionManager::new(config, tls);
×
110

111
        let pool = Pool::builder().build(pg_mgr).await?;
×
112
        let created_schema = Self::create_schema(pool.get().await?).await?;
×
113

114
        let session = if created_schema {
×
115
            let session = SimpleSession::default();
×
116
            Self::create_default_session(pool.get().await?, session.id()).await?;
×
117
            session
×
118
        } else {
119
            Self::load_default_session(pool.get().await?).await?
×
120
        };
121

122
        let app_ctx = PostgresContext {
×
123
            default_session_id: session.id(),
×
124
            task_manager: Default::default(),
×
125
            thread_pool: create_rayon_thread_pool(0),
×
126
            exe_ctx_tiling_spec,
×
127
            query_ctx_chunk_size,
×
128
            pool,
×
129
            volumes: Default::default(),
×
130
        };
×
131

×
132
        if created_schema {
×
133
            info!("Populating database with initial data...");
×
134

135
            let ctx = app_ctx.session_context(session);
×
136

×
137
            let mut db = ctx.db();
×
138
            add_layers_from_directory(&mut db, layer_defs_path).await;
×
139
            add_layer_collections_from_directory(&mut db, layer_collection_defs_path).await;
×
140

141
            add_datasets_from_directory(&mut db, dataset_defs_path).await;
×
142

143
            add_providers_from_directory(&mut db, provider_defs_path, &[]).await;
×
144
        }
×
145

146
        Ok(app_ctx)
×
147
    }
×
148

149
    async fn check_schema_status(
319✔
150
        conn: &PooledConnection<'_, PostgresConnectionManager<Tls>>,
319✔
151
    ) -> Result<DatabaseStatus> {
319✔
152
        let stmt = match conn
319✔
153
            .prepare("SELECT clear_database_on_start from geoengine;")
319✔
154
            .await
319✔
155
        {
156
            Ok(stmt) => stmt,
×
157
            Err(e) => {
319✔
158
                if let Some(code) = e.code() {
319✔
159
                    if *code == SqlState::UNDEFINED_TABLE {
319✔
160
                        info!("Initializing schema.");
×
161
                        return Ok(DatabaseStatus::Unitialized);
319✔
162
                    }
×
163
                }
×
164
                return Err(error::Error::TokioPostgres { source: e });
×
165
            }
166
        };
167

168
        let row = conn.query_one(&stmt, &[]).await?;
×
169

170
        if row.get(0) {
×
171
            Ok(DatabaseStatus::InitializedClearDatabase)
×
172
        } else {
173
            Ok(DatabaseStatus::InitializedKeepDatabase)
×
174
        }
175
    }
319✔
176

177
    #[allow(clippy::too_many_lines)]
178
    /// Creates the database schema. Returns true if the schema was created, false if it already existed.
179
    pub(crate) async fn create_schema(
319✔
180
        mut conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
319✔
181
    ) -> Result<bool> {
319✔
182
        let postgres_config = get_config_element::<crate::util::config::Postgres>()?;
319✔
183

184
        let database_status = Self::check_schema_status(&conn).await?;
319✔
185

186
        match database_status {
×
187
            DatabaseStatus::InitializedClearDatabase if postgres_config.clear_database_on_start => {
×
188
                let schema_name = postgres_config.schema;
×
189
                info!("Clearing schema {}.", schema_name);
×
190
                conn.batch_execute(&format!(
×
191
                    "DROP SCHEMA {schema_name} CASCADE; CREATE SCHEMA {schema_name};"
×
192
                ))
×
193
                .await?;
×
194
            }
195
            DatabaseStatus::InitializedKeepDatabase if postgres_config.clear_database_on_start => {
×
196
                return Err(Error::ClearDatabaseOnStartupNotAllowed)
×
197
            }
198
            DatabaseStatus::InitializedClearDatabase | DatabaseStatus::InitializedKeepDatabase => {
199
                return Ok(false)
×
200
            }
201
            DatabaseStatus::Unitialized => (),
319✔
202
        };
203

204
        let tx = conn.build_transaction().start().await?;
319✔
205

206
        tx.batch_execute(include_str!("schema.sql")).await?;
319✔
207

208
        let stmt = tx
319✔
209
            .prepare(
319✔
210
                "
319✔
211
            INSERT INTO geoengine (clear_database_on_start) VALUES ($1);",
319✔
212
            )
319✔
213
            .await?;
319✔
214

215
        tx.execute(&stmt, &[&postgres_config.clear_database_on_start])
319✔
216
            .await?;
319✔
217

218
        let stmt = tx
319✔
219
            .prepare(
319✔
220
                r#"
319✔
221
            INSERT INTO layer_collections (
319✔
222
                id,
319✔
223
                name,
319✔
224
                description,
319✔
225
                properties
319✔
226
            ) VALUES (
319✔
227
                $1,
319✔
228
                'Layers',
319✔
229
                'All available Geo Engine layers',
319✔
230
                ARRAY[]::"PropertyType"[]
319✔
231
            );"#,
319✔
232
            )
319✔
233
            .await?;
319✔
234

235
        tx.execute(&stmt, &[&INTERNAL_LAYER_DB_ROOT_COLLECTION_ID])
319✔
236
            .await?;
319✔
237

238
        let stmt = tx
319✔
239
            .prepare(
319✔
240
                r#"INSERT INTO layer_collections (
319✔
241
                id,
319✔
242
                name,
319✔
243
                description,
319✔
244
                properties
319✔
245
            ) VALUES (
319✔
246
                $1,
319✔
247
                'Unsorted',
319✔
248
                'Unsorted Layers',
319✔
249
                ARRAY[]::"PropertyType"[]
319✔
250
            );"#,
319✔
251
            )
319✔
252
            .await?;
319✔
253

254
        tx.execute(&stmt, &[&UNSORTED_COLLECTION_ID]).await?;
319✔
255

256
        let stmt = tx
319✔
257
            .prepare(
319✔
258
                r#"
319✔
259
            INSERT INTO collection_children (parent, child) 
319✔
260
            VALUES ($1, $2);"#,
319✔
261
            )
319✔
262
            .await?;
319✔
263

264
        tx.execute(
319✔
265
            &stmt,
319✔
266
            &[
319✔
267
                &INTERNAL_LAYER_DB_ROOT_COLLECTION_ID,
319✔
268
                &UNSORTED_COLLECTION_ID,
319✔
269
            ],
319✔
270
        )
319✔
271
        .await?;
319✔
272

273
        tx.commit().await?;
319✔
274

275
        debug!("Created database schema");
×
276

277
        Ok(true)
319✔
278
    }
319✔
279

280
    async fn create_default_session(
228✔
281
        conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
228✔
282
        session_id: SessionId,
228✔
283
    ) -> Result<()> {
228✔
284
        let stmt = conn
228✔
285
            .prepare("INSERT INTO sessions (id, project_id, view) VALUES ($1, NULL ,NULL);")
228✔
286
            .await?;
228✔
287

288
        conn.execute(&stmt, &[&session_id]).await?;
228✔
289

290
        Ok(())
228✔
291
    }
228✔
292
    async fn load_default_session(
65✔
293
        conn: PooledConnection<'_, PostgresConnectionManager<Tls>>,
65✔
294
    ) -> Result<SimpleSession> {
65✔
295
        let stmt = conn
65✔
296
            .prepare("SELECT id, project_id, view FROM sessions LIMIT 1;")
65✔
297
            .await?;
349✔
298

299
        let row = conn.query_one(&stmt, &[]).await?;
65✔
300

301
        Ok(SimpleSession::new(row.get(0), row.get(1), row.get(2)))
65✔
302
    }
65✔
303
}
304

305
#[async_trait]
306
impl<Tls> SimpleApplicationContext for PostgresContext<Tls>
307
where
308
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
309
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
310
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
311
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
312
{
313
    async fn default_session_id(&self) -> SessionId {
78✔
314
        self.default_session_id
78✔
315
    }
78✔
316

317
    async fn default_session(&self) -> Result<SimpleSession> {
65✔
318
        Self::load_default_session(self.pool.get().await?).await
413✔
319
    }
130✔
320

321
    async fn update_default_session_project(&self, project: ProjectId) -> Result<()> {
1✔
322
        let conn = self.pool.get().await?;
1✔
323

324
        let stmt = conn
1✔
325
            .prepare("UPDATE sessions SET project_id = $1 WHERE id = $2;")
1✔
326
            .await?;
1✔
327

328
        conn.execute(&stmt, &[&project, &self.default_session_id])
1✔
329
            .await?;
1✔
330

331
        Ok(())
1✔
332
    }
2✔
333

334
    async fn update_default_session_view(&self, view: STRectangle) -> Result<()> {
1✔
335
        let conn = self.pool.get().await?;
1✔
336

337
        let stmt = conn
1✔
338
            .prepare("UPDATE sessions SET view = $1 WHERE id = $2;")
1✔
339
            .await?;
×
340

341
        conn.execute(&stmt, &[&view, &self.default_session_id])
1✔
342
            .await?;
1✔
343

344
        Ok(())
1✔
345
    }
2✔
346

347
    async fn default_session_context(&self) -> Result<Self::SessionContext> {
277✔
348
        Ok(self.session_context(self.session_by_id(self.default_session_id).await?))
4,031✔
349
    }
554✔
350
}
351

352
#[async_trait]
353
impl<Tls> ApplicationContext for PostgresContext<Tls>
354
where
355
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
356
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
357
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
358
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
359
{
360
    type SessionContext = PostgresSessionContext<Tls>;
361
    type Session = SimpleSession;
362

363
    fn session_context(&self, session: Self::Session) -> Self::SessionContext {
441✔
364
        PostgresSessionContext {
441✔
365
            session,
441✔
366
            context: self.clone(),
441✔
367
        }
441✔
368
    }
441✔
369

370
    async fn session_by_id(&self, session_id: SessionId) -> Result<Self::Session> {
379✔
371
        let mut conn = self.pool.get().await?;
379✔
372

373
        let tx = conn.build_transaction().start().await?;
374✔
374

375
        let stmt = tx
373✔
376
            .prepare(
373✔
377
                "
373✔
378
            SELECT           
373✔
379
                project_id,
373✔
380
                view
373✔
381
            FROM sessions
373✔
382
            WHERE id = $1;",
373✔
383
            )
373✔
384
            .await?;
3,287✔
385

386
        let row = tx
373✔
387
            .query_one(&stmt, &[&session_id])
373✔
388
            .await
356✔
389
            .map_err(|_error| error::Error::InvalidSession)?;
373✔
390

391
        Ok(SimpleSession::new(session_id, row.get(0), row.get(1)))
373✔
392
    }
752✔
393
}
394

395
/// A [`SessionContext`] for the Postgres backend: a session plus a clone of
/// the application context it belongs to.
#[derive(Clone)]
pub struct PostgresSessionContext<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    // The session this context acts on behalf of.
    session: SimpleSession,
    // Cloned application context (pool, thread pool, task manager, config).
    context: PostgresContext<Tls>,
}
406

407
#[async_trait]
408
impl<Tls> SessionContext for PostgresSessionContext<Tls>
409
where
410
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
411
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
412
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
413
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
414
{
415
    type Session = SimpleSession;
416
    type GeoEngineDB = PostgresDb<Tls>;
417

418
    type TaskContext = SimpleTaskManagerContext;
419
    type TaskManager = SimpleTaskManager; // this does not persist across restarts
420
    type QueryContext = QueryContextImpl;
421
    type ExecutionContext = ExecutionContextImpl<Self::GeoEngineDB>;
422

423
    fn db(&self) -> Self::GeoEngineDB {
388✔
424
        PostgresDb::new(self.context.pool.clone())
388✔
425
    }
388✔
426

427
    fn tasks(&self) -> Self::TaskManager {
36✔
428
        SimpleTaskManager::new(self.context.task_manager.clone())
36✔
429
    }
36✔
430

431
    fn query_context(&self) -> Result<Self::QueryContext> {
27✔
432
        Ok(QueryContextImpl::new(
27✔
433
            self.context.query_ctx_chunk_size,
27✔
434
            self.context.thread_pool.clone(),
27✔
435
        ))
27✔
436
    }
27✔
437

438
    fn execution_context(&self) -> Result<Self::ExecutionContext> {
50✔
439
        Ok(ExecutionContextImpl::<PostgresDb<Tls>>::new(
50✔
440
            self.db(),
50✔
441
            self.context.thread_pool.clone(),
50✔
442
            self.context.exe_ctx_tiling_spec,
50✔
443
        ))
50✔
444
    }
50✔
445

446
    fn volumes(&self) -> Result<Vec<Volume>> {
×
447
        Ok(self.context.volumes.volumes.clone())
×
448
    }
×
449

450
    fn session(&self) -> &Self::Session {
110✔
451
        &self.session
110✔
452
    }
110✔
453
}
454

455
/// Database handle for the Postgres backend; a thin wrapper around the shared
/// bb8 connection pool. The storage trait implementations (datasets, layers,
/// projects, …) are presumably provided in sibling modules — not visible here.
pub struct PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
    pub(crate) conn_pool: Pool<PostgresConnectionManager<Tls>>,
}
464

465
impl<Tls> PostgresDb<Tls>
466
where
467
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
468
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
469
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
470
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
471
{
472
    pub fn new(conn_pool: Pool<PostgresConnectionManager<Tls>>) -> Self {
388✔
473
        Self { conn_pool }
388✔
474
    }
388✔
475

476
    /// Check whether the namepsace of the given dataset is allowed for insertion
477
    /// Check whether the namepsace of the given dataset is allowed for insertion
478
    pub(crate) fn check_namespace(id: &DatasetName) -> Result<()> {
68✔
479
        // due to a lack of users, etc., we only allow one namespace for now
68✔
480
        if id.namespace.is_none() {
68✔
481
            Ok(())
68✔
482
        } else {
483
            Err(Error::InvalidDatasetIdNamespace)
×
484
        }
485
    }
68✔
486
}
487

488
// Marker implementation: `GeoEngineDb` adds no methods of its own here; the
// individual storage traits it aggregates are presumably implemented for
// `PostgresDb` in other modules — not visible in this file.
impl<Tls> GeoEngineDb for PostgresDb<Tls>
where
    Tls: MakeTlsConnect<Socket> + Clone + Send + Sync + 'static,
    <Tls as MakeTlsConnect<Socket>>::Stream: Send + Sync,
    <Tls as MakeTlsConnect<Socket>>::TlsConnect: Send,
    <<Tls as MakeTlsConnect<Socket>>::TlsConnect as TlsConnect<Socket>>::Future: Send,
{
}
496

497
#[cfg(test)]
498
mod tests {
499
    use std::collections::HashMap;
500
    use std::str::FromStr;
501

502
    use super::*;
503
    use crate::api::model::datatypes::{
504
        Breakpoint, ClassificationMeasurement, Colorizer, ContinuousMeasurement, DataProviderId,
505
        DatasetName, DefaultColors, LayerId, LinearGradient, LogarithmicGradient, Measurement,
506
        NotNanF64, OverUnderColors, Palette, RgbaColor, SpatialPartition2D,
507
    };
508
    use crate::api::model::operators::PlotResultDescriptor;
509
    use crate::api::model::responses::datasets::DatasetIdAndName;
510
    use crate::api::model::services::AddDataset;
511
    use crate::api::model::{ColorizerTypeDbType, HashMapTextTextDbType};
512
    use crate::datasets::external::mock::{MockCollection, MockExternalLayerProviderDefinition};
513
    use crate::datasets::listing::{DatasetListOptions, DatasetListing, ProvenanceOutput};
514
    use crate::datasets::listing::{DatasetProvider, Provenance};
515
    use crate::datasets::storage::{DatasetStore, MetaDataDefinition};
516
    use crate::datasets::upload::{FileId, UploadId};
517
    use crate::datasets::upload::{FileUpload, Upload, UploadDb};
518
    use crate::layers::layer::{
519
        AddLayer, AddLayerCollection, CollectionItem, LayerCollection, LayerCollectionListOptions,
520
        LayerCollectionListing, LayerListing, ProviderLayerCollectionId, ProviderLayerId,
521
    };
522
    use crate::layers::listing::{LayerCollectionId, LayerCollectionProvider};
523
    use crate::layers::storage::{
524
        LayerDb, LayerProviderDb, LayerProviderListing, LayerProviderListingOptions,
525
        INTERNAL_PROVIDER_ID,
526
    };
527
    use crate::projects::{
528
        ColorParam, CreateProject, DerivedColor, DerivedNumber, LayerUpdate, LineSymbology,
529
        LoadVersion, NumberParam, OrderBy, Plot, PlotUpdate, PointSymbology, PolygonSymbology,
530
        ProjectDb, ProjectFilter, ProjectId, ProjectLayer, ProjectListOptions, ProjectListing,
531
        RasterSymbology, STRectangle, StrokeParam, Symbology, TextSymbology, UpdateProject,
532
    };
533
    use crate::util::tests::register_ndvi_workflow_helper;
534
    use crate::util::tests::with_temp_context;
535
    use crate::workflows::registry::WorkflowRegistry;
536
    use crate::workflows::workflow::Workflow;
537
    use bb8_postgres::tokio_postgres::NoTls;
538
    use futures::join;
539
    use geoengine_datatypes::collections::VectorDataType;
540
    use geoengine_datatypes::primitives::CacheTtlSeconds;
541
    use geoengine_datatypes::primitives::{
542
        BoundingBox2D, Coordinate2D, FeatureDataType, RasterQueryRectangle, SpatialResolution,
543
        TimeGranularity, TimeInstance, TimeInterval, TimeStep, VectorQueryRectangle,
544
    };
545
    use geoengine_datatypes::raster::RasterDataType;
546
    use geoengine_datatypes::spatial_reference::{SpatialReference, SpatialReferenceOption};
547
    use geoengine_operators::engine::{
548
        MetaData, MetaDataProvider, MultipleRasterOrSingleVectorSource, PlotOperator,
549
        RasterResultDescriptor, StaticMetaData, TypedOperator, TypedResultDescriptor,
550
        VectorColumnInfo, VectorOperator, VectorResultDescriptor,
551
    };
552
    use geoengine_operators::mock::{MockPointSource, MockPointSourceParams};
553
    use geoengine_operators::plot::{Statistics, StatisticsParams};
554
    use geoengine_operators::source::{
555
        CsvHeader, FileNotFoundHandling, FormatSpecifics, GdalDatasetGeoTransform,
556
        GdalDatasetParameters, GdalLoadingInfo, GdalMetaDataList, GdalMetaDataRegular,
557
        GdalMetaDataStatic, GdalMetadataNetCdfCf, OgrSourceColumnSpec, OgrSourceDataset,
558
        OgrSourceDatasetTimeType, OgrSourceDurationSpec, OgrSourceErrorSpec, OgrSourceTimeFormat,
559
    };
560
    use geoengine_operators::util::input::MultiRasterOrVectorOperator::Raster;
561
    use ordered_float::NotNan;
562
    use serde_json::json;
563

564
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
565
    async fn test() {
1✔
566
        with_temp_context(|app_ctx, _| async move {
1✔
567
            let session = app_ctx.default_session().await.unwrap();
18✔
568

1✔
569
            create_projects(&app_ctx, &session).await;
74✔
570

571
            let projects = list_projects(&app_ctx, &session).await;
11✔
572

573
            let project_id = projects[0].id;
1✔
574

1✔
575
            update_projects(&app_ctx, &session, project_id).await;
155✔
576

577
            delete_project(&app_ctx, &session, project_id).await;
6✔
578
        })
1✔
579
        .await;
11✔
580
    }
581

582
    async fn delete_project(
1✔
583
        app_ctx: &PostgresContext<NoTls>,
1✔
584
        session: &SimpleSession,
1✔
585
        project_id: ProjectId,
1✔
586
    ) {
1✔
587
        let db = app_ctx.session_context(session.clone()).db();
1✔
588

1✔
589
        db.delete_project(project_id).await.unwrap();
3✔
590

1✔
591
        assert!(db.load_project(project_id).await.is_err());
3✔
592
    }
1✔
593

594
    /// Applies a sequence of updates to `project_id` (add one plot, add a
    /// second plot, remove all plots) and checks that each update creates a
    /// new project version.
    #[allow(clippy::too_many_lines)]
    async fn update_projects(
        app_ctx: &PostgresContext<NoTls>,
        session: &SimpleSession,
        project_id: ProjectId,
    ) {
        let db = app_ctx.session_context(session.clone()).db();

        let project = db
            .load_project_version(project_id, LoadVersion::Latest)
            .await
            .unwrap();

        // workflow used as the project's (vector) layer
        let layer_workflow_id = db
            .register_workflow(Workflow {
                operator: TypedOperator::Vector(
                    MockPointSource {
                        params: MockPointSourceParams {
                            points: vec![Coordinate2D::new(1., 2.); 3],
                        },
                    }
                    .boxed(),
                ),
            })
            .await
            .unwrap();

        assert!(db.load_workflow(&layer_workflow_id).await.is_ok());

        // workflow used as the project's plot
        let plot_workflow_id = db
            .register_workflow(Workflow {
                operator: Statistics {
                    params: StatisticsParams {
                        column_names: vec![],
                    },
                    sources: MultipleRasterOrSingleVectorSource {
                        source: Raster(vec![]),
                    },
                }
                .boxed()
                .into(),
            })
            .await
            .unwrap();

        assert!(db.load_workflow(&plot_workflow_id).await.is_ok());

        // add a plot
        let update = UpdateProject {
            id: project.id,
            name: Some("Test9 Updated".into()),
            description: None,
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
                workflow: layer_workflow_id,
                name: "TestLayer".into(),
                symbology: PointSymbology::default().into(),
                visibility: Default::default(),
            })]),
            plots: Some(vec![PlotUpdate::UpdateOrInsert(Plot {
                workflow: plot_workflow_id,
                name: "Test Plot".into(),
            })]),
            bounds: None,
            time_step: None,
        };
        db.update_project(update).await.unwrap();

        // first update -> second version
        let versions = db.list_project_versions(project_id).await.unwrap();
        assert_eq!(versions.len(), 2);

        // add second plot
        let update = UpdateProject {
            id: project.id,
            name: Some("Test9 Updated".into()),
            description: None,
            layers: Some(vec![LayerUpdate::UpdateOrInsert(ProjectLayer {
                workflow: layer_workflow_id,
                name: "TestLayer".into(),
                symbology: PointSymbology::default().into(),
                visibility: Default::default(),
            })]),
            plots: Some(vec![
                PlotUpdate::UpdateOrInsert(Plot {
                    workflow: plot_workflow_id,
                    name: "Test Plot".into(),
                }),
                PlotUpdate::UpdateOrInsert(Plot {
                    workflow: plot_workflow_id,
                    name: "Test Plot".into(),
                }),
            ]),
            bounds: None,
            time_step: None,
        };
        db.update_project(update).await.unwrap();

        // second update -> third version
        let versions = db.list_project_versions(project_id).await.unwrap();
        assert_eq!(versions.len(), 3);

        // delete plots
        let update = UpdateProject {
            id: project.id,
            name: None,
            description: None,
            layers: None,
            plots: Some(vec![]),
            bounds: None,
            time_step: None,
        };
        db.update_project(update).await.unwrap();

        // third update -> fourth version
        let versions = db.list_project_versions(project_id).await.unwrap();
        assert_eq!(versions.len(), 4);
    }
1✔
708

709
    async fn list_projects(
1✔
710
        app_ctx: &PostgresContext<NoTls>,
1✔
711
        session: &SimpleSession,
1✔
712
    ) -> Vec<ProjectListing> {
1✔
713
        let options = ProjectListOptions {
1✔
714
            filter: ProjectFilter::None,
1✔
715
            order: OrderBy::NameDesc,
1✔
716
            offset: 0,
1✔
717
            limit: 2,
1✔
718
        };
1✔
719

1✔
720
        let db = app_ctx.session_context(session.clone()).db();
1✔
721

722
        let projects = db.list_projects(options).await.unwrap();
11✔
723

1✔
724
        assert_eq!(projects.len(), 2);
1✔
725
        assert_eq!(projects[0].name, "Test9");
1✔
726
        assert_eq!(projects[1].name, "Test8");
1✔
727
        projects
1✔
728
    }
1✔
729

730
    async fn create_projects(app_ctx: &PostgresContext<NoTls>, session: &SimpleSession) {
1✔
731
        let db = app_ctx.session_context(session.clone()).db();
1✔
732

733
        for i in 0..10 {
11✔
734
            let create = CreateProject {
10✔
735
                name: format!("Test{i}"),
10✔
736
                description: format!("Test{}", 10 - i),
10✔
737
                bounds: STRectangle::new(
10✔
738
                    SpatialReferenceOption::Unreferenced,
10✔
739
                    0.,
10✔
740
                    0.,
10✔
741
                    1.,
10✔
742
                    1.,
10✔
743
                    0,
10✔
744
                    1,
10✔
745
                )
10✔
746
                .unwrap(),
10✔
747
                time_step: None,
10✔
748
            };
10✔
749
            db.create_project(create).await.unwrap();
74✔
750
        }
751
    }
1✔
752

753
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
754
    async fn it_persists_workflows() {
1✔
755
        with_temp_context(|app_ctx, _pg_config| async move {
1✔
756
            let workflow = Workflow {
1✔
757
                operator: TypedOperator::Vector(
1✔
758
                    MockPointSource {
1✔
759
                        params: MockPointSourceParams {
1✔
760
                            points: vec![Coordinate2D::new(1., 2.); 3],
1✔
761
                        },
1✔
762
                    }
1✔
763
                    .boxed(),
1✔
764
                ),
1✔
765
            };
1✔
766

767
            let session = app_ctx.default_session().await.unwrap();
18✔
768
        let ctx = app_ctx.session_context(session);
1✔
769

1✔
770
            let db = ctx
1✔
771
                .db();
1✔
772
            let id = db
1✔
773
                .register_workflow(workflow)
1✔
774
                .await
3✔
775
                .unwrap();
1✔
776

1✔
777
            drop(ctx);
1✔
778

779
            let workflow = db.load_workflow(&id).await.unwrap();
3✔
780

1✔
781
            let json = serde_json::to_string(&workflow).unwrap();
1✔
782
            assert_eq!(json, r#"{"type":"Vector","operator":{"type":"MockPointSource","params":{"points":[{"x":1.0,"y":2.0},{"x":1.0,"y":2.0},{"x":1.0,"y":2.0}]}}}"#);
1✔
783
        })
1✔
784
        .await;
11✔
785
    }
786

787
    /// Adds an OGR/CSV dataset to the database and verifies that its listing,
    /// provenance, and meta data (loading info) all round-trip unchanged.
    #[allow(clippy::too_many_lines)]
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn it_persists_datasets() {
        with_temp_context(|app_ctx, _| async move {
            // Loading info for a CSV file with auto-detected header and an
            // x-column only (no geometry column, no y).
            let loading_info = OgrSourceDataset {
                file_name: PathBuf::from("test.csv"),
                layer_name: "test.csv".to_owned(),
                data_type: Some(VectorDataType::MultiPoint),
                time: OgrSourceDatasetTimeType::Start {
                    start_field: "start".to_owned(),
                    start_format: OgrSourceTimeFormat::Auto,
                    duration: OgrSourceDurationSpec::Zero,
                },
                default_geometry: None,
                columns: Some(OgrSourceColumnSpec {
                    format_specifics: Some(FormatSpecifics::Csv {
                        header: CsvHeader::Auto,
                    }),
                    x: "x".to_owned(),
                    y: None,
                    int: vec![],
                    float: vec![],
                    text: vec![],
                    bool: vec![],
                    datetime: vec![],
                    rename: None,
                }),
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            };

            // Static meta data: the loading info above plus a result
            // descriptor with a single unitless float column "foo".
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
                OgrSourceDataset,
                VectorResultDescriptor,
                VectorQueryRectangle,
            > {
                loading_info: loading_info.clone(),
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [(
                        "foo".to_owned(),
                        VectorColumnInfo {
                            data_type: FeatureDataType::Float,
                            measurement: Measurement::Unitless.into(),
                        },
                    )]
                    .into_iter()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            });

            let session = app_ctx.default_session().await.unwrap();

            let dataset_name = DatasetName::new(None, "my_dataset");

            let db = app_ctx.session_context(session.clone()).db();
            let wrap = db.wrap_meta_data(meta_data);
            // `dataset_name` is deliberately shadowed by the name returned
            // from `add_dataset` so the later assertions compare against the
            // stored value.
            let DatasetIdAndName {
                id: dataset_id,
                name: dataset_name,
            } = db
                .add_dataset(
                    AddDataset {
                        name: Some(dataset_name.clone()),
                        display_name: "Ogr Test".to_owned(),
                        description: "desc".to_owned(),
                        source_operator: "OgrSource".to_owned(),
                        symbology: None,
                        provenance: Some(vec![Provenance {
                            citation: "citation".to_owned(),
                            license: "license".to_owned(),
                            uri: "uri".to_owned(),
                        }]),
                    },
                    wrap,
                )
                .await
                .unwrap();

            // The newly added dataset must appear in the listing exactly once.
            let datasets = db
                .list_datasets(DatasetListOptions {
                    filter: None,
                    order: crate::datasets::listing::OrderBy::NameAsc,
                    offset: 0,
                    limit: 10,
                })
                .await
                .unwrap();

            assert_eq!(datasets.len(), 1);

            assert_eq!(
                datasets[0],
                DatasetListing {
                    id: dataset_id,
                    name: dataset_name,
                    display_name: "Ogr Test".to_owned(),
                    description: "desc".to_owned(),
                    source_operator: "OgrSource".to_owned(),
                    symbology: None,
                    tags: vec![],
                    result_descriptor: TypedResultDescriptor::Vector(VectorResultDescriptor {
                        data_type: VectorDataType::MultiPoint,
                        spatial_reference: SpatialReference::epsg_4326().into(),
                        columns: [(
                            "foo".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Float,
                                measurement: Measurement::Unitless.into()
                            }
                        )]
                        .into_iter()
                        .collect(),
                        time: None,
                        bbox: None,
                    })
                    .into(),
                },
            );

            // Provenance entries round-trip unchanged.
            let provenance = db.load_provenance(&dataset_id).await.unwrap();

            assert_eq!(
                provenance,
                ProvenanceOutput {
                    data: dataset_id.into(),
                    provenance: Some(vec![Provenance {
                        citation: "citation".to_owned(),
                        license: "license".to_owned(),
                        uri: "uri".to_owned(),
                    }])
                }
            );

            // The stored meta data must produce the original loading info for
            // a query covering the whole world.
            let meta_data: Box<dyn MetaData<OgrSourceDataset, _, _>> =
                db.meta_data(&dataset_id.into()).await.unwrap();

            assert_eq!(
                meta_data
                    .loading_info(VectorQueryRectangle {
                        spatial_bounds: BoundingBox2D::new_unchecked(
                            (-180., -90.).into(),
                            (180., 90.).into()
                        ),
                        time_interval: TimeInterval::default(),
                        spatial_resolution: SpatialResolution::zero_point_one(),
                    })
                    .await
                    .unwrap(),
                loading_info
            );
        })
        .await;
    }
949

950
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
951
    async fn it_persists_uploads() {
1✔
952
        with_temp_context(|app_ctx, _| async move {
1✔
953
            let id = UploadId::from_str("2de18cd8-4a38-4111-a445-e3734bc18a80").unwrap();
1✔
954
            let input = Upload {
1✔
955
                id,
1✔
956
                files: vec![FileUpload {
1✔
957
                    id: FileId::from_str("e80afab0-831d-4d40-95d6-1e4dfd277e72").unwrap(),
1✔
958
                    name: "test.csv".to_owned(),
1✔
959
                    byte_size: 1337,
1✔
960
                }],
1✔
961
            };
1✔
962

963
            let session = app_ctx.default_session().await.unwrap();
18✔
964

1✔
965
            let db = app_ctx.session_context(session.clone()).db();
1✔
966

1✔
967
            db.create_upload(input.clone()).await.unwrap();
6✔
968

969
            let upload = db.load_upload(id).await.unwrap();
3✔
970

1✔
971
            assert_eq!(upload, input);
1✔
972
        })
1✔
973
        .await;
9✔
974
    }
975

976
    /// Registers a mock external layer provider and verifies that it is
    /// listed, can be loaded again, and serves its root layer collection.
    #[allow(clippy::too_many_lines)]
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn it_persists_layer_providers() {
        with_temp_context(|app_ctx, _| async move {
            let db = app_ctx.default_session_context().await.unwrap().db();

            let provider_id =
                DataProviderId::from_str("7b20c8d7-d754-4f8f-ad44-dddd25df22d2").unwrap();

            // Loading info for a CSV file with auto-detected header and an
            // x-column only (same shape as in `it_persists_datasets`).
            let loading_info = OgrSourceDataset {
                file_name: PathBuf::from("test.csv"),
                layer_name: "test.csv".to_owned(),
                data_type: Some(VectorDataType::MultiPoint),
                time: OgrSourceDatasetTimeType::Start {
                    start_field: "start".to_owned(),
                    start_format: OgrSourceTimeFormat::Auto,
                    duration: OgrSourceDurationSpec::Zero,
                },
                default_geometry: None,
                columns: Some(OgrSourceColumnSpec {
                    format_specifics: Some(FormatSpecifics::Csv {
                        header: CsvHeader::Auto,
                    }),
                    x: "x".to_owned(),
                    y: None,
                    int: vec![],
                    float: vec![],
                    text: vec![],
                    bool: vec![],
                    datetime: vec![],
                    rename: None,
                }),
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            };

            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
                OgrSourceDataset,
                VectorResultDescriptor,
                VectorQueryRectangle,
            > {
                loading_info: loading_info.clone(),
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [(
                        "foo".to_owned(),
                        VectorColumnInfo {
                            data_type: FeatureDataType::Float,
                            measurement: Measurement::Unitless.into(),
                        },
                    )]
                    .into_iter()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            });

            // Provider with a root collection "A" containing one nested,
            // empty collection "B" and a single data entry "myData".
            let provider = MockExternalLayerProviderDefinition {
                id: provider_id,
                root_collection: MockCollection {
                    id: LayerCollectionId("b5f82c7c-9133-4ac1-b4ae-8faac3b9a6df".to_owned()),
                    name: "Mock Collection A".to_owned(),
                    description: "Some description".to_owned(),
                    collections: vec![MockCollection {
                        id: LayerCollectionId("21466897-37a1-4666-913a-50b5244699ad".to_owned()),
                        name: "Mock Collection B".to_owned(),
                        description: "Some description".to_owned(),
                        collections: vec![],
                        layers: vec![],
                    }],
                    layers: vec![],
                },
                data: [("myData".to_owned(), meta_data)].into_iter().collect(),
            };

            db.add_layer_provider(Box::new(provider)).await.unwrap();

            // The provider must show up in the provider listing.
            let providers = db
                .list_layer_providers(LayerProviderListingOptions {
                    offset: 0,
                    limit: 10,
                })
                .await
                .unwrap();

            assert_eq!(providers.len(), 1);

            assert_eq!(
                providers[0],
                LayerProviderListing {
                    id: provider_id,
                    name: "MockName".to_owned(),
                    description: "MockType".to_owned(),
                }
            );

            // Loading the provider again must serve the nested collection "B"
            // as the single item of its root collection.
            let provider = db.load_layer_provider(provider_id).await.unwrap();

            let datasets = provider
                .load_layer_collection(
                    &provider.get_root_layer_collection_id().await.unwrap(),
                    LayerCollectionListOptions {
                        offset: 0,
                        limit: 10,
                    },
                )
                .await
                .unwrap();

            assert_eq!(datasets.items.len(), 1);
        })
        .await;
    }
1096

1097
    /// Stores one dataset per meta-data variant (OGR, GDAL regular, GDAL
    /// static, GDAL list, GDAL NetCDF-CF) and verifies each can be loaded
    /// back as the matching `MetaData` trait object.
    #[allow(clippy::too_many_lines)]
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn it_loads_all_meta_data_types() {
        with_temp_context(|app_ctx, _| async move {
            let session = app_ctx.default_session().await.unwrap();

            let db = app_ctx.session_context(session.clone()).db();

            // Minimal vector descriptor: plain data, no reference system,
            // no columns, no temporal/spatial extent.
            let vector_descriptor = VectorResultDescriptor {
                data_type: VectorDataType::Data,
                spatial_reference: SpatialReferenceOption::Unreferenced,
                columns: Default::default(),
                time: None,
                bbox: None,
            };

            // Minimal raster descriptor, reused by all GDAL variants below.
            let raster_descriptor = RasterResultDescriptor {
                data_type: RasterDataType::U8,
                spatial_reference: SpatialReferenceOption::Unreferenced,
                measurement: Default::default(),
                time: None,
                bbox: None,
                resolution: None,
            };

            let vector_ds = AddDataset {
                name: None,
                display_name: "OgrDataset".to_string(),
                description: "My Ogr dataset".to_string(),
                source_operator: "OgrSource".to_string(),
                symbology: None,
                provenance: None,
            };

            // Reused (cloned) for each of the four raster meta-data variants.
            let raster_ds = AddDataset {
                name: None,
                display_name: "GdalDataset".to_string(),
                description: "My Gdal dataset".to_string(),
                source_operator: "GdalSource".to_string(),
                symbology: None,
                provenance: None,
            };

            // Placeholder GDAL parameters; the values are irrelevant here
            // since only persistence, not data access, is tested.
            let gdal_params = GdalDatasetParameters {
                file_path: Default::default(),
                rasterband_channel: 0,
                geo_transform: GdalDatasetGeoTransform {
                    origin_coordinate: Default::default(),
                    x_pixel_size: 0.0,
                    y_pixel_size: 0.0,
                },
                width: 0,
                height: 0,
                file_not_found_handling: FileNotFoundHandling::NoData,
                no_data_value: None,
                properties_mapping: None,
                gdal_open_options: None,
                gdal_config_options: None,
                allow_alphaband_as_mask: false,
                retry: None,
            };

            // Variant 1: OGR static meta data (vector).
            let meta = StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: Default::default(),
                    layer_name: String::new(),
                    data_type: None,
                    time: Default::default(),
                    default_geometry: None,
                    columns: None,
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default(),
                },
                result_descriptor: vector_descriptor.clone(),
                phantom: Default::default(),
            };

            let meta = db.wrap_meta_data(MetaDataDefinition::OgrMetaData(meta));

            let id = db.add_dataset(vector_ds, meta).await.unwrap().id;

            let meta: geoengine_operators::util::Result<
                Box<dyn MetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>>,
            > = db.meta_data(&id.into()).await;

            assert!(meta.is_ok());

            // Variant 2: GDAL meta data with a regular time step.
            let meta = GdalMetaDataRegular {
                result_descriptor: raster_descriptor.clone(),
                params: gdal_params.clone(),
                time_placeholders: Default::default(),
                data_time: Default::default(),
                step: TimeStep {
                    granularity: TimeGranularity::Millis,
                    step: 0,
                },
                cache_ttl: CacheTtlSeconds::default(),
            };

            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataRegular(meta));

            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;

            let meta: geoengine_operators::util::Result<
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
            > = db.meta_data(&id.into()).await;

            assert!(meta.is_ok());

            // Variant 3: static (single time slice) GDAL meta data.
            let meta = GdalMetaDataStatic {
                time: None,
                params: gdal_params.clone(),
                result_descriptor: raster_descriptor.clone(),
                cache_ttl: CacheTtlSeconds::default(),
            };

            let meta = db.wrap_meta_data(MetaDataDefinition::GdalStatic(meta));

            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;

            let meta: geoengine_operators::util::Result<
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
            > = db.meta_data(&id.into()).await;

            assert!(meta.is_ok());

            // Variant 4: GDAL meta data given as an (empty) explicit list.
            let meta = GdalMetaDataList {
                result_descriptor: raster_descriptor.clone(),
                params: vec![],
            };

            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetaDataList(meta));

            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;

            let meta: geoengine_operators::util::Result<
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
            > = db.meta_data(&id.into()).await;

            assert!(meta.is_ok());

            // Variant 5: GDAL NetCDF-CF meta data spanning all of time.
            let meta = GdalMetadataNetCdfCf {
                result_descriptor: raster_descriptor.clone(),
                params: gdal_params.clone(),
                start: TimeInstance::MIN,
                end: TimeInstance::MAX,
                step: TimeStep {
                    granularity: TimeGranularity::Millis,
                    step: 0,
                },
                band_offset: 0,
                cache_ttl: CacheTtlSeconds::default(),
            };

            let meta = db.wrap_meta_data(MetaDataDefinition::GdalMetadataNetCdfCf(meta));

            let id = db.add_dataset(raster_ds.clone(), meta).await.unwrap().id;

            let meta: geoengine_operators::util::Result<
                Box<dyn MetaData<GdalLoadingInfo, RasterResultDescriptor, RasterQueryRectangle>>,
            > = db.meta_data(&id.into()).await;

            assert!(meta.is_ok());
        })
        .await;
    }
1267

1268
    /// Builds a small layer hierarchy (root → Collection1 → {Collection2,
    /// Layer2}, root → Layer1) and verifies that loading the root and
    /// Collection1 returns exactly the expected listings.
    #[allow(clippy::too_many_lines)]
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
    async fn it_collects_layers() {
        with_temp_context(|app_ctx, _| async move {
            let session = app_ctx.default_session().await.unwrap();

            let layer_db = app_ctx.session_context(session).db();

            // Shared workflow for both layers: a mock point source.
            let workflow = Workflow {
                operator: TypedOperator::Vector(
                    MockPointSource {
                        params: MockPointSourceParams {
                            points: vec![Coordinate2D::new(1., 2.); 3],
                        },
                    }
                    .boxed(),
                ),
            };

            let root_collection_id = layer_db.get_root_layer_collection_id().await.unwrap();

            // Layer1 carries metadata and a custom property and is added
            // directly under the root collection.
            let layer1 = layer_db
                .add_layer(
                    AddLayer {
                        name: "Layer1".to_string(),
                        description: "Layer 1".to_string(),
                        symbology: None,
                        workflow: workflow.clone(),
                        metadata: [("meta".to_string(), "datum".to_string())].into(),
                        properties: vec![("proper".to_string(), "tee".to_string()).into()],
                    },
                    &root_collection_id,
                )
                .await
                .unwrap();

            // Loading the layer returns all stored fields unchanged.
            assert_eq!(
                layer_db.load_layer(&layer1).await.unwrap(),
                crate::layers::layer::Layer {
                    id: ProviderLayerId {
                        provider_id: INTERNAL_PROVIDER_ID,
                        layer_id: layer1.clone(),
                    },
                    name: "Layer1".to_string(),
                    description: "Layer 1".to_string(),
                    symbology: None,
                    workflow: workflow.clone(),
                    metadata: [("meta".to_string(), "datum".to_string())].into(),
                    properties: vec![("proper".to_string(), "tee".to_string()).into()],
                }
            );

            let collection1_id = layer_db
                .add_layer_collection(
                    AddLayerCollection {
                        name: "Collection1".to_string(),
                        description: "Collection 1".to_string(),
                        properties: Default::default(),
                    },
                    &root_collection_id,
                )
                .await
                .unwrap();

            // Layer2 lives inside Collection1.
            let layer2 = layer_db
                .add_layer(
                    AddLayer {
                        name: "Layer2".to_string(),
                        description: "Layer 2".to_string(),
                        symbology: None,
                        workflow: workflow.clone(),
                        metadata: Default::default(),
                        properties: Default::default(),
                    },
                    &collection1_id,
                )
                .await
                .unwrap();

            let collection2_id = layer_db
                .add_layer_collection(
                    AddLayerCollection {
                        name: "Collection2".to_string(),
                        description: "Collection 2".to_string(),
                        properties: Default::default(),
                    },
                    &collection1_id,
                )
                .await
                .unwrap();

            // Re-adding Collection2 to the same parent; the expected listing
            // below still contains it only once.
            layer_db
                .add_collection_to_parent(&collection2_id, &collection1_id)
                .await
                .unwrap();

            let root_collection = layer_db
                .load_layer_collection(
                    &root_collection_id,
                    LayerCollectionListOptions {
                        offset: 0,
                        limit: 20,
                    },
                )
                .await
                .unwrap();

            // Root contains Collection1, the built-in "Unsorted" collection,
            // and Layer1 — in that order.
            assert_eq!(
                root_collection,
                LayerCollection {
                    id: ProviderLayerCollectionId {
                        provider_id: INTERNAL_PROVIDER_ID,
                        collection_id: root_collection_id,
                    },
                    name: "Layers".to_string(),
                    description: "All available Geo Engine layers".to_string(),
                    items: vec![
                        CollectionItem::Collection(LayerCollectionListing {
                            id: ProviderLayerCollectionId {
                                provider_id: INTERNAL_PROVIDER_ID,
                                collection_id: collection1_id.clone(),
                            },
                            name: "Collection1".to_string(),
                            description: "Collection 1".to_string(),
                            properties: Default::default(),
                        }),
                        CollectionItem::Collection(LayerCollectionListing {
                            id: ProviderLayerCollectionId {
                                provider_id: INTERNAL_PROVIDER_ID,
                                collection_id: LayerCollectionId(
                                    UNSORTED_COLLECTION_ID.to_string()
                                ),
                            },
                            name: "Unsorted".to_string(),
                            description: "Unsorted Layers".to_string(),
                            properties: Default::default(),
                        }),
                        CollectionItem::Layer(LayerListing {
                            id: ProviderLayerId {
                                provider_id: INTERNAL_PROVIDER_ID,
                                layer_id: layer1,
                            },
                            name: "Layer1".to_string(),
                            description: "Layer 1".to_string(),
                            properties: vec![("proper".to_string(), "tee".to_string()).into()],
                        })
                    ],
                    entry_label: None,
                    properties: vec![],
                }
            );

            let collection1 = layer_db
                .load_layer_collection(
                    &collection1_id,
                    LayerCollectionListOptions {
                        offset: 0,
                        limit: 20,
                    },
                )
                .await
                .unwrap();

            // Collection1 contains Collection2 and Layer2.
            assert_eq!(
                collection1,
                LayerCollection {
                    id: ProviderLayerCollectionId {
                        provider_id: INTERNAL_PROVIDER_ID,
                        collection_id: collection1_id,
                    },
                    name: "Collection1".to_string(),
                    description: "Collection 1".to_string(),
                    items: vec![
                        CollectionItem::Collection(LayerCollectionListing {
                            id: ProviderLayerCollectionId {
                                provider_id: INTERNAL_PROVIDER_ID,
                                collection_id: collection2_id,
                            },
                            name: "Collection2".to_string(),
                            description: "Collection 2".to_string(),
                            properties: Default::default(),
                        }),
                        CollectionItem::Layer(LayerListing {
                            id: ProviderLayerId {
                                provider_id: INTERNAL_PROVIDER_ID,
                                layer_id: layer2,
                            },
                            name: "Layer2".to_string(),
                            description: "Layer 2".to_string(),
                            properties: vec![],
                        })
                    ],
                    entry_label: None,
                    properties: vec![],
                }
            );
        })
        .await;
    }
1467

1468
    #[allow(clippy::too_many_lines)]
1469
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1470
    async fn it_removes_layer_collections() {
1✔
1471
        with_temp_context(|app_ctx, _| async move {
1✔
1472
            let session = app_ctx.default_session().await.unwrap();
18✔
1473

1✔
1474
            let layer_db = app_ctx.session_context(session).db();
1✔
1475

1✔
1476
            let layer = AddLayer {
1✔
1477
                name: "layer".to_string(),
1✔
1478
                description: "description".to_string(),
1✔
1479
                workflow: Workflow {
1✔
1480
                    operator: TypedOperator::Vector(
1✔
1481
                        MockPointSource {
1✔
1482
                            params: MockPointSourceParams {
1✔
1483
                                points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1484
                            },
1✔
1485
                        }
1✔
1486
                        .boxed(),
1✔
1487
                    ),
1✔
1488
                },
1✔
1489
                symbology: None,
1✔
1490
                metadata: Default::default(),
1✔
1491
                properties: Default::default(),
1✔
1492
            };
1✔
1493

1494
            let root_collection = &layer_db.get_root_layer_collection_id().await.unwrap();
1✔
1495

1✔
1496
            let collection = AddLayerCollection {
1✔
1497
                name: "top collection".to_string(),
1✔
1498
                description: "description".to_string(),
1✔
1499
                properties: Default::default(),
1✔
1500
            };
1✔
1501

1502
            let top_c_id = layer_db
1✔
1503
                .add_layer_collection(collection, root_collection)
1✔
1504
                .await
10✔
1505
                .unwrap();
1✔
1506

1507
            let l_id = layer_db.add_layer(layer, &top_c_id).await.unwrap();
41✔
1508

1✔
1509
            let collection = AddLayerCollection {
1✔
1510
                name: "empty collection".to_string(),
1✔
1511
                description: "description".to_string(),
1✔
1512
                properties: Default::default(),
1✔
1513
            };
1✔
1514

1515
            let empty_c_id = layer_db
1✔
1516
                .add_layer_collection(collection, &top_c_id)
1✔
1517
                .await
7✔
1518
                .unwrap();
1✔
1519

1520
            let items = layer_db
1✔
1521
                .load_layer_collection(
1✔
1522
                    &top_c_id,
1✔
1523
                    LayerCollectionListOptions {
1✔
1524
                        offset: 0,
1✔
1525
                        limit: 20,
1✔
1526
                    },
1✔
1527
                )
1✔
1528
                .await
5✔
1529
                .unwrap();
1✔
1530

1✔
1531
            assert_eq!(
1✔
1532
                items,
1✔
1533
                LayerCollection {
1✔
1534
                    id: ProviderLayerCollectionId {
1✔
1535
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1536
                        collection_id: top_c_id.clone(),
1✔
1537
                    },
1✔
1538
                    name: "top collection".to_string(),
1✔
1539
                    description: "description".to_string(),
1✔
1540
                    items: vec![
1✔
1541
                        CollectionItem::Collection(LayerCollectionListing {
1✔
1542
                            id: ProviderLayerCollectionId {
1✔
1543
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1544
                                collection_id: empty_c_id.clone(),
1✔
1545
                            },
1✔
1546
                            name: "empty collection".to_string(),
1✔
1547
                            description: "description".to_string(),
1✔
1548
                            properties: Default::default(),
1✔
1549
                        }),
1✔
1550
                        CollectionItem::Layer(LayerListing {
1✔
1551
                            id: ProviderLayerId {
1✔
1552
                                provider_id: INTERNAL_PROVIDER_ID,
1✔
1553
                                layer_id: l_id.clone(),
1✔
1554
                            },
1✔
1555
                            name: "layer".to_string(),
1✔
1556
                            description: "description".to_string(),
1✔
1557
                            properties: vec![],
1✔
1558
                        })
1✔
1559
                    ],
1✔
1560
                    entry_label: None,
1✔
1561
                    properties: vec![],
1✔
1562
                }
1✔
1563
            );
1✔
1564

1565
            // remove empty collection
1566
            layer_db.remove_layer_collection(&empty_c_id).await.unwrap();
9✔
1567

1568
            let items = layer_db
1✔
1569
                .load_layer_collection(
1✔
1570
                    &top_c_id,
1✔
1571
                    LayerCollectionListOptions {
1✔
1572
                        offset: 0,
1✔
1573
                        limit: 20,
1✔
1574
                    },
1✔
1575
                )
1✔
1576
                .await
5✔
1577
                .unwrap();
1✔
1578

1✔
1579
            assert_eq!(
1✔
1580
                items,
1✔
1581
                LayerCollection {
1✔
1582
                    id: ProviderLayerCollectionId {
1✔
1583
                        provider_id: INTERNAL_PROVIDER_ID,
1✔
1584
                        collection_id: top_c_id.clone(),
1✔
1585
                    },
1✔
1586
                    name: "top collection".to_string(),
1✔
1587
                    description: "description".to_string(),
1✔
1588
                    items: vec![CollectionItem::Layer(LayerListing {
1✔
1589
                        id: ProviderLayerId {
1✔
1590
                            provider_id: INTERNAL_PROVIDER_ID,
1✔
1591
                            layer_id: l_id.clone(),
1✔
1592
                        },
1✔
1593
                        name: "layer".to_string(),
1✔
1594
                        description: "description".to_string(),
1✔
1595
                        properties: vec![],
1✔
1596
                    })],
1✔
1597
                    entry_label: None,
1✔
1598
                    properties: vec![],
1✔
1599
                }
1✔
1600
            );
1✔
1601

1602
            // remove top (not root) collection
1603
            layer_db.remove_layer_collection(&top_c_id).await.unwrap();
9✔
1604

1✔
1605
            layer_db
1✔
1606
                .load_layer_collection(
1✔
1607
                    &top_c_id,
1✔
1608
                    LayerCollectionListOptions {
1✔
1609
                        offset: 0,
1✔
1610
                        limit: 20,
1✔
1611
                    },
1✔
1612
                )
1✔
1613
                .await
3✔
1614
                .unwrap_err();
1✔
1615

1✔
1616
            // should be deleted automatically
1✔
1617
            layer_db.load_layer(&l_id).await.unwrap_err();
3✔
1618

1✔
1619
            // it is not allowed to remove the root collection
1✔
1620
            layer_db
1✔
1621
                .remove_layer_collection(root_collection)
1✔
1622
                .await
×
1623
                .unwrap_err();
1✔
1624
            layer_db
1✔
1625
                .load_layer_collection(
1✔
1626
                    root_collection,
1✔
1627
                    LayerCollectionListOptions {
1✔
1628
                        offset: 0,
1✔
1629
                        limit: 20,
1✔
1630
                    },
1✔
1631
                )
1✔
1632
                .await
5✔
1633
                .unwrap();
1✔
1634
        })
1✔
1635
        .await;
12✔
1636
    }
1637

1638
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1639
    #[allow(clippy::too_many_lines)]
1640
    async fn it_removes_collections_from_collections() {
1✔
1641
        with_temp_context(|app_ctx, _| async move {
1✔
1642
            let session = app_ctx.default_session().await.unwrap();
18✔
1643

1✔
1644
            let db = app_ctx.session_context(session).db();
1✔
1645

1646
            let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
1647

1648
            let mid_collection_id = db
1✔
1649
                .add_layer_collection(
1✔
1650
                    AddLayerCollection {
1✔
1651
                        name: "mid collection".to_string(),
1✔
1652
                        description: "description".to_string(),
1✔
1653
                        properties: Default::default(),
1✔
1654
                    },
1✔
1655
                    root_collection_id,
1✔
1656
                )
1✔
1657
                .await
10✔
1658
                .unwrap();
1✔
1659

1660
            let bottom_collection_id = db
1✔
1661
                .add_layer_collection(
1✔
1662
                    AddLayerCollection {
1✔
1663
                        name: "bottom collection".to_string(),
1✔
1664
                        description: "description".to_string(),
1✔
1665
                        properties: Default::default(),
1✔
1666
                    },
1✔
1667
                    &mid_collection_id,
1✔
1668
                )
1✔
1669
                .await
7✔
1670
                .unwrap();
1✔
1671

1672
            let layer_id = db
1✔
1673
                .add_layer(
1✔
1674
                    AddLayer {
1✔
1675
                        name: "layer".to_string(),
1✔
1676
                        description: "description".to_string(),
1✔
1677
                        workflow: Workflow {
1✔
1678
                            operator: TypedOperator::Vector(
1✔
1679
                                MockPointSource {
1✔
1680
                                    params: MockPointSourceParams {
1✔
1681
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1682
                                    },
1✔
1683
                                }
1✔
1684
                                .boxed(),
1✔
1685
                            ),
1✔
1686
                        },
1✔
1687
                        symbology: None,
1✔
1688
                        metadata: Default::default(),
1✔
1689
                        properties: Default::default(),
1✔
1690
                    },
1✔
1691
                    &mid_collection_id,
1✔
1692
                )
1✔
1693
                .await
40✔
1694
                .unwrap();
1✔
1695

1✔
1696
            // removing the mid collection…
1✔
1697
            db.remove_layer_collection_from_parent(&mid_collection_id, root_collection_id)
1✔
1698
                .await
11✔
1699
                .unwrap();
1✔
1700

1✔
1701
            // …should remove itself
1✔
1702
            db.load_layer_collection(&mid_collection_id, LayerCollectionListOptions::default())
1✔
1703
                .await
3✔
1704
                .unwrap_err();
1✔
1705

1✔
1706
            // …should remove the bottom collection
1✔
1707
            db.load_layer_collection(&bottom_collection_id, LayerCollectionListOptions::default())
1✔
1708
                .await
3✔
1709
                .unwrap_err();
1✔
1710

1✔
1711
            // … and should remove the layer of the bottom collection
1✔
1712
            db.load_layer(&layer_id).await.unwrap_err();
3✔
1713

1✔
1714
            // the root collection is still there
1✔
1715
            db.load_layer_collection(root_collection_id, LayerCollectionListOptions::default())
1✔
1716
                .await
5✔
1717
                .unwrap();
1✔
1718
        })
1✔
1719
        .await;
11✔
1720
    }
1721

1722
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1723
    #[allow(clippy::too_many_lines)]
1724
    async fn it_removes_layers_from_collections() {
1✔
1725
        with_temp_context(|app_ctx, _| async move {
1✔
1726
            let session = app_ctx.default_session().await.unwrap();
18✔
1727

1✔
1728
            let db = app_ctx.session_context(session).db();
1✔
1729

1730
            let root_collection = &db.get_root_layer_collection_id().await.unwrap();
1✔
1731

1732
            let another_collection = db
1✔
1733
                .add_layer_collection(
1✔
1734
                    AddLayerCollection {
1✔
1735
                        name: "top collection".to_string(),
1✔
1736
                        description: "description".to_string(),
1✔
1737
                        properties: Default::default(),
1✔
1738
                    },
1✔
1739
                    root_collection,
1✔
1740
                )
1✔
1741
                .await
10✔
1742
                .unwrap();
1✔
1743

1744
            let layer_in_one_collection = db
1✔
1745
                .add_layer(
1✔
1746
                    AddLayer {
1✔
1747
                        name: "layer 1".to_string(),
1✔
1748
                        description: "description".to_string(),
1✔
1749
                        workflow: Workflow {
1✔
1750
                            operator: TypedOperator::Vector(
1✔
1751
                                MockPointSource {
1✔
1752
                                    params: MockPointSourceParams {
1✔
1753
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1754
                                    },
1✔
1755
                                }
1✔
1756
                                .boxed(),
1✔
1757
                            ),
1✔
1758
                        },
1✔
1759
                        symbology: None,
1✔
1760
                        metadata: Default::default(),
1✔
1761
                        properties: Default::default(),
1✔
1762
                    },
1✔
1763
                    &another_collection,
1✔
1764
                )
1✔
1765
                .await
41✔
1766
                .unwrap();
1✔
1767

1768
            let layer_in_two_collections = db
1✔
1769
                .add_layer(
1✔
1770
                    AddLayer {
1✔
1771
                        name: "layer 2".to_string(),
1✔
1772
                        description: "description".to_string(),
1✔
1773
                        workflow: Workflow {
1✔
1774
                            operator: TypedOperator::Vector(
1✔
1775
                                MockPointSource {
1✔
1776
                                    params: MockPointSourceParams {
1✔
1777
                                        points: vec![Coordinate2D::new(1., 2.); 3],
1✔
1778
                                    },
1✔
1779
                                }
1✔
1780
                                .boxed(),
1✔
1781
                            ),
1✔
1782
                        },
1✔
1783
                        symbology: None,
1✔
1784
                        metadata: Default::default(),
1✔
1785
                        properties: Default::default(),
1✔
1786
                    },
1✔
1787
                    &another_collection,
1✔
1788
                )
1✔
1789
                .await
9✔
1790
                .unwrap();
1✔
1791

1✔
1792
            db.add_layer_to_collection(&layer_in_two_collections, root_collection)
1✔
1793
                .await
3✔
1794
                .unwrap();
1✔
1795

1✔
1796
            // remove first layer --> should be deleted entirely
1✔
1797

1✔
1798
            db.remove_layer_from_collection(&layer_in_one_collection, &another_collection)
1✔
1799
                .await
7✔
1800
                .unwrap();
1✔
1801

1802
            let number_of_layer_in_collection = db
1✔
1803
                .load_layer_collection(
1✔
1804
                    &another_collection,
1✔
1805
                    LayerCollectionListOptions {
1✔
1806
                        offset: 0,
1✔
1807
                        limit: 20,
1✔
1808
                    },
1✔
1809
                )
1✔
1810
                .await
5✔
1811
                .unwrap()
1✔
1812
                .items
1✔
1813
                .len();
1✔
1814
            assert_eq!(
1✔
1815
                number_of_layer_in_collection,
1✔
1816
                1 /* only the other collection should be here */
1✔
1817
            );
1✔
1818

1819
            db.load_layer(&layer_in_one_collection).await.unwrap_err();
3✔
1820

1✔
1821
            // remove second layer --> should only be gone in collection
1✔
1822

1✔
1823
            db.remove_layer_from_collection(&layer_in_two_collections, &another_collection)
1✔
1824
                .await
7✔
1825
                .unwrap();
1✔
1826

1827
            let number_of_layer_in_collection = db
1✔
1828
                .load_layer_collection(
1✔
1829
                    &another_collection,
1✔
1830
                    LayerCollectionListOptions {
1✔
1831
                        offset: 0,
1✔
1832
                        limit: 20,
1✔
1833
                    },
1✔
1834
                )
1✔
1835
                .await
5✔
1836
                .unwrap()
1✔
1837
                .items
1✔
1838
                .len();
1✔
1839
            assert_eq!(
1✔
1840
                number_of_layer_in_collection,
1✔
1841
                0 /* both layers were deleted */
1✔
1842
            );
1✔
1843

1844
            db.load_layer(&layer_in_two_collections).await.unwrap();
3✔
1845
        })
1✔
1846
        .await;
12✔
1847
    }
1848

1849
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1850
    #[allow(clippy::too_many_lines)]
1851
    async fn it_deletes_dataset() {
1✔
1852
        with_temp_context(|app_ctx, _| async move {
1✔
1853
            let loading_info = OgrSourceDataset {
1✔
1854
                file_name: PathBuf::from("test.csv"),
1✔
1855
                layer_name: "test.csv".to_owned(),
1✔
1856
                data_type: Some(VectorDataType::MultiPoint),
1✔
1857
                time: OgrSourceDatasetTimeType::Start {
1✔
1858
                    start_field: "start".to_owned(),
1✔
1859
                    start_format: OgrSourceTimeFormat::Auto,
1✔
1860
                    duration: OgrSourceDurationSpec::Zero,
1✔
1861
                },
1✔
1862
                default_geometry: None,
1✔
1863
                columns: Some(OgrSourceColumnSpec {
1✔
1864
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
1865
                        header: CsvHeader::Auto,
1✔
1866
                    }),
1✔
1867
                    x: "x".to_owned(),
1✔
1868
                    y: None,
1✔
1869
                    int: vec![],
1✔
1870
                    float: vec![],
1✔
1871
                    text: vec![],
1✔
1872
                    bool: vec![],
1✔
1873
                    datetime: vec![],
1✔
1874
                    rename: None,
1✔
1875
                }),
1✔
1876
                force_ogr_time_filter: false,
1✔
1877
                force_ogr_spatial_filter: false,
1✔
1878
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1879
                sql_query: None,
1✔
1880
                attribute_query: None,
1✔
1881
                cache_ttl: CacheTtlSeconds::default(),
1✔
1882
            };
1✔
1883

1✔
1884
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1885
                OgrSourceDataset,
1✔
1886
                VectorResultDescriptor,
1✔
1887
                VectorQueryRectangle,
1✔
1888
            > {
1✔
1889
                loading_info: loading_info.clone(),
1✔
1890
                result_descriptor: VectorResultDescriptor {
1✔
1891
                    data_type: VectorDataType::MultiPoint,
1✔
1892
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1893
                    columns: [(
1✔
1894
                        "foo".to_owned(),
1✔
1895
                        VectorColumnInfo {
1✔
1896
                            data_type: FeatureDataType::Float,
1✔
1897
                            measurement: Measurement::Unitless.into(),
1✔
1898
                        },
1✔
1899
                    )]
1✔
1900
                    .into_iter()
1✔
1901
                    .collect(),
1✔
1902
                    time: None,
1✔
1903
                    bbox: None,
1✔
1904
                },
1✔
1905
                phantom: Default::default(),
1✔
1906
            });
1✔
1907

1908
            let session = app_ctx.default_session().await.unwrap();
18✔
1909

1✔
1910
            let dataset_name = DatasetName::new(None, "my_dataset");
1✔
1911

1✔
1912
            let db = app_ctx.session_context(session.clone()).db();
1✔
1913
            let wrap = db.wrap_meta_data(meta_data);
1✔
1914
            let dataset_id = db
1✔
1915
                .add_dataset(
1✔
1916
                    AddDataset {
1✔
1917
                        name: Some(dataset_name),
1✔
1918
                        display_name: "Ogr Test".to_owned(),
1✔
1919
                        description: "desc".to_owned(),
1✔
1920
                        source_operator: "OgrSource".to_owned(),
1✔
1921
                        symbology: None,
1✔
1922
                        provenance: Some(vec![Provenance {
1✔
1923
                            citation: "citation".to_owned(),
1✔
1924
                            license: "license".to_owned(),
1✔
1925
                            uri: "uri".to_owned(),
1✔
1926
                        }]),
1✔
1927
                    },
1✔
1928
                    wrap,
1✔
1929
                )
1✔
1930
                .await
66✔
1931
                .unwrap()
1✔
1932
                .id;
1933

1934
            assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
1935

1936
            db.delete_dataset(dataset_id).await.unwrap();
3✔
1937

1938
            assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
1939
        })
1✔
1940
        .await;
11✔
1941
    }
1942

1943
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
1944
    #[allow(clippy::too_many_lines)]
1945
    async fn it_deletes_admin_dataset() {
1✔
1946
        with_temp_context(|app_ctx, _| async move {
1✔
1947
            let dataset_name = DatasetName::new(None, "my_dataset");
1✔
1948

1✔
1949
            let loading_info = OgrSourceDataset {
1✔
1950
                file_name: PathBuf::from("test.csv"),
1✔
1951
                layer_name: "test.csv".to_owned(),
1✔
1952
                data_type: Some(VectorDataType::MultiPoint),
1✔
1953
                time: OgrSourceDatasetTimeType::Start {
1✔
1954
                    start_field: "start".to_owned(),
1✔
1955
                    start_format: OgrSourceTimeFormat::Auto,
1✔
1956
                    duration: OgrSourceDurationSpec::Zero,
1✔
1957
                },
1✔
1958
                default_geometry: None,
1✔
1959
                columns: Some(OgrSourceColumnSpec {
1✔
1960
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
1961
                        header: CsvHeader::Auto,
1✔
1962
                    }),
1✔
1963
                    x: "x".to_owned(),
1✔
1964
                    y: None,
1✔
1965
                    int: vec![],
1✔
1966
                    float: vec![],
1✔
1967
                    text: vec![],
1✔
1968
                    bool: vec![],
1✔
1969
                    datetime: vec![],
1✔
1970
                    rename: None,
1✔
1971
                }),
1✔
1972
                force_ogr_time_filter: false,
1✔
1973
                force_ogr_spatial_filter: false,
1✔
1974
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1975
                sql_query: None,
1✔
1976
                attribute_query: None,
1✔
1977
                cache_ttl: CacheTtlSeconds::default(),
1✔
1978
            };
1✔
1979

1✔
1980
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
1981
                OgrSourceDataset,
1✔
1982
                VectorResultDescriptor,
1✔
1983
                VectorQueryRectangle,
1✔
1984
            > {
1✔
1985
                loading_info: loading_info.clone(),
1✔
1986
                result_descriptor: VectorResultDescriptor {
1✔
1987
                    data_type: VectorDataType::MultiPoint,
1✔
1988
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
1989
                    columns: [(
1✔
1990
                        "foo".to_owned(),
1✔
1991
                        VectorColumnInfo {
1✔
1992
                            data_type: FeatureDataType::Float,
1✔
1993
                            measurement: Measurement::Unitless.into(),
1✔
1994
                        },
1✔
1995
                    )]
1✔
1996
                    .into_iter()
1✔
1997
                    .collect(),
1✔
1998
                    time: None,
1✔
1999
                    bbox: None,
1✔
2000
                },
1✔
2001
                phantom: Default::default(),
1✔
2002
            });
1✔
2003

2004
            let session = app_ctx.default_session().await.unwrap();
18✔
2005

1✔
2006
            let db = app_ctx.session_context(session).db();
1✔
2007
            let wrap = db.wrap_meta_data(meta_data);
1✔
2008
            let dataset_id = db
1✔
2009
                .add_dataset(
1✔
2010
                    AddDataset {
1✔
2011
                        name: Some(dataset_name),
1✔
2012
                        display_name: "Ogr Test".to_owned(),
1✔
2013
                        description: "desc".to_owned(),
1✔
2014
                        source_operator: "OgrSource".to_owned(),
1✔
2015
                        symbology: None,
1✔
2016
                        provenance: Some(vec![Provenance {
1✔
2017
                            citation: "citation".to_owned(),
1✔
2018
                            license: "license".to_owned(),
1✔
2019
                            uri: "uri".to_owned(),
1✔
2020
                        }]),
1✔
2021
                    },
1✔
2022
                    wrap,
1✔
2023
                )
1✔
2024
                .await
67✔
2025
                .unwrap()
1✔
2026
                .id;
2027

2028
            assert!(db.load_dataset(&dataset_id).await.is_ok());
3✔
2029

2030
            db.delete_dataset(dataset_id).await.unwrap();
3✔
2031

2032
            assert!(db.load_dataset(&dataset_id).await.is_err());
3✔
2033
        })
1✔
2034
        .await;
11✔
2035
    }
2036

2037
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2038
    async fn test_missing_layer_dataset_in_collection_listing() {
1✔
2039
        with_temp_context(|app_ctx, _| async move {
1✔
2040
            let session = app_ctx.default_session().await.unwrap();
18✔
2041
            let db = app_ctx.session_context(session).db();
1✔
2042

2043
            let root_collection_id = &db.get_root_layer_collection_id().await.unwrap();
1✔
2044

2045
            let top_collection_id = db
1✔
2046
                .add_layer_collection(
1✔
2047
                    AddLayerCollection {
1✔
2048
                        name: "top collection".to_string(),
1✔
2049
                        description: "description".to_string(),
1✔
2050
                        properties: Default::default(),
1✔
2051
                    },
1✔
2052
                    root_collection_id,
1✔
2053
                )
1✔
2054
                .await
10✔
2055
                .unwrap();
1✔
2056

1✔
2057
            let faux_layer = LayerId("faux".to_string());
1✔
2058

1✔
2059
            // this should fail
1✔
2060
            db.add_layer_to_collection(&faux_layer, &top_collection_id)
1✔
2061
                .await
×
2062
                .unwrap_err();
1✔
2063

2064
            let root_collection_layers = db
1✔
2065
                .load_layer_collection(
1✔
2066
                    &top_collection_id,
1✔
2067
                    LayerCollectionListOptions {
1✔
2068
                        offset: 0,
1✔
2069
                        limit: 20,
1✔
2070
                    },
1✔
2071
                )
1✔
2072
                .await
5✔
2073
                .unwrap();
1✔
2074

1✔
2075
            assert_eq!(
1✔
2076
                root_collection_layers,
1✔
2077
                LayerCollection {
1✔
2078
                    id: ProviderLayerCollectionId {
1✔
2079
                        provider_id: DataProviderId(
1✔
2080
                            "ce5e84db-cbf9-48a2-9a32-d4b7cc56ea74".try_into().unwrap()
1✔
2081
                        ),
1✔
2082
                        collection_id: top_collection_id.clone(),
1✔
2083
                    },
1✔
2084
                    name: "top collection".to_string(),
1✔
2085
                    description: "description".to_string(),
1✔
2086
                    items: vec![],
1✔
2087
                    entry_label: None,
1✔
2088
                    properties: vec![],
1✔
2089
                }
1✔
2090
            );
1✔
2091
        })
1✔
2092
        .await;
12✔
2093
    }
2094

2095
    #[allow(clippy::too_many_lines)]
2096
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2097
    async fn it_updates_project_layer_symbology() {
1✔
2098
        with_temp_context(|app_ctx, _| async move {
1✔
2099
            let session = app_ctx.default_session().await.unwrap();
18✔
2100

2101
            let (_, workflow_id) = register_ndvi_workflow_helper(&app_ctx).await;
77✔
2102

2103
            let db = app_ctx.session_context(session.clone()).db();
1✔
2104

1✔
2105
            let create_project: CreateProject = serde_json::from_value(json!({
1✔
2106
                "name": "Default",
1✔
2107
                "description": "Default project",
1✔
2108
                "bounds": {
1✔
2109
                    "boundingBox": {
1✔
2110
                        "lowerLeftCoordinate": {
1✔
2111
                            "x": -180,
1✔
2112
                            "y": -90
1✔
2113
                        },
1✔
2114
                        "upperRightCoordinate": {
1✔
2115
                            "x": 180,
1✔
2116
                            "y": 90
1✔
2117
                        }
1✔
2118
                    },
1✔
2119
                    "spatialReference": "EPSG:4326",
1✔
2120
                    "timeInterval": {
1✔
2121
                        "start": 1_396_353_600_000i64,
1✔
2122
                        "end": 1_396_353_600_000i64
1✔
2123
                    }
1✔
2124
                },
1✔
2125
                "timeStep": {
1✔
2126
                    "step": 1,
1✔
2127
                    "granularity": "months"
1✔
2128
                }
1✔
2129
            }))
1✔
2130
            .unwrap();
1✔
2131

2132
            let project_id = db.create_project(create_project).await.unwrap();
11✔
2133

1✔
2134
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2135
                "id": project_id.to_string(),
1✔
2136
                "layers": [{
1✔
2137
                    "name": "NDVI",
1✔
2138
                    "workflow": workflow_id.to_string(),
1✔
2139
                    "visibility": {
1✔
2140
                        "data": true,
1✔
2141
                        "legend": false
1✔
2142
                    },
1✔
2143
                    "symbology": {
1✔
2144
                        "type": "raster",
1✔
2145
                        "opacity": 1,
1✔
2146
                        "colorizer": {
1✔
2147
                            "type": "linearGradient",
1✔
2148
                            "breakpoints": [{
1✔
2149
                                "value": 1,
1✔
2150
                                "color": [0, 0, 0, 255]
1✔
2151
                            }, {
1✔
2152
                                "value": 255,
1✔
2153
                                "color": [255, 255, 255, 255]
1✔
2154
                            }],
1✔
2155
                            "noDataColor": [0, 0, 0, 0],
1✔
2156
                            "overColor": [255, 255, 255, 127],
1✔
2157
                            "underColor": [255, 255, 255, 127]
1✔
2158
                        }
1✔
2159
                    }
1✔
2160
                }]
1✔
2161
            }))
1✔
2162
            .unwrap();
1✔
2163

1✔
2164
            db.update_project(update).await.unwrap();
65✔
2165

1✔
2166
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2167
                "id": project_id.to_string(),
1✔
2168
                "layers": [{
1✔
2169
                    "name": "NDVI",
1✔
2170
                    "workflow": workflow_id.to_string(),
1✔
2171
                    "visibility": {
1✔
2172
                        "data": true,
1✔
2173
                        "legend": false
1✔
2174
                    },
1✔
2175
                    "symbology": {
1✔
2176
                        "type": "raster",
1✔
2177
                        "opacity": 1,
1✔
2178
                        "colorizer": {
1✔
2179
                            "type": "linearGradient",
1✔
2180
                            "breakpoints": [{
1✔
2181
                                "value": 1,
1✔
2182
                                "color": [0, 0, 4, 255]
1✔
2183
                            }, {
1✔
2184
                                "value": 17.866_666_666_666_667,
1✔
2185
                                "color": [11, 9, 36, 255]
1✔
2186
                            }, {
1✔
2187
                                "value": 34.733_333_333_333_334,
1✔
2188
                                "color": [32, 17, 75, 255]
1✔
2189
                            }, {
1✔
2190
                                "value": 51.6,
1✔
2191
                                "color": [59, 15, 112, 255]
1✔
2192
                            }, {
1✔
2193
                                "value": 68.466_666_666_666_67,
1✔
2194
                                "color": [87, 21, 126, 255]
1✔
2195
                            }, {
1✔
2196
                                "value": 85.333_333_333_333_33,
1✔
2197
                                "color": [114, 31, 129, 255]
1✔
2198
                            }, {
1✔
2199
                                "value": 102.199_999_999_999_99,
1✔
2200
                                "color": [140, 41, 129, 255]
1✔
2201
                            }, {
1✔
2202
                                "value": 119.066_666_666_666_65,
1✔
2203
                                "color": [168, 50, 125, 255]
1✔
2204
                            }, {
1✔
2205
                                "value": 135.933_333_333_333_34,
1✔
2206
                                "color": [196, 60, 117, 255]
1✔
2207
                            }, {
1✔
2208
                                "value": 152.799_999_999_999_98,
1✔
2209
                                "color": [222, 73, 104, 255]
1✔
2210
                            }, {
1✔
2211
                                "value": 169.666_666_666_666_66,
1✔
2212
                                "color": [241, 96, 93, 255]
1✔
2213
                            }, {
1✔
2214
                                "value": 186.533_333_333_333_33,
1✔
2215
                                "color": [250, 127, 94, 255]
1✔
2216
                            }, {
1✔
2217
                                "value": 203.399_999_999_999_98,
1✔
2218
                                "color": [254, 159, 109, 255]
1✔
2219
                            }, {
1✔
2220
                                "value": 220.266_666_666_666_65,
1✔
2221
                                "color": [254, 191, 132, 255]
1✔
2222
                            }, {
1✔
2223
                                "value": 237.133_333_333_333_3,
1✔
2224
                                "color": [253, 222, 160, 255]
1✔
2225
                            }, {
1✔
2226
                                "value": 254,
1✔
2227
                                "color": [252, 253, 191, 255]
1✔
2228
                            }],
1✔
2229
                            "noDataColor": [0, 0, 0, 0],
1✔
2230
                            "overColor": [255, 255, 255, 127],
1✔
2231
                            "underColor": [255, 255, 255, 127]
1✔
2232
                        }
1✔
2233
                    }
1✔
2234
                }]
1✔
2235
            }))
1✔
2236
            .unwrap();
1✔
2237

1✔
2238
            db.update_project(update).await.unwrap();
14✔
2239

1✔
2240
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2241
                "id": project_id.to_string(),
1✔
2242
                "layers": [{
1✔
2243
                    "name": "NDVI",
1✔
2244
                    "workflow": workflow_id.to_string(),
1✔
2245
                    "visibility": {
1✔
2246
                        "data": true,
1✔
2247
                        "legend": false
1✔
2248
                    },
1✔
2249
                    "symbology": {
1✔
2250
                        "type": "raster",
1✔
2251
                        "opacity": 1,
1✔
2252
                        "colorizer": {
1✔
2253
                            "type": "linearGradient",
1✔
2254
                            "breakpoints": [{
1✔
2255
                                "value": 1,
1✔
2256
                                "color": [0, 0, 4, 255]
1✔
2257
                            }, {
1✔
2258
                                "value": 17.866_666_666_666_667,
1✔
2259
                                "color": [11, 9, 36, 255]
1✔
2260
                            }, {
1✔
2261
                                "value": 34.733_333_333_333_334,
1✔
2262
                                "color": [32, 17, 75, 255]
1✔
2263
                            }, {
1✔
2264
                                "value": 51.6,
1✔
2265
                                "color": [59, 15, 112, 255]
1✔
2266
                            }, {
1✔
2267
                                "value": 68.466_666_666_666_67,
1✔
2268
                                "color": [87, 21, 126, 255]
1✔
2269
                            }, {
1✔
2270
                                "value": 85.333_333_333_333_33,
1✔
2271
                                "color": [114, 31, 129, 255]
1✔
2272
                            }, {
1✔
2273
                                "value": 102.199_999_999_999_99,
1✔
2274
                                "color": [140, 41, 129, 255]
1✔
2275
                            }, {
1✔
2276
                                "value": 119.066_666_666_666_65,
1✔
2277
                                "color": [168, 50, 125, 255]
1✔
2278
                            }, {
1✔
2279
                                "value": 135.933_333_333_333_34,
1✔
2280
                                "color": [196, 60, 117, 255]
1✔
2281
                            }, {
1✔
2282
                                "value": 152.799_999_999_999_98,
1✔
2283
                                "color": [222, 73, 104, 255]
1✔
2284
                            }, {
1✔
2285
                                "value": 169.666_666_666_666_66,
1✔
2286
                                "color": [241, 96, 93, 255]
1✔
2287
                            }, {
1✔
2288
                                "value": 186.533_333_333_333_33,
1✔
2289
                                "color": [250, 127, 94, 255]
1✔
2290
                            }, {
1✔
2291
                                "value": 203.399_999_999_999_98,
1✔
2292
                                "color": [254, 159, 109, 255]
1✔
2293
                            }, {
1✔
2294
                                "value": 220.266_666_666_666_65,
1✔
2295
                                "color": [254, 191, 132, 255]
1✔
2296
                            }, {
1✔
2297
                                "value": 237.133_333_333_333_3,
1✔
2298
                                "color": [253, 222, 160, 255]
1✔
2299
                            }, {
1✔
2300
                                "value": 254,
1✔
2301
                                "color": [252, 253, 191, 255]
1✔
2302
                            }],
1✔
2303
                            "noDataColor": [0, 0, 0, 0],
1✔
2304
                            "overColor": [255, 255, 255, 127],
1✔
2305
                            "underColor": [255, 255, 255, 127]
1✔
2306
                        }
1✔
2307
                    }
1✔
2308
                }]
1✔
2309
            }))
1✔
2310
            .unwrap();
1✔
2311

1✔
2312
            db.update_project(update).await.unwrap();
15✔
2313

1✔
2314
            let update: UpdateProject = serde_json::from_value(json!({
1✔
2315
                "id": project_id.to_string(),
1✔
2316
                "layers": [{
1✔
2317
                    "name": "NDVI",
1✔
2318
                    "workflow": workflow_id.to_string(),
1✔
2319
                    "visibility": {
1✔
2320
                        "data": true,
1✔
2321
                        "legend": false
1✔
2322
                    },
1✔
2323
                    "symbology": {
1✔
2324
                        "type": "raster",
1✔
2325
                        "opacity": 1,
1✔
2326
                        "colorizer": {
1✔
2327
                            "type": "linearGradient",
1✔
2328
                            "breakpoints": [{
1✔
2329
                                "value": 1,
1✔
2330
                                "color": [0, 0, 4, 255]
1✔
2331
                            }, {
1✔
2332
                                "value": 17.933_333_333_333_334,
1✔
2333
                                "color": [11, 9, 36, 255]
1✔
2334
                            }, {
1✔
2335
                                "value": 34.866_666_666_666_67,
1✔
2336
                                "color": [32, 17, 75, 255]
1✔
2337
                            }, {
1✔
2338
                                "value": 51.800_000_000_000_004,
1✔
2339
                                "color": [59, 15, 112, 255]
1✔
2340
                            }, {
1✔
2341
                                "value": 68.733_333_333_333_33,
1✔
2342
                                "color": [87, 21, 126, 255]
1✔
2343
                            }, {
1✔
2344
                                "value": 85.666_666_666_666_66,
1✔
2345
                                "color": [114, 31, 129, 255]
1✔
2346
                            }, {
1✔
2347
                                "value": 102.6,
1✔
2348
                                "color": [140, 41, 129, 255]
1✔
2349
                            }, {
1✔
2350
                                "value": 119.533_333_333_333_32,
1✔
2351
                                "color": [168, 50, 125, 255]
1✔
2352
                            }, {
1✔
2353
                                "value": 136.466_666_666_666_67,
1✔
2354
                                "color": [196, 60, 117, 255]
1✔
2355
                            }, {
1✔
2356
                                "value": 153.4,
1✔
2357
                                "color": [222, 73, 104, 255]
1✔
2358
                            }, {
1✔
2359
                                "value": 170.333_333_333_333_31,
1✔
2360
                                "color": [241, 96, 93, 255]
1✔
2361
                            }, {
1✔
2362
                                "value": 187.266_666_666_666_65,
1✔
2363
                                "color": [250, 127, 94, 255]
1✔
2364
                            }, {
1✔
2365
                                "value": 204.2,
1✔
2366
                                "color": [254, 159, 109, 255]
1✔
2367
                            }, {
1✔
2368
                                "value": 221.133_333_333_333_33,
1✔
2369
                                "color": [254, 191, 132, 255]
1✔
2370
                            }, {
1✔
2371
                                "value": 238.066_666_666_666_63,
1✔
2372
                                "color": [253, 222, 160, 255]
1✔
2373
                            }, {
1✔
2374
                                "value": 255,
1✔
2375
                                "color": [252, 253, 191, 255]
1✔
2376
                            }],
1✔
2377
                            "noDataColor": [0, 0, 0, 0],
1✔
2378
                            "overColor": [255, 255, 255, 127],
1✔
2379
                            "underColor": [255, 255, 255, 127]
1✔
2380
                        }
1✔
2381
                    }
1✔
2382
                }]
1✔
2383
            }))
1✔
2384
            .unwrap();
1✔
2385

1✔
2386
            let update = update;
1✔
2387

2388
            // run two updates concurrently
2389
            let (r0, r1) = join!(db.update_project(update.clone()), db.update_project(update));
1✔
2390

2391
            assert!(r0.is_ok());
1✔
2392
            assert!(r1.is_ok());
1✔
2393
        })
1✔
2394
        .await;
12✔
2395
    }
2396

2397
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2398
    #[allow(clippy::too_many_lines)]
2399
    async fn it_resolves_dataset_names_to_ids() {
1✔
2400
        with_temp_context(|app_ctx, _| async move {
1✔
2401
            let session = app_ctx.default_session().await.unwrap();
18✔
2402
            let db = app_ctx.session_context(session.clone()).db();
1✔
2403

1✔
2404
            let loading_info = OgrSourceDataset {
1✔
2405
                file_name: PathBuf::from("test.csv"),
1✔
2406
                layer_name: "test.csv".to_owned(),
1✔
2407
                data_type: Some(VectorDataType::MultiPoint),
1✔
2408
                time: OgrSourceDatasetTimeType::Start {
1✔
2409
                    start_field: "start".to_owned(),
1✔
2410
                    start_format: OgrSourceTimeFormat::Auto,
1✔
2411
                    duration: OgrSourceDurationSpec::Zero,
1✔
2412
                },
1✔
2413
                default_geometry: None,
1✔
2414
                columns: Some(OgrSourceColumnSpec {
1✔
2415
                    format_specifics: Some(FormatSpecifics::Csv {
1✔
2416
                        header: CsvHeader::Auto,
1✔
2417
                    }),
1✔
2418
                    x: "x".to_owned(),
1✔
2419
                    y: None,
1✔
2420
                    int: vec![],
1✔
2421
                    float: vec![],
1✔
2422
                    text: vec![],
1✔
2423
                    bool: vec![],
1✔
2424
                    datetime: vec![],
1✔
2425
                    rename: None,
1✔
2426
                }),
1✔
2427
                force_ogr_time_filter: false,
1✔
2428
                force_ogr_spatial_filter: false,
1✔
2429
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2430
                sql_query: None,
1✔
2431
                attribute_query: None,
1✔
2432
                cache_ttl: CacheTtlSeconds::default(),
1✔
2433
            };
1✔
2434

1✔
2435
            let meta_data = MetaDataDefinition::OgrMetaData(StaticMetaData::<
1✔
2436
                OgrSourceDataset,
1✔
2437
                VectorResultDescriptor,
1✔
2438
                VectorQueryRectangle,
1✔
2439
            > {
1✔
2440
                loading_info: loading_info.clone(),
1✔
2441
                result_descriptor: VectorResultDescriptor {
1✔
2442
                    data_type: VectorDataType::MultiPoint,
1✔
2443
                    spatial_reference: SpatialReference::epsg_4326().into(),
1✔
2444
                    columns: [(
1✔
2445
                        "foo".to_owned(),
1✔
2446
                        VectorColumnInfo {
1✔
2447
                            data_type: FeatureDataType::Float,
1✔
2448
                            measurement: Measurement::Unitless.into(),
1✔
2449
                        },
1✔
2450
                    )]
1✔
2451
                    .into_iter()
1✔
2452
                    .collect(),
1✔
2453
                    time: None,
1✔
2454
                    bbox: None,
1✔
2455
                },
1✔
2456
                phantom: Default::default(),
1✔
2457
            });
1✔
2458

2459
            let DatasetIdAndName {
2460
                id: dataset_id1,
1✔
2461
                name: dataset_name1,
1✔
2462
            } = db
1✔
2463
                .add_dataset(
1✔
2464
                    AddDataset {
1✔
2465
                        name: Some(DatasetName::new(None, "my_dataset".to_owned())),
1✔
2466
                        display_name: "Ogr Test".to_owned(),
1✔
2467
                        description: "desc".to_owned(),
1✔
2468
                        source_operator: "OgrSource".to_owned(),
1✔
2469
                        symbology: None,
1✔
2470
                        provenance: Some(vec![Provenance {
1✔
2471
                            citation: "citation".to_owned(),
1✔
2472
                            license: "license".to_owned(),
1✔
2473
                            uri: "uri".to_owned(),
1✔
2474
                        }]),
1✔
2475
                    },
1✔
2476
                    db.wrap_meta_data(meta_data.clone()),
1✔
2477
                )
1✔
2478
                .await
67✔
2479
                .unwrap();
1✔
2480

2481
            assert_eq!(
1✔
2482
                db.resolve_dataset_name_to_id(&dataset_name1).await.unwrap(),
3✔
2483
                dataset_id1
2484
            );
2485
        })
1✔
2486
        .await;
11✔
2487
    }
2488

2489
    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]
1✔
2490
    #[allow(clippy::too_many_lines)]
2491
    async fn test_postgres_type_serialization() {
1✔
2492
        pub async fn test_type<T>(
26✔
2493
            conn: &PooledConnection<'_, PostgresConnectionManager<tokio_postgres::NoTls>>,
26✔
2494
            sql_type: &str,
26✔
2495
            checks: impl IntoIterator<Item = T>,
26✔
2496
        ) where
26✔
2497
            T: PartialEq + postgres_types::FromSqlOwned + postgres_types::ToSql + Sync,
26✔
2498
        {
26✔
2499
            const UNQUOTED: [&str; 1] = ["double precision"];
1✔
2500

1✔
2501
            // don't quote built-in types
1✔
2502
            let quote = if UNQUOTED.contains(&sql_type) || sql_type.contains('[') {
26✔
2503
                ""
2✔
2504
            } else {
1✔
2505
                "\""
24✔
2506
            };
1✔
2507

1✔
2508
            for value in checks {
86✔
2509
                let stmt = conn
60✔
2510
                    .prepare(&format!("SELECT $1::{quote}{sql_type}{quote}"))
60✔
2511
                    .await
134✔
2512
                    .unwrap();
60✔
2513
                let result: T = conn.query_one(&stmt, &[&value]).await.unwrap().get(0);
60✔
2514

60✔
2515
                assert_eq!(value, result);
60✔
2516
            }
1✔
2517
        }
26✔
2518

1✔
2519
        with_temp_context(|app_ctx, _| async move {
1✔
2520
            let pool = app_ctx.pool.get().await.unwrap();
1✔
2521

1✔
2522
            test_type(&pool, "RgbaColor", [RgbaColor([0, 1, 2, 3])]).await;
4✔
2523

2524
            test_type(
1✔
2525
                &pool,
1✔
2526
                "double precision",
1✔
2527
                [NotNanF64::from(NotNan::<f64>::new(1.0).unwrap())],
1✔
2528
            )
1✔
2529
            .await;
2✔
2530

2531
            test_type(
1✔
2532
                &pool,
1✔
2533
                "Breakpoint",
1✔
2534
                [Breakpoint {
1✔
2535
                    value: NotNan::<f64>::new(1.0).unwrap().into(),
1✔
2536
                    color: RgbaColor([0, 0, 0, 0]),
1✔
2537
                }],
1✔
2538
            )
1✔
2539
            .await;
5✔
2540

2541
            test_type(
1✔
2542
                &pool,
1✔
2543
                "DefaultColors",
1✔
2544
                [
1✔
2545
                    DefaultColors::DefaultColor {
1✔
2546
                        default_color: RgbaColor([0, 10, 20, 30]),
1✔
2547
                    },
1✔
2548
                    DefaultColors::OverUnder(OverUnderColors {
1✔
2549
                        over_color: RgbaColor([1, 2, 3, 4]),
1✔
2550
                        under_color: RgbaColor([5, 6, 7, 8]),
1✔
2551
                    }),
1✔
2552
                ],
1✔
2553
            )
1✔
2554
            .await;
6✔
2555

2556
            test_type(
1✔
2557
                &pool,
1✔
2558
                "ColorizerType",
1✔
2559
                [
1✔
2560
                    ColorizerTypeDbType::LinearGradient,
1✔
2561
                    ColorizerTypeDbType::LogarithmicGradient,
1✔
2562
                    ColorizerTypeDbType::Palette,
1✔
2563
                    ColorizerTypeDbType::Rgba,
1✔
2564
                ],
1✔
2565
            )
1✔
2566
            .await;
11✔
2567

2568
            test_type(
1✔
2569
                &pool,
1✔
2570
                "Colorizer",
1✔
2571
                [
1✔
2572
                    Colorizer::LinearGradient(LinearGradient {
1✔
2573
                        breakpoints: vec![
1✔
2574
                            Breakpoint {
1✔
2575
                                value: NotNan::<f64>::new(-10.0).unwrap().into(),
1✔
2576
                                color: RgbaColor([0, 0, 0, 0]),
1✔
2577
                            },
1✔
2578
                            Breakpoint {
1✔
2579
                                value: NotNan::<f64>::new(2.0).unwrap().into(),
1✔
2580
                                color: RgbaColor([255, 0, 0, 255]),
1✔
2581
                            },
1✔
2582
                        ],
1✔
2583
                        no_data_color: RgbaColor([0, 10, 20, 30]),
1✔
2584
                        color_fields: DefaultColors::OverUnder(OverUnderColors {
1✔
2585
                            over_color: RgbaColor([1, 2, 3, 4]),
1✔
2586
                            under_color: RgbaColor([5, 6, 7, 8]),
1✔
2587
                        }),
1✔
2588
                    }),
1✔
2589
                    Colorizer::LogarithmicGradient(LogarithmicGradient {
1✔
2590
                        breakpoints: vec![
1✔
2591
                            Breakpoint {
1✔
2592
                                value: NotNan::<f64>::new(1.0).unwrap().into(),
1✔
2593
                                color: RgbaColor([0, 0, 0, 0]),
1✔
2594
                            },
1✔
2595
                            Breakpoint {
1✔
2596
                                value: NotNan::<f64>::new(2.0).unwrap().into(),
1✔
2597
                                color: RgbaColor([255, 0, 0, 255]),
1✔
2598
                            },
1✔
2599
                        ],
1✔
2600
                        no_data_color: RgbaColor([0, 10, 20, 30]),
1✔
2601
                        color_fields: DefaultColors::OverUnder(OverUnderColors {
1✔
2602
                            over_color: RgbaColor([1, 2, 3, 4]),
1✔
2603
                            under_color: RgbaColor([5, 6, 7, 8]),
1✔
2604
                        }),
1✔
2605
                    }),
1✔
2606
                    Colorizer::Palette {
1✔
2607
                        colors: Palette(
1✔
2608
                            [
1✔
2609
                                (NotNan::<f64>::new(1.0).unwrap(), RgbaColor([0, 0, 0, 0])),
1✔
2610
                                (
1✔
2611
                                    NotNan::<f64>::new(2.0).unwrap(),
1✔
2612
                                    RgbaColor([255, 0, 0, 255]),
1✔
2613
                                ),
1✔
2614
                                (NotNan::<f64>::new(3.0).unwrap(), RgbaColor([0, 10, 20, 30])),
1✔
2615
                            ]
1✔
2616
                            .into(),
1✔
2617
                        ),
1✔
2618
                        no_data_color: RgbaColor([1, 2, 3, 4]),
1✔
2619
                        default_color: RgbaColor([5, 6, 7, 8]),
1✔
2620
                    },
1✔
2621
                    Colorizer::Rgba,
1✔
2622
                ],
1✔
2623
            )
1✔
2624
            .await;
11✔
2625

2626
            test_type(
1✔
2627
                &pool,
1✔
2628
                "ColorParam",
1✔
2629
                [
1✔
2630
                    ColorParam::Static {
1✔
2631
                        color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2632
                    },
1✔
2633
                    ColorParam::Derived(DerivedColor {
1✔
2634
                        attribute: "foobar".to_string(),
1✔
2635
                        colorizer: Colorizer::Rgba,
1✔
2636
                    }),
1✔
2637
                ],
1✔
2638
            )
1✔
2639
            .await;
6✔
2640

2641
            test_type(
1✔
2642
                &pool,
1✔
2643
                "NumberParam",
1✔
2644
                [
1✔
2645
                    NumberParam::Static { value: 42 },
1✔
2646
                    NumberParam::Derived(DerivedNumber {
1✔
2647
                        attribute: "foobar".to_string(),
1✔
2648
                        factor: 1.0,
1✔
2649
                        default_value: 42.,
1✔
2650
                    }),
1✔
2651
                ],
1✔
2652
            )
1✔
2653
            .await;
6✔
2654

2655
            test_type(
1✔
2656
                &pool,
1✔
2657
                "StrokeParam",
1✔
2658
                [StrokeParam {
1✔
2659
                    width: NumberParam::Static { value: 42 },
1✔
2660
                    color: ColorParam::Static {
1✔
2661
                        color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2662
                    },
1✔
2663
                }],
1✔
2664
            )
1✔
2665
            .await;
4✔
2666

2667
            test_type(
1✔
2668
                &pool,
1✔
2669
                "TextSymbology",
1✔
2670
                [TextSymbology {
1✔
2671
                    attribute: "attribute".to_string(),
1✔
2672
                    fill_color: ColorParam::Static {
1✔
2673
                        color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2674
                    },
1✔
2675
                    stroke: StrokeParam {
1✔
2676
                        width: NumberParam::Static { value: 42 },
1✔
2677
                        color: ColorParam::Static {
1✔
2678
                            color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2679
                        },
1✔
2680
                    },
1✔
2681
                }],
1✔
2682
            )
1✔
2683
            .await;
4✔
2684

2685
            test_type(
1✔
2686
                &pool,
1✔
2687
                "Symbology",
1✔
2688
                [
1✔
2689
                    Symbology::Point(PointSymbology {
1✔
2690
                        fill_color: ColorParam::Static {
1✔
2691
                            color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2692
                        },
1✔
2693
                        stroke: StrokeParam {
1✔
2694
                            width: NumberParam::Static { value: 42 },
1✔
2695
                            color: ColorParam::Static {
1✔
2696
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2697
                            },
1✔
2698
                        },
1✔
2699
                        radius: NumberParam::Static { value: 42 },
1✔
2700
                        text: Some(TextSymbology {
1✔
2701
                            attribute: "attribute".to_string(),
1✔
2702
                            fill_color: ColorParam::Static {
1✔
2703
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2704
                            },
1✔
2705
                            stroke: StrokeParam {
1✔
2706
                                width: NumberParam::Static { value: 42 },
1✔
2707
                                color: ColorParam::Static {
1✔
2708
                                    color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2709
                                },
1✔
2710
                            },
1✔
2711
                        }),
1✔
2712
                    }),
1✔
2713
                    Symbology::Line(LineSymbology {
1✔
2714
                        stroke: StrokeParam {
1✔
2715
                            width: NumberParam::Static { value: 42 },
1✔
2716
                            color: ColorParam::Static {
1✔
2717
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2718
                            },
1✔
2719
                        },
1✔
2720
                        text: Some(TextSymbology {
1✔
2721
                            attribute: "attribute".to_string(),
1✔
2722
                            fill_color: ColorParam::Static {
1✔
2723
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2724
                            },
1✔
2725
                            stroke: StrokeParam {
1✔
2726
                                width: NumberParam::Static { value: 42 },
1✔
2727
                                color: ColorParam::Static {
1✔
2728
                                    color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2729
                                },
1✔
2730
                            },
1✔
2731
                        }),
1✔
2732
                        auto_simplified: true,
1✔
2733
                    }),
1✔
2734
                    Symbology::Polygon(PolygonSymbology {
1✔
2735
                        fill_color: ColorParam::Static {
1✔
2736
                            color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2737
                        },
1✔
2738
                        stroke: StrokeParam {
1✔
2739
                            width: NumberParam::Static { value: 42 },
1✔
2740
                            color: ColorParam::Static {
1✔
2741
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2742
                            },
1✔
2743
                        },
1✔
2744
                        text: Some(TextSymbology {
1✔
2745
                            attribute: "attribute".to_string(),
1✔
2746
                            fill_color: ColorParam::Static {
1✔
2747
                                color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2748
                            },
1✔
2749
                            stroke: StrokeParam {
1✔
2750
                                width: NumberParam::Static { value: 42 },
1✔
2751
                                color: ColorParam::Static {
1✔
2752
                                    color: RgbaColor([0, 10, 20, 30]).into(),
1✔
2753
                                },
1✔
2754
                            },
1✔
2755
                        }),
1✔
2756
                        auto_simplified: true,
1✔
2757
                    }),
1✔
2758
                    Symbology::Raster(RasterSymbology {
1✔
2759
                        opacity: 1.0,
1✔
2760
                        colorizer: Colorizer::LinearGradient(LinearGradient {
1✔
2761
                            breakpoints: vec![
1✔
2762
                                Breakpoint {
1✔
2763
                                    value: NotNan::<f64>::new(-10.0).unwrap().into(),
1✔
2764
                                    color: RgbaColor([0, 0, 0, 0]),
1✔
2765
                                },
1✔
2766
                                Breakpoint {
1✔
2767
                                    value: NotNan::<f64>::new(2.0).unwrap().into(),
1✔
2768
                                    color: RgbaColor([255, 0, 0, 255]),
1✔
2769
                                },
1✔
2770
                            ],
1✔
2771
                            no_data_color: RgbaColor([0, 10, 20, 30]),
1✔
2772
                            color_fields: DefaultColors::OverUnder(OverUnderColors {
1✔
2773
                                over_color: RgbaColor([1, 2, 3, 4]),
1✔
2774
                                under_color: RgbaColor([5, 6, 7, 8]),
1✔
2775
                            }),
1✔
2776
                        }),
1✔
2777
                    }),
1✔
2778
                ],
1✔
2779
            )
1✔
2780
            .await;
18✔
2781

2782
            test_type(
1✔
2783
                &pool,
1✔
2784
                "RasterDataType",
1✔
2785
                [
1✔
2786
                    crate::api::model::datatypes::RasterDataType::U8,
1✔
2787
                    crate::api::model::datatypes::RasterDataType::U16,
1✔
2788
                    crate::api::model::datatypes::RasterDataType::U32,
1✔
2789
                    crate::api::model::datatypes::RasterDataType::U64,
1✔
2790
                    crate::api::model::datatypes::RasterDataType::I8,
1✔
2791
                    crate::api::model::datatypes::RasterDataType::I16,
1✔
2792
                    crate::api::model::datatypes::RasterDataType::I32,
1✔
2793
                    crate::api::model::datatypes::RasterDataType::I64,
1✔
2794
                    crate::api::model::datatypes::RasterDataType::F32,
1✔
2795
                    crate::api::model::datatypes::RasterDataType::F64,
1✔
2796
                ],
1✔
2797
            )
1✔
2798
            .await;
22✔
2799

2800
            test_type(
1✔
2801
                &pool,
1✔
2802
                "Measurement",
1✔
2803
                [
1✔
2804
                    Measurement::Unitless,
1✔
2805
                    Measurement::Continuous(ContinuousMeasurement {
1✔
2806
                        measurement: "Temperature".to_string(),
1✔
2807
                        unit: Some("°C".to_string()),
1✔
2808
                    }),
1✔
2809
                    Measurement::Classification(ClassificationMeasurement {
1✔
2810
                        measurement: "Color".to_string(),
1✔
2811
                        classes: [(1, "Grayscale".to_string()), (2, "Colorful".to_string())].into(),
1✔
2812
                    }),
1✔
2813
                ],
1✔
2814
            )
1✔
2815
            .await;
15✔
2816

2817
            test_type(
1✔
2818
                &pool,
1✔
2819
                "Coordinate2D",
1✔
2820
                [crate::api::model::datatypes::Coordinate2D::from(
1✔
2821
                    Coordinate2D::new(0.0f64, 1.),
1✔
2822
                )],
1✔
2823
            )
1✔
2824
            .await;
4✔
2825

2826
            test_type(
1✔
2827
                &pool,
1✔
2828
                "SpatialPartition2D",
1✔
2829
                [crate::api::model::datatypes::SpatialPartition2D {
1✔
2830
                    upper_left_coordinate: Coordinate2D::new(0.0f64, 1.).into(),
1✔
2831
                    lower_right_coordinate: Coordinate2D::new(2., 0.5).into(),
1✔
2832
                }],
1✔
2833
            )
1✔
2834
            .await;
4✔
2835

2836
            test_type(
1✔
2837
                &pool,
1✔
2838
                "BoundingBox2D",
1✔
2839
                [crate::api::model::datatypes::BoundingBox2D {
1✔
2840
                    lower_left_coordinate: Coordinate2D::new(0.0f64, 0.5).into(),
1✔
2841
                    upper_right_coordinate: Coordinate2D::new(2., 1.0).into(),
1✔
2842
                }],
1✔
2843
            )
1✔
2844
            .await;
4✔
2845

2846
            test_type(
1✔
2847
                &pool,
1✔
2848
                "SpatialResolution",
1✔
2849
                [crate::api::model::datatypes::SpatialResolution { x: 1.2, y: 2.3 }],
1✔
2850
            )
1✔
2851
            .await;
4✔
2852

2853
            test_type(
1✔
2854
                &pool,
1✔
2855
                "VectorDataType",
1✔
2856
                [
1✔
2857
                    crate::api::model::datatypes::VectorDataType::Data,
1✔
2858
                    crate::api::model::datatypes::VectorDataType::MultiPoint,
1✔
2859
                    crate::api::model::datatypes::VectorDataType::MultiLineString,
1✔
2860
                    crate::api::model::datatypes::VectorDataType::MultiPolygon,
1✔
2861
                ],
1✔
2862
            )
1✔
2863
            .await;
10✔
2864

2865
            test_type(
1✔
2866
                &pool,
1✔
2867
                "FeatureDataType",
1✔
2868
                [
1✔
2869
                    crate::api::model::datatypes::FeatureDataType::Category,
1✔
2870
                    crate::api::model::datatypes::FeatureDataType::Int,
1✔
2871
                    crate::api::model::datatypes::FeatureDataType::Float,
1✔
2872
                    crate::api::model::datatypes::FeatureDataType::Text,
1✔
2873
                    crate::api::model::datatypes::FeatureDataType::Bool,
1✔
2874
                    crate::api::model::datatypes::FeatureDataType::DateTime,
1✔
2875
                ],
1✔
2876
            )
1✔
2877
            .await;
14✔
2878

2879
            test_type(
1✔
2880
                &pool,
1✔
2881
                "TimeInterval",
1✔
2882
                [crate::api::model::datatypes::TimeInterval::from(
1✔
2883
                    TimeInterval::default(),
1✔
2884
                )],
1✔
2885
            )
1✔
2886
            .await;
4✔
2887

2888
            test_type(
1✔
2889
                &pool,
1✔
2890
                "SpatialReference",
1✔
2891
                [
1✔
2892
                    crate::api::model::datatypes::SpatialReferenceOption::Unreferenced,
1✔
2893
                    crate::api::model::datatypes::SpatialReferenceOption::SpatialReference(
1✔
2894
                        SpatialReference::epsg_4326().into(),
1✔
2895
                    ),
1✔
2896
                ],
1✔
2897
            )
1✔
2898
            .await;
8✔
2899

2900
            test_type(
1✔
2901
                &pool,
1✔
2902
                "PlotResultDescriptor",
1✔
2903
                [PlotResultDescriptor {
1✔
2904
                    spatial_reference: SpatialReferenceOption::Unreferenced.into(),
1✔
2905
                    time: None,
1✔
2906
                    bbox: None,
1✔
2907
                }],
1✔
2908
            )
1✔
2909
            .await;
4✔
2910

2911
            test_type(
1✔
2912
                &pool,
1✔
2913
                "VectorResultDescriptor",
1✔
2914
                [crate::api::model::operators::VectorResultDescriptor {
1✔
2915
                    data_type: VectorDataType::MultiPoint.into(),
1✔
2916
                    spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2917
                        SpatialReference::epsg_4326(),
1✔
2918
                    )
1✔
2919
                    .into(),
1✔
2920
                    columns: [(
1✔
2921
                        "foo".to_string(),
1✔
2922
                        VectorColumnInfo {
1✔
2923
                            data_type: FeatureDataType::Int,
1✔
2924
                            measurement: Measurement::Unitless.into(),
1✔
2925
                        }
1✔
2926
                        .into(),
1✔
2927
                    )]
1✔
2928
                    .into(),
1✔
2929
                    time: Some(TimeInterval::default().into()),
1✔
2930
                    bbox: Some(
1✔
2931
                        BoundingBox2D::new(
1✔
2932
                            Coordinate2D::new(0.0f64, 0.5),
1✔
2933
                            Coordinate2D::new(2., 1.0),
1✔
2934
                        )
1✔
2935
                        .unwrap()
1✔
2936
                        .into(),
1✔
2937
                    ),
1✔
2938
                }],
1✔
2939
            )
1✔
2940
            .await;
7✔
2941

2942
            test_type(
1✔
2943
                &pool,
1✔
2944
                "RasterResultDescriptor",
1✔
2945
                [crate::api::model::operators::RasterResultDescriptor {
1✔
2946
                    data_type: RasterDataType::U8.into(),
1✔
2947
                    spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2948
                        SpatialReference::epsg_4326(),
1✔
2949
                    )
1✔
2950
                    .into(),
1✔
2951
                    measurement: Measurement::Unitless,
1✔
2952
                    time: Some(TimeInterval::default().into()),
1✔
2953
                    bbox: Some(SpatialPartition2D {
1✔
2954
                        upper_left_coordinate: Coordinate2D::new(0.0f64, 1.).into(),
1✔
2955
                        lower_right_coordinate: Coordinate2D::new(2., 0.5).into(),
1✔
2956
                    }),
1✔
2957
                    resolution: Some(SpatialResolution { x: 1.2, y: 2.3 }.into()),
1✔
2958
                }],
1✔
2959
            )
1✔
2960
            .await;
4✔
2961

2962
            test_type(
1✔
2963
                &pool,
1✔
2964
                "ResultDescriptor",
1✔
2965
                [
1✔
2966
                    crate::api::model::operators::TypedResultDescriptor::Vector(
1✔
2967
                        VectorResultDescriptor {
1✔
2968
                            data_type: VectorDataType::MultiPoint,
1✔
2969
                            spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2970
                                SpatialReference::epsg_4326(),
1✔
2971
                            ),
1✔
2972
                            columns: [(
1✔
2973
                                "foo".to_string(),
1✔
2974
                                VectorColumnInfo {
1✔
2975
                                    data_type: FeatureDataType::Int,
1✔
2976
                                    measurement: Measurement::Unitless.into(),
1✔
2977
                                },
1✔
2978
                            )]
1✔
2979
                            .into(),
1✔
2980
                            time: Some(TimeInterval::default()),
1✔
2981
                            bbox: Some(
1✔
2982
                                BoundingBox2D::new(
1✔
2983
                                    Coordinate2D::new(0.0f64, 0.5),
1✔
2984
                                    Coordinate2D::new(2., 1.0),
1✔
2985
                                )
1✔
2986
                                .unwrap(),
1✔
2987
                            ),
1✔
2988
                        }
1✔
2989
                        .into(),
1✔
2990
                    ),
1✔
2991
                    crate::api::model::operators::TypedResultDescriptor::Raster(
1✔
2992
                        crate::api::model::operators::RasterResultDescriptor {
1✔
2993
                            data_type: RasterDataType::U8.into(),
1✔
2994
                            spatial_reference: SpatialReferenceOption::SpatialReference(
1✔
2995
                                SpatialReference::epsg_4326(),
1✔
2996
                            )
1✔
2997
                            .into(),
1✔
2998
                            measurement: Measurement::Unitless,
1✔
2999
                            time: Some(TimeInterval::default().into()),
1✔
3000
                            bbox: Some(SpatialPartition2D {
1✔
3001
                                upper_left_coordinate: Coordinate2D::new(0.0f64, 1.).into(),
1✔
3002
                                lower_right_coordinate: Coordinate2D::new(2., 0.5).into(),
1✔
3003
                            }),
1✔
3004
                            resolution: Some(SpatialResolution { x: 1.2, y: 2.3 }.into()),
1✔
3005
                        },
1✔
3006
                    ),
1✔
3007
                    crate::api::model::operators::TypedResultDescriptor::Plot(
1✔
3008
                        PlotResultDescriptor {
1✔
3009
                            spatial_reference: SpatialReferenceOption::Unreferenced.into(),
1✔
3010
                            time: None,
1✔
3011
                            bbox: None,
1✔
3012
                        },
1✔
3013
                    ),
1✔
3014
                ],
1✔
3015
            )
1✔
3016
            .await;
8✔
3017

3018
            test_type(
1✔
3019
                &pool,
1✔
3020
                "\"TextTextKeyValue\"[]",
1✔
3021
                [HashMapTextTextDbType::from(
1✔
3022
                    &HashMap::<String, String>::from([
1✔
3023
                        ("foo".to_string(), "bar".to_string()),
1✔
3024
                        ("baz".to_string(), "fuu".to_string()),
1✔
3025
                    ]),
1✔
3026
                )],
1✔
3027
            )
1✔
3028
            .await;
5✔
3029
        })
1✔
3030
        .await;
12✔
3031
    }
3032
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc