geo-engine / geoengine / build 13809415963

12 Mar 2025 10:42AM UTC. Coverage: 90.026% (-0.05%) from 90.076%.

Pull Request #1013: Update-utoipa (merges b51e2554c into c96026921, committed via web-flow on GitHub).

787 of 935 new or added lines in 41 files covered (84.17%).
28 existing lines in 10 files are now uncovered.
125995 of 139954 relevant lines covered (90.03%).
57510.86 hits per line.

Source file: /services/src/api/handlers/datasets.rs (94.66% covered)
use crate::{
    api::model::{
        operators::{GdalLoadingInfoTemporalSlice, GdalMetaDataList},
        responses::{
            datasets::{errors::*, DatasetNameResponse},
            ErrorResponse,
        },
        services::{
            AddDataset, CreateDataset, DataPath, DatasetDefinition, MetaDataDefinition,
            MetaDataSuggestion, Provenances, UpdateDataset,
        },
    },
    config::{get_config_element, Data},
    contexts::{ApplicationContext, SessionContext},
    datasets::{
        listing::{DatasetListOptions, DatasetListing, DatasetProvider},
        storage::{AutoCreateDataset, Dataset, DatasetStore, SuggestMetaData},
        upload::{
            AdjustFilePath, Upload, UploadDb, UploadId, UploadRootPath, Volume, VolumeName, Volumes,
        },
        DatasetName,
    },
    error::{self, Error, Result},
    permissions::{Permission, PermissionDb, Role},
    projects::Symbology,
    util::{
        extractors::{ValidatedJson, ValidatedQuery},
        path_with_base_path,
    },
};
use actix_web::{web, FromRequest, HttpResponse, HttpResponseBuilder, Responder};
use gdal::{
    vector::{Layer, LayerAccess, OGRFieldType},
    DatasetOptions,
};
use geoengine_datatypes::{
    collections::VectorDataType,
    error::BoxedResultExt,
    primitives::{
        CacheTtlSeconds, FeatureDataType, Measurement, TimeInterval, VectorQueryRectangle,
    },
    spatial_reference::{SpatialReference, SpatialReferenceOption},
};
use geoengine_operators::{
    engine::{StaticMetaData, VectorColumnInfo, VectorResultDescriptor},
    source::{
        OgrSourceColumnSpec, OgrSourceDataset, OgrSourceDatasetTimeType, OgrSourceDurationSpec,
        OgrSourceErrorSpec, OgrSourceTimeFormat,
    },
    util::gdal::{
        gdal_open_dataset, gdal_open_dataset_ex, gdal_parameters_from_dataset,
        raster_descriptor_from_dataset,
    },
};
use serde::{Deserialize, Serialize};
use snafu::ResultExt;
use std::{
    collections::HashMap,
    convert::{TryFrom, TryInto},
    path::Path,
};
use utoipa::{ToResponse, ToSchema};

pub(crate) fn init_dataset_routes<C>(cfg: &mut web::ServiceConfig)
where
    C: ApplicationContext,
    C::Session: FromRequest,
{
    cfg.service(
        web::scope("/dataset")
            .service(
                web::resource("/suggest").route(web::post().to(suggest_meta_data_handler::<C>)),
            )
            .service(web::resource("/auto").route(web::post().to(auto_create_dataset_handler::<C>)))
            .service(
                web::resource("/volumes/{volume_name}/files/{file_name}/layers")
                    .route(web::get().to(list_volume_file_layers_handler::<C>)),
            )
            .service(web::resource("/volumes").route(web::get().to(list_volumes_handler::<C>)))
            .service(
                web::resource("/{dataset}/loadingInfo")
                    .route(web::get().to(get_loading_info_handler::<C>))
                    .route(web::put().to(update_loading_info_handler::<C>)),
            )
            .service(
                web::resource("/{dataset}/symbology")
                    .route(web::put().to(update_dataset_symbology_handler::<C>)),
            )
            .service(
                web::resource("/{dataset}/provenance")
                    .route(web::put().to(update_dataset_provenance_handler::<C>)),
            )
            .service(
                web::resource("/{dataset}")
                    .route(web::get().to(get_dataset_handler::<C>))
                    .route(web::post().to(update_dataset_handler::<C>))
                    .route(web::delete().to(delete_dataset_handler::<C>)),
            )
            .service(web::resource("").route(web::post().to(create_dataset_handler::<C>))), // must come last to not match other routes
    )
    .service(web::resource("/datasets").route(web::get().to(list_datasets_handler::<C>)));
}

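// How these routes are typically mounted on an actix-web `App` (a minimal
// sketch; the concrete context type is illustrative, any `ApplicationContext`
// whose session implements `FromRequest` works):
//
//     let app = actix_web::App::new()
//         .app_data(web::Data::new(app_ctx))
//         .configure(init_dataset_routes::<PostgresContext<NoTls>>);
//
// Note that the empty resource ("") for dataset creation is registered last
// so that it does not shadow the more specific routes in the same scope.
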
/// Lists available volumes.
#[utoipa::path(
    tag = "Datasets",
    get,
    path = "/dataset/volumes",
    responses(
        (status = 200, description = "OK", body = [Volume],
            example = json!([
                {
                    "name": "test_data",
                    "path": "./test_data/"
                }
            ])
        ),
        (status = 401, response = crate::api::model::responses::UnauthorizedAdminResponse)
    ),
    security(
        ("session_token" = [])
    )
)]
#[allow(clippy::unused_async)]
pub async fn list_volumes_handler<C: ApplicationContext>(
    app_ctx: web::Data<C>,
    session: C::Session,
) -> Result<impl Responder> {
    let volumes = app_ctx.session_context(session).volumes()?;
    Ok(web::Json(volumes))
}

/// Lists available datasets.
#[utoipa::path(
    tag = "Datasets",
    get,
    path = "/datasets",
    responses(
        (status = 200, description = "OK", body = [DatasetListing],
            example = json!([
                {
                    "id": {
                        "internal": "9c874b9e-cea0-4553-b727-a13cb26ae4bb"
                    },
                    "name": "Germany",
                    "description": "Boundaries of Germany",
                    "tags": [],
                    "sourceOperator": "OgrSource",
                    "resultDescriptor": {
                        "vector": {
                            "dataType": "MultiPolygon",
                            "spatialReference": "EPSG:4326",
                            "columns": {}
                        }
                    }
                }
            ])
        ),
        (status = 400, response = crate::api::model::responses::BadRequestQueryResponse),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        DatasetListOptions
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn list_datasets_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    options: ValidatedQuery<DatasetListOptions>,
) -> Result<impl Responder> {
    let options = options.into_inner();
    let list = app_ctx
        .session_context(session)
        .db()
        .list_datasets(options)
        .await?;
    Ok(web::Json(list))
}

/// Retrieves details about a dataset using the internal name.
#[utoipa::path(
    tag = "Datasets",
    get,
    path = "/dataset/{dataset}",
    responses(
        (status = 200, description = "OK", body = Dataset,
            example = json!({
                "id": {
                    "internal": "9c874b9e-cea0-4553-b727-a13cb26ae4bb"
                },
                "name": "Germany",
                "description": "Boundaries of Germany",
                "resultDescriptor": {
                    "vector": {
                        "dataType": "MultiPolygon",
                        "spatialReference": "EPSG:4326",
                        "columns": {}
                    }
                },
                "sourceOperator": "OgrSource"
            })
        ),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Referenced an unknown dataset" = (value = json!({
                "error": "CannotLoadDataset",
                "message": "CannotLoadDataset: UnknownDatasetName"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name")
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn get_dataset_handler<C: ApplicationContext>(
    dataset: web::Path<DatasetName>,
    session: C::Session,
    app_ctx: web::Data<C>,
) -> Result<impl Responder, GetDatasetError> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await
        .context(CannotLoadDataset)?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id
        .ok_or(error::Error::UnknownDatasetName {
            dataset_name: real_dataset.to_string(),
        })
        .context(CannotLoadDataset)?;

    let dataset = session_ctx
        .load_dataset(&dataset_id)
        .await
        .context(CannotLoadDataset)?;

    Ok(web::Json(dataset))
}

/// Updates details about a dataset using the internal name.
#[utoipa::path(
    tag = "Datasets",
    post,
    path = "/dataset/{dataset}",
    request_body = UpdateDataset,
    responses(
        (status = 200, description = "OK"),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Referenced an unknown dataset" = (value = json!({
                "error": "CannotLoadDataset",
                "message": "CannotLoadDataset: UnknownDatasetName"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name"),
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn update_dataset_handler<C: ApplicationContext>(
    dataset: web::Path<DatasetName>,
    session: C::Session,
    app_ctx: web::Data<C>,
    update: ValidatedJson<UpdateDataset>,
) -> Result<impl Responder, UpdateDatasetError> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await
        .context(CannotLoadDatasetForUpdate)?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id
        .ok_or(error::Error::UnknownDatasetName {
            dataset_name: real_dataset.to_string(),
        })
        .context(CannotLoadDatasetForUpdate)?;

    session_ctx
        .update_dataset(dataset_id, update.into_inner())
        .await
        .context(CannotUpdateDataset)?;

    Ok(HttpResponse::Ok())
}

/// Retrieves the loading information of a dataset.
#[utoipa::path(
    tag = "Datasets",
    get,
    path = "/dataset/{dataset}/loadingInfo",
    responses(
        (status = 200, description = "OK", body = MetaDataDefinition)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name")
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn get_loading_info_handler<C: ApplicationContext>(
    dataset: web::Path<DatasetName>,
    session: C::Session,
    app_ctx: web::Data<C>,
) -> Result<web::Json<MetaDataDefinition>> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
        dataset_name: real_dataset.to_string(),
    })?;

    let dataset = session_ctx.load_loading_info(&dataset_id).await?;

    Ok(web::Json(dataset.into()))
}

/// Updates the dataset's loading info.
#[utoipa::path(
    tag = "Datasets",
    put,
    path = "/dataset/{dataset}/loadingInfo",
    request_body = MetaDataDefinition,
    responses(
        (status = 200, description = "OK"),
        (status = 400, description = "Bad request", body = ErrorResponse),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name"),
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn update_loading_info_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    dataset: web::Path<DatasetName>,
    meta_data: web::Json<MetaDataDefinition>,
) -> Result<HttpResponse> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
        dataset_name: real_dataset.to_string(),
    })?;

    session_ctx
        .update_dataset_loading_info(dataset_id, &meta_data.into_inner().into())
        .await?;

    Ok(HttpResponse::Ok().finish())
}

/// Updates the dataset's symbology.
#[utoipa::path(
    tag = "Datasets",
    put,
    path = "/dataset/{dataset}/symbology",
    request_body = Symbology,
    responses(
        (status = 200, description = "OK"),
        (status = 400, description = "Bad request", body = ErrorResponse),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name"),
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn update_dataset_symbology_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    dataset: web::Path<DatasetName>,
    symbology: web::Json<Symbology>,
) -> Result<impl Responder> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
        dataset_name: real_dataset.to_string(),
    })?;

    session_ctx
        .update_dataset_symbology(dataset_id, &symbology.into_inner())
        .await?;

    Ok(HttpResponse::Ok())
}

/// Updates the dataset's provenance.
#[utoipa::path(
    tag = "Datasets",
    put,
    path = "/dataset/{dataset}/provenance",
    request_body = Provenances,
    responses(
        (status = 200, description = "OK"),
        (status = 400, description = "Bad request", body = ErrorResponse),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name"),
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn update_dataset_provenance_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    dataset: web::Path<DatasetName>,
    provenance: ValidatedJson<Provenances>,
) -> Result<HttpResponseBuilder> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
        dataset_name: real_dataset.to_string(),
    })?;

    let provenance = provenance
        .into_inner()
        .provenances
        .into_iter()
        .map(Into::into)
        .collect::<Vec<_>>();

    session_ctx
        .update_dataset_provenance(dataset_id, &provenance)
        .await?;

    Ok(HttpResponse::Ok())
}

/// Creates a new dataset from the files of a previously made upload.
pub async fn create_upload_dataset<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    upload_id: UploadId,
    mut definition: DatasetDefinition,
) -> Result<web::Json<DatasetNameResponse>, CreateDatasetError> {
    let db = app_ctx.session_context(session).db();
    let upload = db.load_upload(upload_id).await.context(UploadNotFound)?;

    add_tag(&mut definition.properties, "upload".to_owned());

    adjust_meta_data_path(&mut definition.meta_data, &upload)
        .context(CannotResolveUploadFilePath)?;

    let result = db
        .add_dataset(definition.properties.into(), definition.meta_data.into())
        .await
        .context(CannotCreateDataset)?;

    Ok(web::Json(result.name.into()))
}

/// Adjusts all file paths in the meta data using the given [`AdjustFilePath`]
/// implementation, e.g. an [`Upload`] or a [`Volume`].
pub fn adjust_meta_data_path<A: AdjustFilePath>(
    meta: &mut MetaDataDefinition,
    adjust: &A,
) -> Result<()> {
    match meta {
        MetaDataDefinition::MockMetaData(_) => {}
        MetaDataDefinition::OgrMetaData(m) => {
            m.inner.loading_info.file_name =
                adjust.adjust_file_path(&m.inner.loading_info.file_name)?;
        }
        MetaDataDefinition::GdalMetaDataRegular(m) => {
            m.params.file_path = adjust.adjust_file_path(&m.params.file_path)?;
        }
        MetaDataDefinition::GdalStatic(m) => {
            m.params.file_path = adjust.adjust_file_path(&m.params.file_path)?;
        }
        MetaDataDefinition::GdalMetadataNetCdfCf(m) => {
            m.params.file_path = adjust.adjust_file_path(&m.params.file_path)?;
        }
        MetaDataDefinition::GdalMetaDataList(m) => {
            for p in &mut m.params {
                if let Some(ref mut params) = p.params {
                    params.file_path = adjust.adjust_file_path(&params.file_path)?;
                }
            }
        }
    }
    Ok(())
}

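// A hedged sketch of the effect, assuming the `AdjustFilePath` impl of
// `Upload` resolves file names against the upload's root directory (the
// upload id below is illustrative): an `OgrMetaData` with
// `file_name: "points.csv"` would come out as
// `upload/23c9ea9e-15d6-453b-a243-1390967a5669/points.csv`, while a
// `MockMetaData` is left untouched. The exact rewrite rule is defined by the
// `AdjustFilePath` implementation, not by this function.
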
/// Adds the given tag to the dataset properties.
/// If the tag already exists, it will not be added again.
pub fn add_tag(properties: &mut AddDataset, tag: String) {
    if let Some(ref mut tags) = properties.tags {
        if !tags.contains(&tag) {
            tags.push(tag);
        }
    } else {
        properties.tags = Some(vec![tag]);
    }
}

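// A minimal, self-contained example of `add_tag`'s behavior; all `AddDataset`
// field values below are illustrative only.
#[cfg(test)]
mod add_tag_examples {
    use super::*;

    #[test]
    fn adds_tag_once() {
        let mut properties = AddDataset {
            name: None,
            display_name: "Example".to_string(),
            description: String::new(),
            source_operator: "OgrSource".to_string(),
            symbology: None,
            provenance: None,
            tags: None,
        };

        // `tags` is `None`, so a new vector is created ...
        add_tag(&mut properties, "upload".to_owned());
        assert_eq!(properties.tags, Some(vec!["upload".to_owned()]));

        // ... and adding the same tag a second time is a no-op.
        add_tag(&mut properties, "upload".to_owned());
        assert_eq!(properties.tags, Some(vec!["upload".to_owned()]));
    }
}
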
/// Creates a new dataset using previously uploaded files.
/// The format of the files will be automatically detected when possible.
#[utoipa::path(
    tag = "Datasets",
    post,
    path = "/dataset/auto",
    request_body = AutoCreateDataset,
    responses(
        (status = 200, response = DatasetNameResponse),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Body is invalid json" = (value = json!({
                "error": "BodyDeserializeError",
                "message": "expected `,` or `}` at line 13 column 7"
            }))),
            ("Failed to read body" = (value = json!({
                "error": "Payload",
                "message": "Error that occur during reading payload: Can not decode content-encoding."
            }))),
            ("Referenced an unknown upload" = (value = json!({
                "error": "UnknownUploadId",
                "message": "Unknown upload id"
            }))),
            ("Dataset name is empty" = (value = json!({
                "error": "InvalidDatasetName",
                "message": "Invalid dataset name"
            }))),
            ("Upload filename is invalid" = (value = json!({
                "error": "InvalidUploadFileName",
                "message": "Invalid upload file name"
            }))),
            ("File does not exist" = (value = json!({
                "error": "GdalError",
                "message": "GdalError: GDAL method 'GDALOpenEx' returned a NULL pointer. Error msg: 'upload/0bdd1062-7796-4d44-a655-e548144281a6/asdf: No such file or directory'"
            }))),
            ("Dataset has no auto-importable layer" = (value = json!({
                "error": "DatasetHasNoAutoImportableLayer",
                "message": "Dataset has no auto importable layer"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse),
        (status = 413, response = crate::api::model::responses::PayloadTooLargeResponse),
        (status = 415, response = crate::api::model::responses::UnsupportedMediaTypeForJsonResponse)
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn auto_create_dataset_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    create: ValidatedJson<AutoCreateDataset>,
) -> Result<web::Json<DatasetNameResponse>> {
    let db = app_ctx.session_context(session).db();
    let upload = db.load_upload(create.upload).await?;

    let create = create.into_inner();

    let main_file_path = upload.id.root_path()?.join(&create.main_file);
    let meta_data = auto_detect_vector_meta_data_definition(&main_file_path, &create.layer_name)?;
    let meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

    let properties = AddDataset {
        name: None,
        display_name: create.dataset_name,
        description: create.dataset_description,
        source_operator: meta_data.source_operator_type().to_owned(),
        symbology: None,
        provenance: None,
        tags: Some(vec!["upload".to_owned(), "auto".to_owned()]),
    };

    let result = db.add_dataset(properties.into(), meta_data).await?;

    Ok(web::Json(result.name.into()))
}

/// Inspects an upload and suggests metadata that can be used when creating a new dataset based on it.
/// Tries to automatically detect the main file and layer name if not specified.
#[utoipa::path(
    tag = "Datasets",
    post,
    path = "/dataset/suggest",
    request_body = SuggestMetaData,
    responses(
        (status = 200, description = "OK", body = MetaDataSuggestion,
            example = json!({
                "mainFile": "germany_polygon.gpkg",
                "metaData": {
                    "type": "ogrMetaData",
                    "loadingInfo": {
                        "fileName": "upload/23c9ea9e-15d6-453b-a243-1390967a5669/germany_polygon.gpkg",
                        "layerName": "test_germany",
                        "dataType": "MultiPolygon",
                        "time": {
                            "type": "none"
                        },
                        "defaultGeometry": null,
                        "columns": {
                            "formatSpecifics": null,
                            "x": "",
                            "y": null,
                            "int": [],
                            "float": [],
                            "text": [],
                            "bool": [],
                            "datetime": [],
                            "rename": null
                        },
                        "forceOgrTimeFilter": false,
                        "forceOgrSpatialFilter": false,
                        "onError": "ignore",
                        "sqlQuery": null,
                        "attributeQuery": null
                    },
                    "resultDescriptor": {
                        "dataType": "MultiPolygon",
                        "spatialReference": "EPSG:4326",
                        "columns": {},
                        "time": null,
                        "bbox": null
                    }
                }
            })
        ),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Missing field in query string" = (value = json!({
                "error": "UnableToParseQueryString",
                "message": "Unable to parse query string: missing field `offset`"
            }))),
            ("Number in query string contains letters" = (value = json!({
                "error": "UnableToParseQueryString",
                "message": "Unable to parse query string: invalid digit found in string"
            }))),
            ("Referenced an unknown upload" = (value = json!({
                "error": "UnknownUploadId",
                "message": "Unknown upload id"
            }))),
            ("No suitable main file found" = (value = json!({
                "error": "NoMainFileCandidateFound",
                "message": "No main file candidate found"
            }))),
            ("File does not exist" = (value = json!({
                "error": "GdalError",
                "message": "GdalError: GDAL method 'GDALOpenEx' returned a NULL pointer. Error msg: 'upload/0bdd1062-7796-4d44-a655-e548144281a6/asdf: No such file or directory'"
            }))),
            ("Dataset has no auto-importable layer" = (value = json!({
                "error": "DatasetHasNoAutoImportableLayer",
                "message": "Dataset has no auto importable layer"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn suggest_meta_data_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    suggest: web::Json<SuggestMetaData>,
) -> Result<impl Responder> {
    let suggest = suggest.into_inner();

    let (root_path, main_file) = match suggest.data_path {
        DataPath::Upload(upload) => {
            let upload = app_ctx
                .session_context(session)
                .db()
                .load_upload(upload)
                .await?;

            let main_file = suggest
                .main_file
                .or_else(|| suggest_main_file(&upload))
                .ok_or(error::Error::NoMainFileCandidateFound)?;

            let root_path = upload.id.root_path()?;

            (root_path, main_file)
        }
        DataPath::Volume(volume) => {
            let main_file = suggest
                .main_file
                .ok_or(error::Error::NoMainFileCandidateFound)?;

            let volumes = Volumes::default();

            let root_path = volumes.volumes.iter().find(|v| v.name == volume).ok_or(
                crate::error::Error::UnknownVolumeName {
                    volume_name: volume.0,
                },
            )?;

            (root_path.path.clone(), main_file)
        }
    };

    let layer_name = suggest.layer_name;

    let main_file_path = path_with_base_path(&root_path, Path::new(&main_file))?;

    let dataset = gdal_open_dataset(&main_file_path)?;

    if dataset.layer_count() > 0 {
        let meta_data = auto_detect_vector_meta_data_definition(&main_file_path, &layer_name)?;

        let layer_name = meta_data.loading_info.layer_name.clone();

        let meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        Ok(web::Json(MetaDataSuggestion {
            main_file,
            layer_name,
            meta_data: meta_data.into(),
        }))
    } else {
        let mut gdal_params =
            gdal_parameters_from_dataset(&dataset, 1, &main_file_path, None, None)?;
        if let Ok(relative_path) = gdal_params.file_path.strip_prefix(root_path) {
            gdal_params.file_path = relative_path.to_path_buf();
        }
        let result_descriptor = raster_descriptor_from_dataset(&dataset, 1)?;

        Ok(web::Json(MetaDataSuggestion {
            main_file,
            layer_name: String::new(),
            meta_data: MetaDataDefinition::GdalMetaDataList(GdalMetaDataList {
                r#type: Default::default(),
                result_descriptor: result_descriptor.into(),
                params: vec![GdalLoadingInfoTemporalSlice {
                    time: TimeInterval::default().into(),
                    params: Some(gdal_params.into()),
                    cache_ttl: CacheTtlSeconds::default().into(),
                }],
            }),
        }))
    }
}

/// Suggests the main file of an upload: a single file is returned directly;
/// otherwise, the largest file with a known extension is chosen.
fn suggest_main_file(upload: &Upload) -> Option<String> {
    let known_extensions = ["csv", "shp", "json", "geojson", "gpkg", "sqlite"]; // TODO: rasters

    if upload.files.len() == 1 {
        return Some(upload.files[0].name.clone());
    }

    let mut sorted_files = upload.files.clone();
    sorted_files.sort_by(|a, b| b.byte_size.cmp(&a.byte_size));

    for file in sorted_files {
        if known_extensions.iter().any(|ext| file.name.ends_with(ext)) {
            return Some(file.name);
        }
    }
    None
}

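// Example of the heuristic above (file names and sizes are made up): for an
// upload containing `points.gpkg` (2 MiB) and `readme.txt` (1 KiB), the files
// are sorted by descending size and `points.gpkg` is returned as the largest
// file with a known extension. A single-file upload short-circuits and
// returns that file regardless of its extension.
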
#[allow(clippy::ref_option)]
fn select_layer_from_dataset<'a>(
    dataset: &'a gdal::Dataset,
    layer_name: &Option<String>,
) -> Result<Layer<'a>> {
    if let Some(ref layer_name) = layer_name {
        dataset.layer_by_name(layer_name).map_err(|_| {
            crate::error::Error::DatasetInvalidLayerName {
                layer_name: layer_name.clone(),
            }
        })
    } else {
        dataset
            .layer(0)
            .map_err(|_| crate::error::Error::DatasetHasNoAutoImportableLayer)
    }
}

#[allow(clippy::ref_option)]
fn auto_detect_vector_meta_data_definition(
    main_file_path: &Path,
    layer_name: &Option<String>,
) -> Result<StaticMetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>> {
    let dataset = gdal_open_dataset(main_file_path)?;

    auto_detect_vector_meta_data_definition_from_dataset(&dataset, main_file_path, layer_name)
}

#[allow(clippy::ref_option)]
fn auto_detect_vector_meta_data_definition_from_dataset(
    dataset: &gdal::Dataset,
    main_file_path: &Path,
    layer_name: &Option<String>,
) -> Result<StaticMetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>> {
    let layer = select_layer_from_dataset(dataset, layer_name)?;

    let columns_map = detect_columns(&layer);
    let columns_vecs = column_map_to_column_vecs(&columns_map);

    let mut geometry = detect_vector_geometry(&layer);
    let mut x = String::new();
    let mut y: Option<String> = None;

    if geometry.data_type == VectorDataType::Data {
        // help GDAL detect the geometry
        if let Some(auto_detect) = gdal_autodetect(main_file_path, &columns_vecs.text) {
            let layer = select_layer_from_dataset(&auto_detect.dataset, layer_name)?;
            geometry = detect_vector_geometry(&layer);
            if geometry.data_type != VectorDataType::Data {
                x = auto_detect.x;
                y = auto_detect.y;
            }
        }
    }

    let time = detect_time_type(&columns_vecs);

    Ok(StaticMetaData::<_, _, VectorQueryRectangle> {
        loading_info: OgrSourceDataset {
            file_name: main_file_path.into(),
            layer_name: geometry.layer_name.unwrap_or_else(|| layer.name()),
            data_type: Some(geometry.data_type),
            time,
            default_geometry: None,
            columns: Some(OgrSourceColumnSpec {
                format_specifics: None,
                x,
                y,
                int: columns_vecs.int,
                float: columns_vecs.float,
                text: columns_vecs.text,
                bool: vec![],
                datetime: columns_vecs.date,
                rename: None,
            }),
            force_ogr_time_filter: false,
            force_ogr_spatial_filter: false,
            on_error: OgrSourceErrorSpec::Ignore,
            sql_query: None,
            attribute_query: None,
            cache_ttl: CacheTtlSeconds::default(),
        },
        result_descriptor: VectorResultDescriptor {
            data_type: geometry.data_type,
            spatial_reference: geometry.spatial_reference,
            columns: columns_map
                .into_iter()
                .filter_map(|(k, v)| {
                    v.try_into()
                        .map(|v| {
                            (
                                k,
                                VectorColumnInfo {
                                    data_type: v,
                                    measurement: Measurement::Unitless,
                                },
                            )
                        })
                        .ok()
                }) // ignore all columns here that don't have a corresponding type in our collections
                .collect(),
            time: None,
            bbox: None,
        },
        phantom: Default::default(),
    })
}

/// Creates a GDAL dataset with auto-detect parameters based on the available columns.
fn gdal_autodetect(path: &Path, columns: &[String]) -> Option<GdalAutoDetect> {
    let columns_lower = columns.iter().map(|s| s.to_lowercase()).collect::<Vec<_>>();

    // TODO: load candidates from config
    let xy = [("x", "y"), ("lon", "lat"), ("longitude", "latitude")];

    for (x, y) in xy {
        let mut found_x = None;
        let mut found_y = None;

        for (column_lower, column) in columns_lower.iter().zip(columns) {
            if x == column_lower {
                found_x = Some(column);
            }

            if y == column_lower {
                found_y = Some(column);
            }

            if let (Some(x), Some(y)) = (found_x, found_y) {
                let mut dataset_options = DatasetOptions::default();

                let open_opts = &[
                    &format!("X_POSSIBLE_NAMES={x}"),
                    &format!("Y_POSSIBLE_NAMES={y}"),
                    "AUTODETECT_TYPE=YES",
                ];

                dataset_options.open_options = Some(open_opts);

                return gdal_open_dataset_ex(path, dataset_options)
                    .ok()
                    .map(|dataset| GdalAutoDetect {
                        dataset,
                        x: x.clone(),
                        y: Some(y.clone()),
                    });
            }
        }
    }

    // TODO: load candidates from config
    let geoms = ["geom", "wkt"];
    for geom in geoms {
        for (column_lower, column) in columns_lower.iter().zip(columns) {
            if geom == column_lower {
                let mut dataset_options = DatasetOptions::default();

                let open_opts = &[
                    &format!("GEOM_POSSIBLE_NAMES={column}"),
                    "AUTODETECT_TYPE=YES",
                ];

                dataset_options.open_options = Some(open_opts);

                return gdal_open_dataset_ex(path, dataset_options)
                    .ok()
                    .map(|dataset| GdalAutoDetect {
                        dataset,
                        x: geom.to_owned(),
                        y: None,
                    });
            }
        }
    }

    None
}

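// `X_POSSIBLE_NAMES`, `Y_POSSIBLE_NAMES`, `GEOM_POSSIBLE_NAMES`, and
// `AUTODETECT_TYPE` are open options of GDAL's CSV (OGR) driver: for a table
// with `lon`/`lat` columns in any casing, the loop above re-opens the file
// with `X_POSSIBLE_NAMES=lon` and `Y_POSSIBLE_NAMES=lat` so that OGR exposes
// a point geometry instead of plain attribute data.
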
/// Detects a time column configuration from well-known start, end, and duration column names.
fn detect_time_type(columns: &Columns) -> OgrSourceDatasetTimeType {
    // TODO: load candidate names from config
    let known_start = [
        "start",
        "time",
        "begin",
        "date",
        "time_start",
        "start time",
        "date_start",
        "start date",
        "datetime",
        "date_time",
        "date time",
        "event",
        "timestamp",
        "time_from",
        "t1",
        "t",
    ];
    let known_end = [
        "end",
        "stop",
        "time2",
        "date2",
        "time_end",
        "time_stop",
        "time end",
        "time stop",
        "end time",
        "stop time",
        "date_end",
        "date_stop",
        "date end",
        "date stop",
        "end date",
        "stop date",
        "time_to",
        "t2",
    ];
    let known_duration = ["duration", "length", "valid for", "valid_for"];

    let mut start = None;
    let mut end = None;
    for column in &columns.date {
        if known_start.contains(&column.as_ref()) && start.is_none() {
            start = Some(column);
        } else if known_end.contains(&column.as_ref()) && end.is_none() {
            end = Some(column);
        }

        if start.is_some() && end.is_some() {
            break;
        }
    }

    let duration = columns
        .int
        .iter()
        .find(|c| known_duration.contains(&c.as_ref()));

    match (start, end, duration) {
        (Some(start), Some(end), _) => OgrSourceDatasetTimeType::StartEnd {
            start_field: start.clone(),
            start_format: OgrSourceTimeFormat::Auto,
            end_field: end.clone(),
            end_format: OgrSourceTimeFormat::Auto,
        },
        (Some(start), None, Some(duration)) => OgrSourceDatasetTimeType::StartDuration {
            start_field: start.clone(),
            start_format: OgrSourceTimeFormat::Auto,
            duration_field: duration.clone(),
        },
        (Some(start), None, None) => OgrSourceDatasetTimeType::Start {
            start_field: start.clone(),
            start_format: OgrSourceTimeFormat::Auto,
            duration: OgrSourceDurationSpec::Zero,
        },
        _ => OgrSourceDatasetTimeType::None,
    }
}

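// A small sketch of the detection rules above on hand-built column sets; the
// column names are taken from the `known_*` lists.
#[cfg(test)]
mod detect_time_type_examples {
    use super::*;

    #[test]
    fn detects_start_end_and_start_duration() {
        // two datetime columns named like a start and an end => StartEnd
        let columns = Columns {
            int: vec![],
            float: vec![],
            text: vec![],
            date: vec!["start".to_owned(), "end".to_owned()],
        };
        assert!(matches!(
            detect_time_type(&columns),
            OgrSourceDatasetTimeType::StartEnd { .. }
        ));

        // a start column plus an integer duration column => StartDuration
        let columns = Columns {
            int: vec!["duration".to_owned()],
            float: vec![],
            text: vec![],
            date: vec!["start".to_owned()],
        };
        assert!(matches!(
            detect_time_type(&columns),
            OgrSourceDatasetTimeType::StartDuration { .. }
        ));
    }
}
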
fn detect_vector_geometry(layer: &Layer) -> DetectedGeometry {
    for g in layer.defn().geom_fields() {
        if let Ok(data_type) = VectorDataType::try_from_ogr_type_code(g.field_type()) {
            return DetectedGeometry {
                layer_name: Some(layer.name()),
                data_type,
                spatial_reference: g
                    .spatial_ref()
                    .context(error::Gdal)
                    .and_then(|s| {
                        let s: Result<SpatialReference> = s.try_into().map_err(Into::into);
                        s
                    })
                    .map(Into::into)
                    .unwrap_or(SpatialReferenceOption::Unreferenced),
            };
        }
    }

    // fallback type if no geometry was found
    DetectedGeometry {
        layer_name: Some(layer.name()),
        data_type: VectorDataType::Data,
        spatial_reference: SpatialReferenceOption::Unreferenced,
    }
}

struct GdalAutoDetect {
    dataset: gdal::Dataset,
    x: String,
    y: Option<String>,
}

struct DetectedGeometry {
    layer_name: Option<String>,
    data_type: VectorDataType,
    spatial_reference: SpatialReferenceOption,
}

struct Columns {
    int: Vec<String>,
    float: Vec<String>,
    text: Vec<String>,
    date: Vec<String>,
}

enum ColumnDataType {
    Int,
    Float,
    Text,
    Date,
    Unknown,
}

impl TryFrom<ColumnDataType> for FeatureDataType {
    type Error = error::Error;

    fn try_from(value: ColumnDataType) -> Result<Self, Self::Error> {
        match value {
            ColumnDataType::Int => Ok(Self::Int),
            ColumnDataType::Float => Ok(Self::Float),
            ColumnDataType::Text => Ok(Self::Text),
            ColumnDataType::Date => Ok(Self::DateTime),
            ColumnDataType::Unknown => Err(error::Error::NoFeatureDataTypeForColumnDataType),
        }
    }
}

impl TryFrom<ColumnDataType> for crate::api::model::datatypes::FeatureDataType {
    type Error = error::Error;

    fn try_from(value: ColumnDataType) -> Result<Self, Self::Error> {
        match value {
            ColumnDataType::Int => Ok(Self::Int),
            ColumnDataType::Float => Ok(Self::Float),
            ColumnDataType::Text => Ok(Self::Text),
            ColumnDataType::Date => Ok(Self::DateTime),
            ColumnDataType::Unknown => Err(error::Error::NoFeatureDataTypeForColumnDataType),
        }
    }
}

fn detect_columns(layer: &Layer) -> HashMap<String, ColumnDataType> {
    let mut columns = HashMap::default();

    for field in layer.defn().fields() {
        let field_type = field.field_type();

        let data_type = match field_type {
            OGRFieldType::OFTInteger | OGRFieldType::OFTInteger64 => ColumnDataType::Int,
            OGRFieldType::OFTReal => ColumnDataType::Float,
            OGRFieldType::OFTString => ColumnDataType::Text,
            OGRFieldType::OFTDate | OGRFieldType::OFTDateTime => ColumnDataType::Date,
            _ => ColumnDataType::Unknown,
        };

        columns.insert(field.name(), data_type);
    }

    columns
}

fn column_map_to_column_vecs(columns: &HashMap<String, ColumnDataType>) -> Columns {
7✔
1165
    let mut int = Vec::new();
7✔
1166
    let mut float = Vec::new();
7✔
1167
    let mut text = Vec::new();
7✔
1168
    let mut date = Vec::new();
7✔
1169

1170
    for (k, v) in columns {
25✔
1171
        match v {
18✔
1172
            ColumnDataType::Int => int.push(k.clone()),
3✔
1173
            ColumnDataType::Float => float.push(k.clone()),
1✔
1174
            ColumnDataType::Text => text.push(k.clone()),
7✔
1175
            ColumnDataType::Date => date.push(k.clone()),
7✔
1176
            ColumnDataType::Unknown => {}
×
1177
        }
1178
    }
1179

1180
    Columns {
7✔
1181
        int,
7✔
1182
        float,
7✔
1183
        text,
7✔
1184
        date,
7✔
1185
    }
7✔
1186
}
7✔
1187

/// Deletes a dataset.
#[utoipa::path(
    tag = "Datasets",
    delete,
    path = "/dataset/{dataset}",
    responses(
        (status = 200, description = "OK"),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Referenced an unknown dataset" = (value = json!({
                "error": "UnknownDatasetName",
                "message": "Unknown dataset name"
            }))),
            ("Given dataset can only be deleted by owner" = (value = json!({
                "error": "OperationRequiresOwnerPermission",
                "message": "Operation requires owner permission"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    params(
        ("dataset" = DatasetName, description = "Dataset Name")
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn delete_dataset_handler<C: ApplicationContext>(
    dataset: web::Path<DatasetName>,
    session: C::Session,
    app_ctx: web::Data<C>,
) -> Result<HttpResponse> {
    let session_ctx = app_ctx.session_context(session).db();

    let real_dataset = dataset.into_inner();

    let dataset_id = session_ctx
        .resolve_dataset_name_to_id(&real_dataset)
        .await?;

    // handle the case where the dataset name is not known
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
        dataset_name: real_dataset.to_string(),
    })?;

    session_ctx.delete_dataset(dataset_id).await?;

    Ok(actix_web::HttpResponse::Ok().finish())
}

#[derive(Deserialize, Serialize, ToSchema, ToResponse)]
pub struct VolumeFileLayersResponse {
    layers: Vec<String>,
}

/// Lists the layers of a file in a volume.
#[utoipa::path(
    tag = "Datasets",
    get,
    path = "/dataset/volumes/{volume_name}/files/{file_name}/layers",
    responses(
        (status = 200, body = VolumeFileLayersResponse,
            example = json!({"layers": ["layer1", "layer2"]}))
    ),
    params(
        ("volume_name" = VolumeName, description = "Volume name"),
        ("file_name" = String, description = "File name")
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn list_volume_file_layers_handler<C: ApplicationContext>(
    path: web::Path<(VolumeName, String)>,
    session: C::Session,
    app_ctx: web::Data<C>,
) -> Result<impl Responder> {
    let (volume_name, file_name) = path.into_inner();

    let session_ctx = app_ctx.session_context(session);
    let volumes = session_ctx.volumes()?;

    let volume = volumes.iter().find(|v| v.name == volume_name.0).ok_or(
        crate::error::Error::UnknownVolumeName {
            volume_name: volume_name.0.clone(),
        },
    )?;

    let Some(volume_path) = volume.path.as_ref() else {
        return Err(crate::error::Error::CannotAccessVolumePath {
            volume_name: volume_name.0.clone(),
        });
    };

    let file_path = path_with_base_path(Path::new(volume_path), Path::new(&file_name))?;

    let layers = crate::util::spawn_blocking(move || {
        let dataset = gdal_open_dataset(&file_path)?;

        // TODO: hide system/internal layers like "layer_styles"
        Result::<_, Error>::Ok(dataset.layers().map(|l| l.name()).collect::<Vec<_>>())
    })
    .await??;

    Ok(web::Json(VolumeFileLayersResponse { layers }))
}

/// Creates a new dataset referencing files.
/// Users can reference previously uploaded files.
/// Admins can reference files from a volume.
#[utoipa::path(
    tag = "Datasets",
    post,
    path = "/dataset",
    request_body = CreateDataset,
    responses(
        (status = 200, response = DatasetNameResponse),
    ),
    security(
        ("session_token" = [])
    )
)]
async fn create_dataset_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    create: web::Json<CreateDataset>,
) -> Result<web::Json<DatasetNameResponse>, CreateDatasetError> {
    let create = create.into_inner();
    match create {
        CreateDataset {
            data_path: DataPath::Volume(volume),
            definition,
        } => create_system_dataset(session, app_ctx, volume, definition).await,
        CreateDataset {
            data_path: DataPath::Upload(upload),
            definition,
        } => create_upload_dataset(session, app_ctx, upload, definition).await,
    }
}

async fn create_system_dataset<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    volume_name: VolumeName,
    mut definition: DatasetDefinition,
) -> Result<web::Json<DatasetNameResponse>, CreateDatasetError> {
    let volumes = get_config_element::<Data>()
        .context(CannotAccessConfig)?
        .volumes;
    let volume_path = volumes
        .get(&volume_name)
        .ok_or(CreateDatasetError::UnknownVolume)?;
    let volume = Volume {
        name: volume_name,
        path: volume_path.clone(),
    };

    adjust_meta_data_path(&mut definition.meta_data, &volume)
        .context(CannotResolveUploadFilePath)?;

    let db = app_ctx.session_context(session).db();

    let dataset = db
        .add_dataset(definition.properties.into(), definition.meta_data.into())
        .await
        .context(CannotCreateDataset)?;

    db.add_permission(
        Role::registered_user_role_id(),
        dataset.id,
        Permission::Read,
    )
    .await
    .boxed_context(crate::error::PermissionDb)
    .context(DatabaseAccess)?;

    db.add_permission(Role::anonymous_role_id(), dataset.id, Permission::Read)
        .await
        .boxed_context(crate::error::PermissionDb)
        .context(DatabaseAccess)?;

    Ok(web::Json(dataset.name.into()))
}

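// Note: datasets created from a volume are effectively public. `Read`
// permission is granted to both the registered-user role and the anonymous
// role right after the dataset is added above.
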
1371
#[cfg(test)]
mod tests {
    use super::*;
    use crate::api::model::datatypes::NamedData;
    use crate::api::model::responses::datasets::DatasetNameResponse;
    use crate::api::model::responses::IdResponse;
    use crate::api::model::services::{DatasetDefinition, Provenance};
    use crate::contexts::PostgresContext;
    use crate::contexts::{Session, SessionId};
    use crate::datasets::storage::DatasetStore;
    use crate::datasets::upload::{UploadId, VolumeName};
    use crate::datasets::DatasetIdAndName;
    use crate::error::Result;
    use crate::ge_context;
    use crate::projects::{PointSymbology, RasterSymbology, Symbology};
    use crate::test_data;
    use crate::users::UserAuth;
    use crate::util::tests::admin_login;
    use crate::util::tests::{
        add_file_definition_to_datasets, read_body_json, read_body_string, send_test_request,
        MockQueryContext, SetMultipartBody, TestDataUploads,
    };
    use actix_web;
    use actix_web::http::header;
    use actix_web_httpauth::headers::authorization::Bearer;
    use futures::TryStreamExt;
    use geoengine_datatypes::collections::{
        GeometryCollection, MultiPointCollection, VectorDataType,
    };
    use geoengine_datatypes::operations::image::{RasterColorizer, RgbaColor};
    use geoengine_datatypes::primitives::{BoundingBox2D, ColumnSelection, SpatialResolution};
    use geoengine_datatypes::raster::{GridShape2D, TilingSpecification};
    use geoengine_datatypes::spatial_reference::SpatialReferenceOption;
    use geoengine_operators::engine::{
        ExecutionContext, InitializedVectorOperator, QueryProcessor, StaticMetaData,
        VectorOperator, VectorResultDescriptor, WorkflowOperatorPath,
    };
    use geoengine_operators::source::{
        OgrSource, OgrSourceDataset, OgrSourceErrorSpec, OgrSourceParameters,
    };
    use geoengine_operators::util::gdal::create_ndvi_meta_data;
    use serde_json::{json, Value};
    use tokio_postgres::NoTls;

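    // Adds two datasets (the second with a default point symbology) and
    // checks that `GET /datasets` returns them ordered by name with the
    // expected JSON serialization.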
    #[ge_context::test]
    #[allow(clippy::too_many_lines)]
    async fn test_list_datasets(app_ctx: PostgresContext<NoTls>) {
        let session = admin_login(&app_ctx).await;
        let ctx = app_ctx.session_context(session.clone());

        let descriptor = VectorResultDescriptor {
            data_type: VectorDataType::MultiPoint,
            spatial_reference: SpatialReferenceOption::Unreferenced,
            columns: Default::default(),
            time: None,
            bbox: None,
        };

        let ds = AddDataset {
            name: Some(DatasetName::new(None, "My_Dataset")),
            display_name: "OgrDataset".to_string(),
            description: "My Ogr dataset".to_string(),
            source_operator: "OgrSource".to_string(),
            symbology: None,
            provenance: None,
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
        };

        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
            loading_info: OgrSourceDataset {
                file_name: Default::default(),
                layer_name: String::new(),
                data_type: None,
                time: Default::default(),
                default_geometry: None,
                columns: None,
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            },
            result_descriptor: descriptor.clone(),
            phantom: Default::default(),
        });

        let db = ctx.db();
        let DatasetIdAndName { id: id1, name: _ } = db.add_dataset(ds.into(), meta).await.unwrap();

        let ds = AddDataset {
            name: Some(DatasetName::new(None, "My_Dataset2")),
            display_name: "OgrDataset2".to_string(),
            description: "My Ogr dataset2".to_string(),
            source_operator: "OgrSource".to_string(),
            symbology: Some(Symbology::Point(PointSymbology::default())),
            provenance: None,
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
        };

        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
            loading_info: OgrSourceDataset {
                file_name: Default::default(),
                layer_name: String::new(),
                data_type: None,
                time: Default::default(),
                default_geometry: None,
                columns: None,
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            },
            result_descriptor: descriptor,
            phantom: Default::default(),
        });

        let DatasetIdAndName { id: id2, name: _ } = db.add_dataset(ds.into(), meta).await.unwrap();

        let req = actix_web::test::TestRequest::get()
            .uri(&format!(
                "/datasets?{}",
                &serde_urlencoded::to_string([
                    ("order", "NameAsc"),
                    ("offset", "0"),
                    ("limit", "2"),
                ])
                .unwrap()
            ))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
        let res = send_test_request(req, app_ctx).await;

        assert_eq!(res.status(), 200);

        assert_eq!(
            read_body_json(res).await,
            json!([{
                "id": id1,
                "name": "My_Dataset",
                "displayName": "OgrDataset",
                "description": "My Ogr dataset",
                "tags": ["upload", "test"],
                "sourceOperator": "OgrSource",
                "resultDescriptor": {
                    "type": "vector",
                    "dataType": "MultiPoint",
                    "spatialReference": "",
                    "columns": {},
                    "time": null,
                    "bbox": null
                },
                "symbology": null
            }, {
                "id": id2,
                "name": "My_Dataset2",
                "displayName": "OgrDataset2",
                "description": "My Ogr dataset2",
                "tags": ["upload", "test"],
                "sourceOperator": "OgrSource",
                "resultDescriptor": {
                    "type": "vector",
                    "dataType": "MultiPoint",
                    "spatialReference": "",
                    "columns": {},
                    "time": null,
                    "bbox": null
                },
                "symbology": {
                    "type": "point",
                    "radius": {
                        "type": "static",
                        "value": 10
                    },
                    "fillColor": {
                        "type": "static",
                        "color": [255, 255, 255, 255]
                    },
                    "stroke": {
                        "width": {
                            "type": "static",
                            "value": 1
                        },
                        "color": {
                            "type": "static",
                            "color": [0, 0, 0, 255]
                        }
                    },
                    "text": null
                }
            }])
        );
    }

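    /// Uploads the `ne_10m_ports` shapefile together with all of its sidecar
    /// files and verifies that each file ends up in the upload's root
    /// directory.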
    async fn upload_ne_10m_ports_files(
        app_ctx: PostgresContext<NoTls>,
        session_id: SessionId,
    ) -> Result<UploadId> {
        let files = vec![
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp").to_path_buf(),
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.shx").to_path_buf(),
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.prj").to_path_buf(),
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.dbf").to_path_buf(),
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.cpg").to_path_buf(),
        ];

        let req = actix_web::test::TestRequest::post()
            .uri("/upload")
            .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string())))
            .set_multipart_files(&files);
        let res = send_test_request(req, app_ctx).await;
        assert_eq!(res.status(), 200);

        let upload: IdResponse<UploadId> = actix_web::test::read_body_json(res).await;
        let root = upload.id.root_path()?;

        for file in files {
            let file_name = file.file_name().unwrap();
            assert!(root.join(file_name).exists());
        }

        Ok(upload.id)
    }

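    /// Creates a dataset from a previous upload by posting a complete
    /// `CreateDataset` JSON document to `POST /dataset` and returns the
    /// resulting dataset name.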
    pub async fn construct_dataset_from_upload(
        app_ctx: PostgresContext<NoTls>,
        upload_id: UploadId,
        session_id: SessionId,
    ) -> DatasetName {
        let s = json!({
            "dataPath": {
                "upload": upload_id
            },
            "definition": {
                "properties": {
                    "name": null,
                    "displayName": "Uploaded Natural Earth 10m Ports",
                    "description": "Ports from Natural Earth",
                    "sourceOperator": "OgrSource"
                },
                "metaData": {
                    "type": "ogrMetaData",
                    "loadingInfo": {
                        "fileName": "ne_10m_ports.shp",
                        "layerName": "ne_10m_ports",
                        "dataType": "MultiPoint",
                        "time": {
                            "type": "none"
                        },
                        "columns": {
                            "x": "",
                            "y": null,
                            "float": ["natlscale"],
                            "int": ["scalerank"],
                            "text": ["featurecla", "name", "website"],
                            "bool": [],
                            "datetime": []
                        },
                        "forceOgrTimeFilter": false,
                        "onError": "ignore",
                        "provenance": null
                    },
                    "resultDescriptor": {
                        "dataType": "MultiPoint",
                        "spatialReference": "EPSG:4326",
                        "columns": {
                            "website": {
                                "dataType": "text",
                                "measurement": {
                                    "type": "unitless"
                                }
                            },
                            "name": {
                                "dataType": "text",
                                "measurement": {
                                    "type": "unitless"
                                }
                            },
                            "natlscale": {
                                "dataType": "float",
                                "measurement": {
                                    "type": "unitless"
                                }
                            },
                            "scalerank": {
                                "dataType": "int",
                                "measurement": {
                                    "type": "unitless"
                                }
                            },
                            "featurecla": {
                                "dataType": "text",
                                "measurement": {
                                    "type": "unitless"
                                }
                            }
                        }
                    }
                }
            }
        });

        let req = actix_web::test::TestRequest::post()
            .uri("/dataset")
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string())))
            .set_json(s);
        let res = send_test_request(req, app_ctx).await;
        assert_eq!(res.status(), 200, "response: {res:?}");

        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
        dataset_name
    }

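    /// Initializes an `OgrSource` operator for the given named data so that
    /// tests can query the features of a freshly created dataset.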
    async fn make_ogr_source<C: ExecutionContext>(
        exe_ctx: &C,
        named_data: NamedData,
    ) -> Result<Box<dyn InitializedVectorOperator>> {
        OgrSource {
            params: OgrSourceParameters {
                data: named_data.into(),
                attribute_projection: None,
                attribute_filters: None,
            },
        }
        .boxed()
        .initialize(WorkflowOperatorPath::initialize_root(), exe_ctx)
        .await
        .map_err(Into::into)
    }

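    // Creates a volume-backed (system) dataset via the handler and verifies
    // that it is retrievable by name afterwards.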
    #[ge_context::test]
    async fn it_creates_system_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = app_ctx.create_anonymous_session().await.unwrap();

        let volume = VolumeName("test_data".to_string());

        let mut meta_data = create_ndvi_meta_data();

        // make path relative to volume
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();

        let create = CreateDataset {
            data_path: DataPath::Volume(volume.clone()),
            definition: DatasetDefinition {
                properties: AddDataset {
                    name: None,
                    display_name: "ndvi".to_string(),
                    description: "ndvi".to_string(),
                    source_operator: "GdalSource".to_string(),
                    symbology: None,
                    provenance: None,
                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
                },
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
            },
        };

        // create the dataset
        let req = actix_web::test::TestRequest::post()
            .uri("/dataset")
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"))
            .set_payload(serde_json::to_string(&create)?);
        let res = send_test_request(req, app_ctx.clone()).await;
        assert_eq!(res.status(), 200);

        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;

        // assert dataset is accessible via regular session
        let req = actix_web::test::TestRequest::get()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"))
            .set_payload(serde_json::to_string(&create)?);

        let res = send_test_request(req, app_ctx.clone()).await;
        assert_eq!(res.status(), 200);

        Ok(())
    }

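    // The tests below exercise `auto_detect_vector_meta_data_definition`.
    // Detected column names are sorted before comparison so that the
    // assertions do not depend on the field order reported by OGR.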
    #[test]
    fn it_auto_detects() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp"),
            &None,
        )
        .unwrap();
        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.text.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp").into(),
                    layer_name: "ne_10m_ports".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::None,
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        int: vec!["scalerank".to_string()],
                        float: vec!["natlscale".to_string()],
                        text: vec![
                            "featurecla".to_string(),
                            "name".to_string(),
                            "website".to_string(),
                        ],
                        bool: vec![],
                        datetime: vec![],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "name".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "scalerank".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Int,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "website".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "natlscale".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Float,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "featurecla".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            })
        );
    }

    #[test]
    fn it_detects_time_json() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_iso_time.json"),
            &None,
        )
        .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.datetime.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_iso_time.json").into(),
                    layer_name: "points_with_iso_time".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartEnd {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        end_field: "time_end".to_owned(),
                        end_format: OgrSourceTimeFormat::Auto,
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec![],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_end".to_owned(), "time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "time_end".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default()
            })
        );
    }

    #[test]
    fn it_detects_time_gpkg() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_time.gpkg"),
            &None,
        )
        .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.datetime.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_time.gpkg").into(),
                    layer_name: "points_with_time".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartEnd {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        end_field: "time_end".to_owned(),
                        end_format: OgrSourceTimeFormat::Auto,
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec![],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_end".to_owned(), "time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "time_end".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            })
        );
    }

    #[test]
    fn it_detects_time_shp() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_date.shp"),
            &None,
        )
        .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.datetime.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_date.shp").into(),
                    layer_name: "points_with_date".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartEnd {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        end_field: "time_end".to_owned(),
                        end_format: OgrSourceTimeFormat::Auto,
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec![],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_end".to_owned(), "time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_end".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            })
        );
    }

    #[test]
    fn it_detects_time_start_duration() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_iso_start_duration.json"),
            &None,
        )
        .unwrap();

        let meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_iso_start_duration.json").into(),
                    layer_name: "points_with_iso_start_duration".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartDuration {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        duration_field: "duration".to_owned(),
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec!["duration".to_owned()],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "duration".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Int,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default()
            })
        );
    }

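    // For CSV input, every field (including the coordinate columns
    // `Longitude` and `Latitude`) is detected as text and no spatial
    // reference is inferred, hence the unreferenced result descriptor below.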
    #[test]
    fn it_detects_csv() {
        let meta_data =
            auto_detect_vector_meta_data_definition(test_data!("vector/data/lonlat.csv"), &None)
                .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.text.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/lonlat.csv").into(),
                    layer_name: "lonlat".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::None,
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: "Longitude".to_string(),
                        y: Some("Latitude".to_string()),
                        float: vec![],
                        int: vec![],
                        text: vec![
                            "Latitude".to_string(),
                            "Longitude".to_string(),
                            "Name".to_string()
                        ],
                        bool: vec![],
                        datetime: vec![],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReferenceOption::Unreferenced,
                    columns: [
                        (
                            "Latitude".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "Longitude".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "Name".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default()
            })
        );
    }

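    // Fetching a single dataset by name returns its complete JSON document,
    // including the result descriptor, provenance, and tags.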
    #[ge_context::test]
    async fn get_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = app_ctx.create_anonymous_session().await.unwrap();
        let ctx = app_ctx.session_context(session.clone());

        let descriptor = VectorResultDescriptor {
            data_type: VectorDataType::Data,
            spatial_reference: SpatialReferenceOption::Unreferenced,
            columns: Default::default(),
            time: None,
            bbox: None,
        };

        let ds = AddDataset {
            name: None,
            display_name: "OgrDataset".to_string(),
            description: "My Ogr dataset".to_string(),
            source_operator: "OgrSource".to_string(),
            symbology: None,
            provenance: None,
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
        };

        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
            loading_info: OgrSourceDataset {
                file_name: Default::default(),
                layer_name: String::new(),
                data_type: None,
                time: Default::default(),
                default_geometry: None,
                columns: None,
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            },
            result_descriptor: descriptor,
            phantom: Default::default(),
        });

        let db = ctx.db();
        let DatasetIdAndName {
            id,
            name: dataset_name,
        } = db.add_dataset(ds.into(), meta).await?;

        let req = actix_web::test::TestRequest::get()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
        let res = send_test_request(req, app_ctx).await;

        let res_status = res.status();
        let res_body = serde_json::from_str::<Value>(&read_body_string(res).await).unwrap();
        assert_eq!(res_status, 200, "{res_body}");

        assert_eq!(
            res_body,
            json!({
                "name": dataset_name,
                "id": id,
                "displayName": "OgrDataset",
                "description": "My Ogr dataset",
                "resultDescriptor": {
                    "type": "vector",
                    "dataType": "Data",
                    "spatialReference": "",
                    "columns": {},
                    "time": null,
                    "bbox": null
                },
                "sourceOperator": "OgrSource",
                "symbology": null,
                "provenance": null,
                "tags": ["upload", "test"],
            })
        );

        Ok(())
    }

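    // Uploads a small GeoJSON file and checks that `POST /dataset/suggest`
    // derives a full OGR metadata definition (columns, data type, and result
    // descriptor) from the uploaded file.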
    #[ge_context::test]
    #[allow(clippy::too_many_lines)]
    async fn it_suggests_metadata(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let mut test_data = TestDataUploads::default(); // remember created folders and remove them on drop

        let session = app_ctx.create_anonymous_session().await.unwrap();

        let body = vec![(
            "test.json",
            r#"{
                "type": "FeatureCollection",
                "features": [
                  {
                    "type": "Feature",
                    "geometry": {
                      "type": "Point",
                      "coordinates": [
                        1,
                        1
                      ]
                    },
                    "properties": {
                      "name": "foo",
                      "id": 1
                    }
                  },
                  {
                    "type": "Feature",
                    "geometry": {
                      "type": "Point",
                      "coordinates": [
                        2,
                        2
                      ]
                    },
                    "properties": {
                      "name": "bar",
                      "id": 2
                    }
                  }
                ]
              }"#,
        )];

        let req = actix_web::test::TestRequest::post()
            .uri("/upload")
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .set_multipart(body.clone());

        let res = send_test_request(req, app_ctx.clone()).await;

        assert_eq!(res.status(), 200);

        let upload: IdResponse<UploadId> = actix_web::test::read_body_json(res).await;
        test_data.uploads.push(upload.id);

        let upload_content =
            std::fs::read_to_string(upload.id.root_path().unwrap().join("test.json")).unwrap();

        assert_eq!(&upload_content, body[0].1);

        let req = actix_web::test::TestRequest::post()
            .uri("/dataset/suggest")
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .set_json(SuggestMetaData {
                data_path: DataPath::Upload(upload.id),
                layer_name: None,
                main_file: None,
            });
        let res = send_test_request(req, app_ctx).await;

        let res_status = res.status();
        let res_body = read_body_string(res).await;
        assert_eq!(res_status, 200, "{res_body}");

        assert_eq!(
            serde_json::from_str::<serde_json::Value>(&res_body).unwrap(),
            json!({
              "mainFile": "test.json",
              "layerName": "test",
              "metaData": {
                "type": "ogrMetaData",
                "loadingInfo": {
                  "fileName": format!("test_upload/{}/test.json", upload.id),
                  "layerName": "test",
                  "dataType": "MultiPoint",
                  "time": {
                    "type": "none"
                  },
                  "defaultGeometry": null,
                  "columns": {
                    "formatSpecifics": null,
                    "x": "",
                    "y": null,
                    "int": [
                      "id"
                    ],
                    "float": [],
                    "text": [
                      "name"
                    ],
                    "bool": [],
                    "datetime": [],
                    "rename": null
                  },
                  "forceOgrTimeFilter": false,
                  "forceOgrSpatialFilter": false,
                  "onError": "ignore",
                  "sqlQuery": null,
                  "attributeQuery": null,
                  "cacheTtl": 0,
                },
                "resultDescriptor": {
                  "dataType": "MultiPoint",
                  "spatialReference": "EPSG:4326",
                  "columns": {
                    "id": {
                      "dataType": "int",
                      "measurement": {
                        "type": "unitless"
                      }
                    },
                    "name": {
                      "dataType": "text",
                      "measurement": {
                        "type": "unitless"
                      }
                    }
                  },
                  "time": null,
                  "bbox": null
                }
              }
            })
        );

        Ok(())
    }

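    // Creates a volume-backed dataset, deletes it via `DELETE /dataset/{name}`,
    // and verifies that it can no longer be loaded afterwards.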
    #[ge_context::test]
    async fn it_deletes_system_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = app_ctx.create_anonymous_session().await.unwrap();
        let ctx = app_ctx.session_context(session.clone());

        let volume = VolumeName("test_data".to_string());

        let mut meta_data = create_ndvi_meta_data();

        // make path relative to volume
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();

        let create = CreateDataset {
            data_path: DataPath::Volume(volume.clone()),
            definition: DatasetDefinition {
                properties: AddDataset {
                    name: None,
                    display_name: "ndvi".to_string(),
                    description: "ndvi".to_string(),
                    source_operator: "GdalSource".to_string(),
                    symbology: None,
                    provenance: None,
                    tags: None,
                },
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
            },
        };

        let req = actix_web::test::TestRequest::post()
            .uri("/dataset")
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"))
            .set_payload(serde_json::to_string(&create)?);
        let res = send_test_request(req, app_ctx.clone()).await;

        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;

        let db = ctx.db();
        let dataset_id = db
            .resolve_dataset_name_to_id(&dataset_name)
            .await
            .unwrap()
            .unwrap();
        assert!(db.load_dataset(&dataset_id).await.is_ok());

        let req = actix_web::test::TestRequest::delete()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"));

        let res = send_test_request(req, app_ctx.clone()).await;

        assert_eq!(res.status(), 200);

        assert!(db.load_dataset(&dataset_id).await.is_err());

        Ok(())
    }

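    // `GET /dataset/{name}/loadingInfo` returns the stored metadata
    // definition, i.e. the loading info together with the result descriptor.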
    #[ge_context::test]
    async fn it_gets_loading_info(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = app_ctx.create_anonymous_session().await.unwrap();
        let ctx = app_ctx.session_context(session.clone());

        let descriptor = VectorResultDescriptor {
            data_type: VectorDataType::Data,
            spatial_reference: SpatialReferenceOption::Unreferenced,
            columns: Default::default(),
            time: None,
            bbox: None,
        };

        let ds = AddDataset {
            name: None,
            display_name: "OgrDataset".to_string(),
            description: "My Ogr dataset".to_string(),
            source_operator: "OgrSource".to_string(),
            symbology: None,
            provenance: None,
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
        };

        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
            loading_info: OgrSourceDataset {
                file_name: Default::default(),
                layer_name: String::new(),
                data_type: None,
                time: Default::default(),
                default_geometry: None,
                columns: None,
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            },
            result_descriptor: descriptor,
            phantom: Default::default(),
        });

        let db = ctx.db();
        let DatasetIdAndName {
            id: _,
            name: dataset_name,
        } = db.add_dataset(ds.into(), meta).await?;

        let req = actix_web::test::TestRequest::get()
            .uri(&format!("/dataset/{dataset_name}/loadingInfo"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
        let res = send_test_request(req, app_ctx).await;

        let res_status = res.status();
        let res_body = serde_json::from_str::<Value>(&read_body_string(res).await).unwrap();
        assert_eq!(res_status, 200, "{res_body}");

        assert_eq!(
            res_body,
            json!({
                "loadingInfo": {
                    "attributeQuery": null,
                    "cacheTtl": 0,
                    "columns": null,
                    "dataType": null,
                    "defaultGeometry": null,
                    "fileName": "",
                    "forceOgrSpatialFilter": false,
                    "forceOgrTimeFilter": false,
                    "layerName": "",
                    "onError": "ignore",
                    "sqlQuery": null,
                    "time": {
                        "type": "none"
                    }
                },
                "resultDescriptor": {
                    "bbox": null,
                    "columns": {},
                    "dataType": "Data",
                    "spatialReference": "",
                    "time": null
                },
                "type": "ogrMetaData"
            })
        );

        Ok(())
    }

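    // `PUT /dataset/{name}/loadingInfo` replaces the stored metadata
    // definition; the test reloads the loading info afterwards to check that
    // the update was persisted.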
    #[ge_context::test]
    async fn it_updates_loading_info(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = app_ctx.create_anonymous_session().await.unwrap();
        let ctx = app_ctx.session_context(session.clone());

        let descriptor = VectorResultDescriptor {
            data_type: VectorDataType::Data,
            spatial_reference: SpatialReferenceOption::Unreferenced,
            columns: Default::default(),
            time: None,
            bbox: None,
        };

        let ds = AddDataset {
            name: None,
            display_name: "OgrDataset".to_string(),
            description: "My Ogr dataset".to_string(),
            source_operator: "OgrSource".to_string(),
            symbology: None,
            provenance: None,
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
        };

        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
            loading_info: OgrSourceDataset {
                file_name: Default::default(),
                layer_name: String::new(),
                data_type: None,
                time: Default::default(),
                default_geometry: None,
                columns: None,
                force_ogr_time_filter: false,
                force_ogr_spatial_filter: false,
                on_error: OgrSourceErrorSpec::Ignore,
                sql_query: None,
                attribute_query: None,
                cache_ttl: CacheTtlSeconds::default(),
            },
            result_descriptor: descriptor.clone(),
            phantom: Default::default(),
        });

        let db = ctx.db();
        let DatasetIdAndName {
            id,
            name: dataset_name,
        } = db.add_dataset(ds.into(), meta).await?;

        let update: MetaDataDefinition =
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: "foo.bar".into(),
                    layer_name: "baz".to_string(),
                    data_type: None,
                    time: Default::default(),
                    default_geometry: None,
                    columns: None,
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default(),
                },
                result_descriptor: descriptor,
                phantom: Default::default(),
            })
            .into();

        let req = actix_web::test::TestRequest::put()
            .uri(&format!("/dataset/{dataset_name}/loadingInfo"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .set_json(update.clone());

        let res = send_test_request(req, app_ctx).await;
        assert_eq!(res.status(), 200);

        let loading_info: MetaDataDefinition = db.load_loading_info(&id).await.unwrap().into();

        assert_eq!(loading_info, update);

        Ok(())
    }

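    /// Updating a dataset's symbology via `PUT /dataset/{name}/symbology`
    /// must persist the new symbology.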
    #[ge_context::test]
    async fn it_gets_updates_symbology(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = admin_login(&app_ctx).await;
        let ctx = app_ctx.session_context(session.clone());

        let DatasetIdAndName {
            id: dataset_id,
            name: dataset_name,
        } = add_file_definition_to_datasets(&ctx.db(), test_data!("dataset_defs/ndvi.json")).await;

        let symbology = Symbology::Raster(RasterSymbology {
            opacity: 1.0,
            raster_colorizer: RasterColorizer::SingleBand {
                band: 0,
                band_colorizer: geoengine_datatypes::operations::image::Colorizer::linear_gradient(
                    vec![
                        (0.0, RgbaColor::white())
                            .try_into()
                            .expect("valid breakpoint"),
                        (10_000.0, RgbaColor::black())
                            .try_into()
                            .expect("valid breakpoint"),
                    ],
                    RgbaColor::transparent(),
                    RgbaColor::white(),
                    RgbaColor::black(),
                )
                .expect("valid colorizer"),
            },
        });

        let req = actix_web::test::TestRequest::put()
            .uri(&format!("/dataset/{dataset_name}/symbology"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .set_json(symbology.clone());
        let res = send_test_request(req, app_ctx).await;

        let res_status = res.status();
        assert_eq!(res_status, 200);

        let dataset = ctx.db().load_dataset(&dataset_id).await?;

        assert_eq!(dataset.symbology, Some(symbology));

        Ok(())
    }

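    /// A `POST` to `/dataset/{name}` must update the dataset's name, display
    /// name, description, and tags. A request body looks roughly like the
    /// sketch below (field casing and the name encoding are assumptions
    /// derived from `UpdateDataset`'s serde configuration):
    ///
    /// ```text
    /// {
    ///   "name": "new_name",
    ///   "displayName": "new display name",
    ///   "description": "new description",
    ///   "tags": ["foo", "bar"]
    /// }
    /// ```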
    #[ge_context::test]
    async fn it_updates_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = admin_login(&app_ctx).await;
        let ctx = app_ctx.session_context(session.clone());

        let DatasetIdAndName {
            id: dataset_id,
            name: dataset_name,
        } = add_file_definition_to_datasets(&ctx.db(), test_data!("dataset_defs/ndvi.json")).await;

        let update = UpdateDataset {
            name: DatasetName::new(None, "new_name"),
            display_name: "new display name".to_string(),
            description: "new description".to_string(),
            tags: vec!["foo".to_string(), "bar".to_string()],
        };

        let req = actix_web::test::TestRequest::post()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .set_json(update.clone());
        let res = send_test_request(req, app_ctx).await;

        let res_status = res.status();
        assert_eq!(res_status, 200);

        let dataset = ctx.db().load_dataset(&dataset_id).await?;

        assert_eq!(dataset.name, update.name);
        assert_eq!(dataset.display_name, update.display_name);
        assert_eq!(dataset.description, update.description);
        assert_eq!(dataset.tags, Some(update.tags));

        Ok(())
    }

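    /// A `PUT` to `/dataset/{name}/provenance` must replace the stored
    /// provenance entries.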
    #[ge_context::test]
    async fn it_updates_provenance(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = admin_login(&app_ctx).await;
        let ctx = app_ctx.session_context(session.clone());

        let DatasetIdAndName {
            id: dataset_id,
            name: dataset_name,
        } = add_file_definition_to_datasets(&ctx.db(), test_data!("dataset_defs/ndvi.json")).await;

        let provenances = Provenances {
            provenances: vec![Provenance {
                citation: "foo".to_string(),
                license: "bar".to_string(),
                uri: "http://example.com".to_string(),
            }],
        };

        let req = actix_web::test::TestRequest::put()
            .uri(&format!("/dataset/{dataset_name}/provenance"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .set_json(provenances.clone());
        let res = send_test_request(req, app_ctx).await;

        let res_status = res.status();
        assert_eq!(res_status, 200);

        let dataset = ctx.db().load_dataset(&dataset_id).await?;

        assert_eq!(
            dataset.provenance,
            Some(
                provenances
                    .provenances
                    .into_iter()
                    .map(Into::into)
                    .collect()
            )
        );

        Ok(())
    }

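    // RAII helper that moves the working directory up to the workspace root
    // and restores the package directory when dropped.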
    // TODO: better way to get to the root of the project
    struct TestWorkdirChanger {
        package_dir: &'static str,
        modified: bool,
    }

    impl TestWorkdirChanger {
        fn go_to_workspace(package_dir: &'static str) -> Self {
            let mut working_dir = std::env::current_dir().unwrap();

            if !working_dir.ends_with(package_dir) {
                return Self {
                    package_dir,
                    modified: false,
                };
            }

            working_dir.pop();

            std::env::set_current_dir(working_dir).unwrap();

            Self {
                package_dir,
                modified: true,
            }
        }
    }

    impl Drop for TestWorkdirChanger {
        fn drop(&mut self) {
            if !self.modified {
                return;
            }

            let mut working_dir = std::env::current_dir().unwrap();
            working_dir.push(self.package_dir);
            std::env::set_current_dir(working_dir).unwrap();
        }
    }

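    /// `GET /dataset/volumes/{volume}/files/{file}/layers` must list all layer
    /// names of the GeoPackage. Runs serially because the test changes the
    /// process-wide working directory.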
    #[ge_context::test(test_execution = "serial")]
    async fn it_lists_layers(app_ctx: PostgresContext<NoTls>) {
        let changed_workdir = TestWorkdirChanger::go_to_workspace("services");

        let session = admin_login(&app_ctx).await;

        let volume_name = "test_data";
        let file_name = "vector%2Fdata%2Ftwo_layers.gpkg";

        let req = actix_web::test::TestRequest::get()
            .uri(&format!(
                "/dataset/volumes/{volume_name}/files/{file_name}/layers"
            ))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));

        let res = send_test_request(req, app_ctx).await;

        assert_eq!(res.status(), 200, "{res:?}");

        let layers: VolumeFileLayersResponse = actix_web::test::read_body_json(res).await;

        assert_eq!(
            layers.layers,
            vec![
                "points_with_time".to_string(),
                "points_with_time_and_more".to_string(),
                "layer_styles".to_string() // TODO: remove once internal/system layers are hidden
            ]
        );

        drop(changed_workdir);
    }

    /// Override the tile size since this test was designed for 600 x 600 pixel tiles.
    fn create_dataset_tiling_specification() -> TilingSpecification {
        TilingSpecification {
            origin_coordinate: (0., 0.).into(),
            tile_size_in_pixels: GridShape2D::new([600, 600]),
        }
    }

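    /// Creating a dataset from an upload and querying it through an OGR source
    /// must yield the uploaded point coordinates.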
    #[ge_context::test(tiling_spec = "create_dataset_tiling_specification")]
    async fn create_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let mut test_data = TestDataUploads::default(); // remember created folders and remove them on drop

        let session = app_ctx.create_anonymous_session().await.unwrap();
        let ctx = app_ctx.session_context(session.clone());

        let upload_id = upload_ne_10m_ports_files(app_ctx.clone(), session.id()).await?;
        test_data.uploads.push(upload_id);

        let dataset_name =
            construct_dataset_from_upload(app_ctx.clone(), upload_id, session.id()).await;
        let exe_ctx = ctx.execution_context()?;

        let source = make_ogr_source(
            &exe_ctx,
            geoengine_datatypes::dataset::NamedData::from(dataset_name).into(),
        )
        .await?;

        let query_processor = source.query_processor()?.multi_point().unwrap();
        let query_ctx = ctx.mock_query_context()?;

        let query = query_processor
            .query(
                VectorQueryRectangle {
                    spatial_bounds: BoundingBox2D::new((1.85, 50.88).into(), (4.82, 52.95).into())?,
                    time_interval: Default::default(),
                    spatial_resolution: SpatialResolution::new(1., 1.)?,
                    attributes: ColumnSelection::all(),
                },
                &query_ctx,
            )
            .await
            .unwrap();

        let result: Vec<MultiPointCollection> = query.try_collect().await?;

        let coords = result[0].coordinates();
        assert_eq!(coords.len(), 10);
        assert_eq!(
            coords,
            &[
                [2.933_686_69, 51.23].into(),
                [3.204_593_64_f64, 51.336_388_89].into(),
                [4.651_413_428, 51.805_833_33].into(),
                [4.11, 51.95].into(),
                [4.386_160_188, 50.886_111_11].into(),
                [3.767_373_38, 51.114_444_44].into(),
                [4.293_757_362, 51.297_777_78].into(),
                [1.850_176_678, 50.965_833_33].into(),
                [2.170_906_949, 51.021_666_67].into(),
                [4.292_873_969, 51.927_222_22].into(),
            ]
        );

        Ok(())
    }

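    /// A dataset created on a volume via the admin session must be readable by
    /// an anonymous session.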
    #[ge_context::test]
    async fn it_creates_volume_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let session = app_ctx.create_anonymous_session().await.unwrap();

        let volume = VolumeName("test_data".to_string());

        let mut meta_data = create_ndvi_meta_data();

        // make path relative to volume
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();

        let create = CreateDataset {
            data_path: DataPath::Volume(volume.clone()),
            definition: DatasetDefinition {
                properties: AddDataset {
                    name: None,
                    display_name: "ndvi".to_string(),
                    description: "ndvi".to_string(),
                    source_operator: "GdalSource".to_string(),
                    symbology: None,
                    provenance: None,
                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
                },
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
            },
        };

        // create via admin session
        let admin_session = admin_login(&app_ctx).await;
        let req = actix_web::test::TestRequest::post()
            .uri("/dataset")
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((
                header::AUTHORIZATION,
                Bearer::new(admin_session.id().to_string()),
            ))
            .append_header((header::CONTENT_TYPE, "application/json"))
            .set_json(create);
        let res = send_test_request(req, app_ctx.clone()).await;
        assert_eq!(res.status(), 200);

        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;

        let req = actix_web::test::TestRequest::get()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));

        let res = send_test_request(req, app_ctx.clone()).await;
        assert_eq!(res.status(), 200);

        Ok(())
    }

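    /// A `DELETE` on an uploaded dataset must remove it from the database.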
    #[ge_context::test]
    async fn it_deletes_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let mut test_data = TestDataUploads::default(); // remember created folders and remove them on drop

        let session = app_ctx.create_anonymous_session().await.unwrap();
        let session_id = session.id();
        let ctx = app_ctx.session_context(session);

        let upload_id = upload_ne_10m_ports_files(app_ctx.clone(), session_id).await?;
        test_data.uploads.push(upload_id);

        let dataset_name =
            construct_dataset_from_upload(app_ctx.clone(), upload_id, session_id).await;

        let db = ctx.db();
        let dataset_id = db
            .resolve_dataset_name_to_id(&dataset_name)
            .await
            .unwrap()
            .unwrap();

        assert!(db.load_dataset(&dataset_id).await.is_ok());

        let req = actix_web::test::TestRequest::delete()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"));

        let res = send_test_request(req, app_ctx.clone()).await;

        assert_eq!(res.status(), 200, "response: {res:?}");

        assert!(db.load_dataset(&dataset_id).await.is_err());

        Ok(())
    }

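    /// A `DELETE` on a volume dataset must remove it from the database.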
    #[ge_context::test]
    async fn it_deletes_volume_dataset(app_ctx: PostgresContext<NoTls>) -> Result<()> {
        let volume = VolumeName("test_data".to_string());

        let mut meta_data = create_ndvi_meta_data();

        // make path relative to volume
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();

        let create = CreateDataset {
            data_path: DataPath::Volume(volume.clone()),
            definition: DatasetDefinition {
                properties: AddDataset {
                    name: None,
                    display_name: "ndvi".to_string(),
                    description: "ndvi".to_string(),
                    source_operator: "GdalSource".to_string(),
                    symbology: None,
                    provenance: None,
                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
                },
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
            },
        };

        let session = admin_login(&app_ctx).await;
        let ctx = app_ctx.session_context(session.clone());

        let db = ctx.db();

        let req = actix_web::test::TestRequest::post()
            .uri("/dataset")
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"))
            .set_payload(serde_json::to_string(&create)?);
        let res = send_test_request(req, app_ctx.clone()).await;

        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
        let dataset_id = db
            .resolve_dataset_name_to_id(&dataset_name)
            .await
            .unwrap()
            .unwrap();

        assert!(db.load_dataset(&dataset_id).await.is_ok());

        let req = actix_web::test::TestRequest::delete()
            .uri(&format!("/dataset/{dataset_name}"))
            .append_header((header::CONTENT_LENGTH, 0))
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
            .append_header((header::CONTENT_TYPE, "application/json"));

        let res = send_test_request(req, app_ctx.clone()).await;

        assert_eq!(res.status(), 200);

        assert!(db.load_dataset(&dataset_id).await.is_err());

        Ok(())
    }
}