• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

geo-engine / geoengine / 12767614094

14 Jan 2025 12:26PM UTC coverage: 90.64% (+0.06%) from 90.576%
12767614094

push

github

web-flow
Merge pull request #1006 from geo-engine/migrate-pro-api

Migrate-pro-api

1106 of 1152 new or added lines in 24 files covered. (96.01%)

248 existing lines in 13 files now uncovered.

133501 of 147287 relevant lines covered (90.64%)

54652.85 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

94.7
/services/src/api/handlers/datasets.rs
1
use crate::{
2
    api::model::{
3
        operators::{GdalLoadingInfoTemporalSlice, GdalMetaDataList},
4
        responses::datasets::{errors::*, DatasetNameResponse},
5
        services::{
6
            AddDataset, CreateDataset, DataPath, DatasetDefinition, MetaDataDefinition,
7
            MetaDataSuggestion, Provenances, UpdateDataset,
8
        },
9
    },
10
    contexts::{ApplicationContext, SessionContext},
11
    datasets::{
12
        listing::{DatasetListOptions, DatasetProvider},
13
        storage::{AutoCreateDataset, DatasetStore, SuggestMetaData},
14
        upload::{
15
            AdjustFilePath, Upload, UploadDb, UploadId, UploadRootPath, Volume, VolumeName, Volumes,
16
        },
17
        DatasetName,
18
    },
19
    error::{self, Error, Result},
20
    pro::{
21
        contexts::{ProApplicationContext, ProGeoEngineDb},
22
        permissions::{Permission, PermissionDb, Role},
23
    },
24
    projects::Symbology,
25
    util::{
26
        config::{get_config_element, Data},
27
        extractors::{ValidatedJson, ValidatedQuery},
28
        path_with_base_path,
29
    },
30
};
31
use actix_web::{web, FromRequest, HttpResponse, HttpResponseBuilder, Responder};
32
use gdal::{
33
    vector::{Layer, LayerAccess, OGRFieldType},
34
    Dataset, DatasetOptions,
35
};
36
use geoengine_datatypes::{
37
    collections::VectorDataType,
38
    error::BoxedResultExt,
39
    primitives::{
40
        CacheTtlSeconds, FeatureDataType, Measurement, TimeInterval, VectorQueryRectangle,
41
    },
42
    spatial_reference::{SpatialReference, SpatialReferenceOption},
43
};
44
use geoengine_operators::{
45
    engine::{StaticMetaData, VectorColumnInfo, VectorResultDescriptor},
46
    source::{
47
        OgrSourceColumnSpec, OgrSourceDataset, OgrSourceDatasetTimeType, OgrSourceDurationSpec,
48
        OgrSourceErrorSpec, OgrSourceTimeFormat,
49
    },
50
    util::gdal::{
51
        gdal_open_dataset, gdal_open_dataset_ex, gdal_parameters_from_dataset,
52
        raster_descriptor_from_dataset,
53
    },
54
};
55
use serde::{Deserialize, Serialize};
56
use snafu::ResultExt;
57
use std::{
58
    collections::HashMap,
59
    convert::{TryFrom, TryInto},
60
    path::Path,
61
};
62
use utoipa::{ToResponse, ToSchema};
63

64
/// Registers all dataset-related HTTP routes.
///
/// Everything is mounted under the `/dataset` scope except the collection
/// listing, which lives at `/datasets`. Handlers are instantiated for the
/// concrete application context `C`.
pub(crate) fn init_dataset_routes<C>(cfg: &mut web::ServiceConfig)
where
    C: ProApplicationContext,
    <<C as ApplicationContext>::SessionContext as SessionContext>::GeoEngineDB: ProGeoEngineDb,
    C::Session: FromRequest,
{
    cfg.service(
        web::scope("/dataset")
            // fixed-path routes are registered before the `/{dataset}` routes
            // so that e.g. "suggest" is not interpreted as a dataset name
            .service(
                web::resource("/suggest").route(web::post().to(suggest_meta_data_handler::<C>)),
            )
            .service(web::resource("/auto").route(web::post().to(auto_create_dataset_handler::<C>)))
            .service(
                web::resource("/volumes/{volume_name}/files/{file_name}/layers")
                    .route(web::get().to(list_volume_file_layers_handler::<C>)),
            )
            .service(web::resource("/volumes").route(web::get().to(list_volumes_handler::<C>)))
            .service(
                web::resource("/{dataset}/loadingInfo")
                    .route(web::get().to(get_loading_info_handler::<C>))
                    .route(web::put().to(update_loading_info_handler::<C>)),
            )
            .service(
                web::resource("/{dataset}/symbology")
                    .route(web::put().to(update_dataset_symbology_handler::<C>)),
            )
            .service(
                web::resource("/{dataset}/provenance")
                    .route(web::put().to(update_dataset_provenance_handler::<C>)),
            )
            .service(
                web::resource("/{dataset}")
                    .route(web::get().to(get_dataset_handler::<C>))
                    .route(web::post().to(update_dataset_handler::<C>))
                    .route(web::delete().to(delete_dataset_handler::<C>)),
            )
            .service(web::resource("").route(web::post().to(create_dataset_handler::<C>))), // must come last to not match other routes
    )
    .service(web::resource("/datasets").route(web::get().to(list_datasets_handler::<C>)));
}
104

105
/// Lists available volumes.
106
#[utoipa::path(
4✔
107
    tag = "Datasets",
4✔
108
    get,
4✔
109
    path = "/dataset/volumes",
4✔
110
    responses(
4✔
111
        (status = 200, description = "OK", body = [Volume],
4✔
112
            example = json!([
4✔
113
                {
4✔
114
                    "name": "test_data",
4✔
115
                    "path": "./test_data/"
4✔
116
                }
4✔
117
            ])
4✔
118
        ),
4✔
119
        (status = 401, response = crate::api::model::responses::UnauthorizedAdminResponse)
4✔
120
    ),
4✔
121
    security(
4✔
122
        ("session_token" = [])
4✔
123
    )
4✔
124
)]
4✔
125
#[allow(clippy::unused_async)]
126
pub async fn list_volumes_handler<C: ApplicationContext>(
×
127
    app_ctx: web::Data<C>,
×
128
    session: C::Session,
×
129
) -> Result<impl Responder> {
×
130
    let volumes = app_ctx.session_context(session).volumes()?;
×
131
    Ok(web::Json(volumes))
×
132
}
×
133

134
/// Lists available datasets.
135
#[utoipa::path(
4✔
136
    tag = "Datasets",
4✔
137
    get,
4✔
138
    path = "/datasets",
4✔
139
    responses(
4✔
140
        (status = 200, description = "OK", body = [DatasetListing],
4✔
141
            example = json!([
4✔
142
                {
4✔
143
                    "id": {
4✔
144
                        "internal": "9c874b9e-cea0-4553-b727-a13cb26ae4bb"
4✔
145
                    },
4✔
146
                    "name": "Germany",
4✔
147
                    "description": "Boundaries of Germany",
4✔
148
                    "tags": [],
4✔
149
                    "sourceOperator": "OgrSource",
4✔
150
                    "resultDescriptor": {
4✔
151
                        "vector": {
4✔
152
                            "dataType": "MultiPolygon",
4✔
153
                            "spatialReference": "EPSG:4326",
4✔
154
                            "columns": {}
4✔
155
                        }
4✔
156
                    }
4✔
157
                }
4✔
158
            ])
4✔
159
        ),
4✔
160
        (status = 400, response = crate::api::model::responses::BadRequestQueryResponse),
4✔
161
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
162
    ),
4✔
163
    params(
4✔
164
        DatasetListOptions
4✔
165
    ),
4✔
166
    security(
4✔
167
        ("session_token" = [])
4✔
168
    )
4✔
169
)]
4✔
170
pub async fn list_datasets_handler<C: ApplicationContext>(
1✔
171
    session: C::Session,
1✔
172
    app_ctx: web::Data<C>,
1✔
173
    options: ValidatedQuery<DatasetListOptions>,
1✔
174
) -> Result<impl Responder> {
1✔
175
    let options = options.into_inner();
1✔
176
    let list = app_ctx
1✔
177
        .session_context(session)
1✔
178
        .db()
1✔
179
        .list_datasets(options)
1✔
180
        .await?;
1✔
181
    Ok(web::Json(list))
1✔
182
}
1✔
183

184
/// Retrieves details about a dataset using the internal name.
185
#[utoipa::path(
4✔
186
    tag = "Datasets",
4✔
187
    get,
4✔
188
    path = "/dataset/{dataset}",
4✔
189
    responses(
4✔
190
        (status = 200, description = "OK", body = Dataset,
4✔
191
            example = json!({
4✔
192
                "id": {
4✔
193
                    "internal": "9c874b9e-cea0-4553-b727-a13cb26ae4bb"
4✔
194
                },
4✔
195
                "name": "Germany",
4✔
196
                "description": "Boundaries of Germany",
4✔
197
                "resultDescriptor": {
4✔
198
                    "vector": {
4✔
199
                        "dataType": "MultiPolygon",
4✔
200
                        "spatialReference": "EPSG:4326",
4✔
201
                        "columns": {}
4✔
202
                    }
4✔
203
                },
4✔
204
                "sourceOperator": "OgrSource"
4✔
205
            })
4✔
206
        ),
4✔
207
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
4✔
208
            ("Referenced an unknown dataset" = (value = json!({
4✔
209
                "error": "CannotLoadDataset",
4✔
210
                "message": "CannotLoadDataset: UnknownDatasetName"
4✔
211
            })))
4✔
212
        )),
4✔
213
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
214
    ),
4✔
215
    params(
4✔
216
        ("dataset" = DatasetName, description = "Dataset Name")
4✔
217
    ),
4✔
218
    security(
4✔
219
        ("session_token" = [])
4✔
220
    )
4✔
221
)]
4✔
222
pub async fn get_dataset_handler<C: ApplicationContext>(
3✔
223
    dataset: web::Path<DatasetName>,
3✔
224
    session: C::Session,
3✔
225
    app_ctx: web::Data<C>,
3✔
226
) -> Result<impl Responder, GetDatasetError> {
3✔
227
    let session_ctx = app_ctx.session_context(session).db();
3✔
228

3✔
229
    let real_dataset = dataset.into_inner();
3✔
230

231
    let dataset_id = session_ctx
3✔
232
        .resolve_dataset_name_to_id(&real_dataset)
3✔
233
        .await
3✔
234
        .context(CannotLoadDataset)?;
3✔
235

236
    // handle the case where the dataset name is not known
237
    let dataset_id = dataset_id
3✔
238
        .ok_or(error::Error::UnknownDatasetName {
3✔
239
            dataset_name: real_dataset.to_string(),
3✔
240
        })
3✔
241
        .context(CannotLoadDataset)?;
3✔
242

243
    let dataset = session_ctx
3✔
244
        .load_dataset(&dataset_id)
3✔
245
        .await
3✔
246
        .context(CannotLoadDataset)?;
3✔
247

248
    Ok(web::Json(dataset))
3✔
249
}
3✔
250

251
/// Update details about a dataset using the internal name.
252
#[utoipa::path(
4✔
253
    tag = "Datasets",
4✔
254
    post,
4✔
255
    path = "/dataset/{dataset}",
4✔
256
    request_body = UpdateDataset,
4✔
257
    responses(
4✔
258
        (status = 200, description = "OK" ),
4✔
259
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
4✔
260
            ("Referenced an unknown dataset" = (value = json!({
4✔
261
                "error": "CannotLoadDataset",
4✔
262
                "message": "CannotLoadDataset: UnknownDatasetName"
4✔
263
            })))
4✔
264
        )),
4✔
265
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
266
    ),
4✔
267
    params(
4✔
268
        ("dataset" = DatasetName, description = "Dataset Name"),
4✔
269
    ),
4✔
270
    security(
4✔
271
        ("session_token" = [])
4✔
272
    )
4✔
273
)]
4✔
274
pub async fn update_dataset_handler<C: ApplicationContext>(
1✔
275
    dataset: web::Path<DatasetName>,
1✔
276
    session: C::Session,
1✔
277
    app_ctx: web::Data<C>,
1✔
278
    update: ValidatedJson<UpdateDataset>,
1✔
279
) -> Result<impl Responder, UpdateDatasetError> {
1✔
280
    let session_ctx = app_ctx.session_context(session).db();
1✔
281

1✔
282
    let real_dataset = dataset.into_inner();
1✔
283

284
    let dataset_id = session_ctx
1✔
285
        .resolve_dataset_name_to_id(&real_dataset)
1✔
286
        .await
1✔
287
        .context(CannotLoadDatasetForUpdate)?;
1✔
288

289
    // handle the case where the dataset name is not known
290
    let dataset_id = dataset_id
1✔
291
        .ok_or(error::Error::UnknownDatasetName {
1✔
292
            dataset_name: real_dataset.to_string(),
1✔
293
        })
1✔
294
        .context(CannotLoadDatasetForUpdate)?;
1✔
295

296
    session_ctx
1✔
297
        .update_dataset(dataset_id, update.into_inner())
1✔
298
        .await
1✔
299
        .context(CannotUpdateDataset)?;
1✔
300

301
    Ok(HttpResponse::Ok())
1✔
302
}
1✔
303

304
/// Retrieves the loading information of a dataset
305
#[utoipa::path(
4✔
306
    tag = "Datasets",
4✔
307
    get,
4✔
308
    path = "/dataset/{dataset}/loadingInfo",
4✔
309
    responses(
4✔
310
        (status = 200, description = "OK", body = MetaDataDefinition)
4✔
311
    ),
4✔
312
    params(
4✔
313
        ("dataset" = DatasetName, description = "Dataset Name")
4✔
314
    ),
4✔
315
    security(
4✔
316
        ("session_token" = [])
4✔
317
    )
4✔
318
)]
4✔
319
pub async fn get_loading_info_handler<C: ApplicationContext>(
1✔
320
    dataset: web::Path<DatasetName>,
1✔
321
    session: C::Session,
1✔
322
    app_ctx: web::Data<C>,
1✔
323
) -> Result<impl Responder> {
1✔
324
    let session_ctx = app_ctx.session_context(session).db();
1✔
325

1✔
326
    let real_dataset = dataset.into_inner();
1✔
327

328
    let dataset_id = session_ctx
1✔
329
        .resolve_dataset_name_to_id(&real_dataset)
1✔
330
        .await?;
1✔
331

332
    // handle the case where the dataset name is not known
333
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
1✔
334
        dataset_name: real_dataset.to_string(),
1✔
335
    })?;
1✔
336

337
    let dataset = session_ctx.load_loading_info(&dataset_id).await?;
1✔
338

339
    Ok(web::Json(dataset))
1✔
340
}
1✔
341

342
/// Updates the dataset's loading info
343
#[utoipa::path(
4✔
344
    tag = "Datasets",
4✔
345
    put,
4✔
346
    path = "/dataset/{dataset}/loadingInfo",
4✔
347
    request_body = MetaDataDefinition,
4✔
348
    responses(
4✔
349
        (status = 200, description = "OK"),
4✔
350
        (status = 400, description = "Bad request", body = ErrorResponse),
4✔
351
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
352
    ),
4✔
353
    params(
4✔
354
        ("dataset" = DatasetName, description = "Dataset Name"),
4✔
355
    ),
4✔
356
    security(
4✔
357
        ("session_token" = [])
4✔
358
    )
4✔
359
)]
4✔
360
pub async fn update_loading_info_handler<C: ApplicationContext>(
1✔
361
    session: C::Session,
1✔
362
    app_ctx: web::Data<C>,
1✔
363
    dataset: web::Path<DatasetName>,
1✔
364
    meta_data: web::Json<MetaDataDefinition>,
1✔
365
) -> Result<HttpResponse> {
1✔
366
    let session_ctx = app_ctx.session_context(session).db();
1✔
367

1✔
368
    let real_dataset = dataset.into_inner();
1✔
369

370
    let dataset_id = session_ctx
1✔
371
        .resolve_dataset_name_to_id(&real_dataset)
1✔
372
        .await?;
1✔
373

374
    // handle the case where the dataset name is not known
375
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
1✔
376
        dataset_name: real_dataset.to_string(),
1✔
377
    })?;
1✔
378

379
    session_ctx
1✔
380
        .update_dataset_loading_info(dataset_id, &meta_data.into_inner().into())
1✔
381
        .await?;
1✔
382

383
    Ok(HttpResponse::Ok().finish())
1✔
384
}
1✔
385

386
/// Updates the dataset's symbology
387
#[utoipa::path(
4✔
388
    tag = "Datasets",
4✔
389
    put,
4✔
390
    path = "/dataset/{dataset}/symbology",
4✔
391
    request_body = Symbology,
4✔
392
    responses(
4✔
393
        (status = 200, description = "OK"),
4✔
394
        (status = 400, description = "Bad request", body = ErrorResponse),
4✔
395
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
396
    ),
4✔
397
    params(
4✔
398
        ("dataset" = DatasetName, description = "Dataset Name"),
4✔
399
    ),
4✔
400
    security(
4✔
401
        ("session_token" = [])
4✔
402
    )
4✔
403
)]
4✔
404
pub async fn update_dataset_symbology_handler<C: ApplicationContext>(
1✔
405
    session: C::Session,
1✔
406
    app_ctx: web::Data<C>,
1✔
407
    dataset: web::Path<DatasetName>,
1✔
408
    symbology: web::Json<Symbology>,
1✔
409
) -> Result<impl Responder> {
1✔
410
    let session_ctx = app_ctx.session_context(session).db();
1✔
411

1✔
412
    let real_dataset = dataset.into_inner();
1✔
413

414
    let dataset_id = session_ctx
1✔
415
        .resolve_dataset_name_to_id(&real_dataset)
1✔
416
        .await?;
1✔
417

418
    // handle the case where the dataset name is not known
419
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
1✔
420
        dataset_name: real_dataset.to_string(),
1✔
421
    })?;
1✔
422

423
    session_ctx
1✔
424
        .update_dataset_symbology(dataset_id, &symbology.into_inner())
1✔
425
        .await?;
1✔
426

427
    Ok(HttpResponse::Ok())
1✔
428
}
1✔
429

430
// Updates the dataset's provenance
431
#[utoipa::path(
4✔
432
    tag = "Datasets",
4✔
433
    put,
4✔
434
    path = "/dataset/{dataset}/provenance",
4✔
435
    request_body = Provenances,
4✔
436
    responses(
4✔
437
        (status = 200, description = "OK"),
4✔
438
        (status = 400, description = "Bad request", body = ErrorResponse),
4✔
439
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
440
    ),
4✔
441
    params(
4✔
442
        ("dataset" = DatasetName, description = "Dataset Name"),
4✔
443
    ),
4✔
444
    security(
4✔
445
        ("session_token" = [])
4✔
446
    )
4✔
447
)]
4✔
448
pub async fn update_dataset_provenance_handler<C: ApplicationContext>(
1✔
449
    session: C::Session,
1✔
450
    app_ctx: web::Data<C>,
1✔
451
    dataset: web::Path<DatasetName>,
1✔
452
    provenance: ValidatedJson<Provenances>,
1✔
453
) -> Result<HttpResponseBuilder> {
1✔
454
    let session_ctx = app_ctx.session_context(session).db();
1✔
455

1✔
456
    let real_dataset = dataset.into_inner();
1✔
457

458
    let dataset_id = session_ctx
1✔
459
        .resolve_dataset_name_to_id(&real_dataset)
1✔
460
        .await?;
1✔
461

462
    // handle the case where the dataset name is not known
463
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
1✔
464
        dataset_name: real_dataset.to_string(),
1✔
465
    })?;
1✔
466

467
    let provenance = provenance
1✔
468
        .into_inner()
1✔
469
        .provenances
1✔
470
        .into_iter()
1✔
471
        .map(Into::into)
1✔
472
        .collect::<Vec<_>>();
1✔
473

1✔
474
    session_ctx
1✔
475
        .update_dataset_provenance(dataset_id, &provenance)
1✔
476
        .await?;
1✔
477

478
    Ok(HttpResponse::Ok())
1✔
479
}
1✔
480

481
/// Creates a dataset from files that were previously uploaded.
///
/// Tags the dataset with `"upload"` and rewrites the file paths inside the
/// metadata so they point into the upload's directory before persisting.
///
/// # Errors
///
/// Fails if the upload is unknown, a contained file path cannot be resolved,
/// or the dataset cannot be stored.
pub async fn create_upload_dataset<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    upload_id: UploadId,
    mut definition: DatasetDefinition,
) -> Result<web::Json<DatasetNameResponse>, CreateDatasetError> {
    let db = app_ctx.session_context(session).db();
    let upload = db.load_upload(upload_id).await.context(UploadNotFound)?;

    // mark the dataset as originating from an upload (no-op if already tagged)
    add_tag(&mut definition.properties, "upload".to_owned());

    adjust_meta_data_path(&mut definition.meta_data, &upload)
        .context(CannotResolveUploadFilePath)?;

    let result = db
        .add_dataset(definition.properties.into(), definition.meta_data.into())
        .await
        .context(CannotCreateDataset)?;

    Ok(web::Json(result.name.into()))
}
502

503
/// Rewrites every file path contained in `meta` via `adjust` so that it
/// points to the actual on-disk location (e.g. inside an upload directory).
///
/// # Errors
///
/// Fails if `adjust` cannot resolve one of the contained file paths.
pub fn adjust_meta_data_path<A: AdjustFilePath>(
    meta: &mut MetaDataDefinition,
    adjust: &A,
) -> Result<()> {
    match meta {
        // mock metadata references no files, so there is nothing to adjust
        MetaDataDefinition::MockMetaData(_) => {}
        MetaDataDefinition::OgrMetaData(m) => {
            m.loading_info.file_name = adjust.adjust_file_path(&m.loading_info.file_name)?;
        }
        MetaDataDefinition::GdalMetaDataRegular(m) => {
            m.params.file_path = adjust.adjust_file_path(&m.params.file_path)?;
        }
        MetaDataDefinition::GdalStatic(m) => {
            m.params.file_path = adjust.adjust_file_path(&m.params.file_path)?;
        }
        MetaDataDefinition::GdalMetadataNetCdfCf(m) => {
            m.params.file_path = adjust.adjust_file_path(&m.params.file_path)?;
        }
        MetaDataDefinition::GdalMetaDataList(m) => {
            // a list has one (optional) set of GDAL params per temporal slice
            for p in &mut m.params {
                if let Some(ref mut params) = p.params {
                    params.file_path = adjust.adjust_file_path(&params.file_path)?;
                }
            }
        }
    }
    Ok(())
}
531

532
/// Add the upload tag to the dataset properties.
533
/// If the tag already exists, it will not be added again.
534
pub fn add_tag(properties: &mut AddDataset, tag: String) {
2✔
535
    if let Some(ref mut tags) = properties.tags {
2✔
536
        if !tags.contains(&tag) {
×
537
            tags.push(tag);
×
538
        }
×
539
    } else {
2✔
540
        properties.tags = Some(vec![tag]);
2✔
541
    }
2✔
542
}
2✔
543

544
/// Creates a new dataset using previously uploaded files.
/// The format of the files will be automatically detected when possible.
#[utoipa::path(
    tag = "Datasets",
    post,
    path = "/dataset/auto",
    request_body = AutoCreateDataset,
    responses(
        (status = 200, response = DatasetNameResponse),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Body is invalid json" = (value = json!({
                "error": "BodyDeserializeError",
                "message": "expected `,` or `}` at line 13 column 7"
            }))),
            ("Failed to read body" = (value = json!({
                "error": "Payload",
                "message": "Error that occur during reading payload: Can not decode content-encoding."
            }))),
            ("Referenced an unknown upload" = (value = json!({
                "error": "UnknownUploadId",
                "message": "Unknown upload id"
            }))),
            ("Dataset name is empty" = (value = json!({
                "error": "InvalidDatasetName",
                "message": "Invalid dataset name"
            }))),
            ("Upload filename is invalid" = (value = json!({
                "error": "InvalidUploadFileName",
                "message": "Invalid upload file name"
            }))),
            ("File does not exist" = (value = json!({
                "error": "GdalError",
                "message": "GdalError: GDAL method 'GDALOpenEx' returned a NULL pointer. Error msg: 'upload/0bdd1062-7796-4d44-a655-e548144281a6/asdf: No such file or directory'"
            }))),
            ("Dataset has no auto-importable layer" = (value = json!({
                "error": "DatasetHasNoAutoImportableLayer",
                "message": "Dataset has no auto importable layer"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse),
        (status = 413, response = crate::api::model::responses::PayloadTooLargeResponse),
        (status = 415, response = crate::api::model::responses::UnsupportedMediaTypeForJsonResponse)
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn auto_create_dataset_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    create: ValidatedJson<AutoCreateDataset>,
) -> Result<web::Json<DatasetNameResponse>> {
    let db = app_ctx.session_context(session).db();
    let upload = db.load_upload(create.upload).await?;

    let create = create.into_inner();

    // auto-detection here only covers vector data (OGR metadata is built below)
    let main_file_path = upload.id.root_path()?.join(&create.main_file);
    let meta_data = auto_detect_vector_meta_data_definition(&main_file_path, &create.layer_name)?;
    let meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

    let properties = AddDataset {
        name: None,
        display_name: create.dataset_name,
        description: create.dataset_description,
        source_operator: meta_data.source_operator_type().to_owned(),
        symbology: None,
        provenance: None,
        tags: Some(vec!["upload".to_owned(), "auto".to_owned()]),
    };

    let result = db.add_dataset(properties.into(), meta_data).await?;

    Ok(web::Json(result.name.into()))
}
619

620
/// Inspects an upload and suggests metadata that can be used when creating a new dataset based on it.
/// Tries to automatically detect the main file and layer name if not specified.
#[utoipa::path(
    tag = "Datasets",
    post,
    path = "/dataset/suggest",
    request_body = SuggestMetaData,
    responses(
        (status = 200, description = "OK", body = MetaDataSuggestion,
            example = json!({
                "mainFile": "germany_polygon.gpkg",
                "metaData": {
                    "type": "OgrMetaData",
                    "loadingInfo": {
                        "fileName": "upload/23c9ea9e-15d6-453b-a243-1390967a5669/germany_polygon.gpkg",
                        "layerName": "test_germany",
                        "dataType": "MultiPolygon",
                        "time": {
                            "type": "none"
                        },
                        "defaultGeometry": null,
                        "columns": {
                            "formatSpecifics": null,
                            "x": "",
                            "y": null,
                            "int": [],
                            "float": [],
                            "text": [],
                            "bool": [],
                            "datetime": [],
                            "rename": null
                        },
                        "forceOgrTimeFilter": false,
                        "forceOgrSpatialFilter": false,
                        "onError": "ignore",
                        "sqlQuery": null,
                        "attributeQuery": null
                    },
                    "resultDescriptor": {
                        "dataType": "MultiPolygon",
                        "spatialReference": "EPSG:4326",
                        "columns": {},
                        "time": null,
                        "bbox": null
                    }
                }
            })
        ),
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
            ("Missing field in query string" = (value = json!({
                "error": "UnableToParseQueryString",
                "message": "Unable to parse query string: missing field `offset`"
            }))),
            ("Number in query string contains letters" = (value = json!({
                "error": "UnableToParseQueryString",
                "message": "Unable to parse query string: invalid digit found in string"
            }))),
            ("Referenced an unknown upload" = (value = json!({
                "error": "UnknownUploadId",
                "message": "Unknown upload id"
            }))),
            ("No suitable mainfile found" = (value = json!({
                "error": "NoMainFileCandidateFound",
                "message": "No main file candidate found"
            }))),
            ("File does not exist" = (value = json!({
                "error": "GdalError",
                "message": "GdalError: GDAL method 'GDALOpenEx' returned a NULL pointer. Error msg: 'upload/0bdd1062-7796-4d44-a655-e548144281a6/asdf: No such file or directory'"
            }))),
            ("Dataset has no auto-importable layer" = (value = json!({
                "error": "DatasetHasNoAutoImportableLayer",
                "message": "Dataset has no auto importable layer"
            })))
        )),
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
    ),
    security(
        ("session_token" = [])
    )
)]
pub async fn suggest_meta_data_handler<C: ApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    suggest: web::Json<SuggestMetaData>,
) -> Result<impl Responder> {
    let suggest = suggest.into_inner();

    // resolve the data path to a root directory plus a main file name
    let (root_path, main_file) = match suggest.data_path {
        DataPath::Upload(upload) => {
            let upload = app_ctx
                .session_context(session)
                .db()
                .load_upload(upload)
                .await?;

            // for uploads, fall back to guessing the main file from the files
            let main_file = suggest
                .main_file
                .or_else(|| suggest_main_file(&upload))
                .ok_or(error::Error::NoMainFileCandidateFound)?;

            let root_path = upload.id.root_path()?;

            (root_path, main_file)
        }
        DataPath::Volume(volume) => {
            // for volumes, the main file must be provided explicitly
            let main_file = suggest
                .main_file
                .ok_or(error::Error::NoMainFileCandidateFound)?;

            let volumes = Volumes::default();

            let root_path = volumes.volumes.iter().find(|v| v.name == volume).ok_or(
                crate::error::Error::UnknownVolumeName {
                    volume_name: volume.0,
                },
            )?;

            (root_path.path.clone(), main_file)
        }
    };

    let layer_name = suggest.layer_name;

    // join the main file onto the root path (presumably rejecting paths that
    // escape `root_path` — see `path_with_base_path`; confirm against its impl)
    let main_file_path = path_with_base_path(&root_path, Path::new(&main_file))?;

    let dataset = gdal_open_dataset(&main_file_path)?;

    // files with OGR layers are suggested as vector (OGR) metadata …
    if dataset.layer_count() > 0 {
        let meta_data = auto_detect_vector_meta_data_definition(&main_file_path, &layer_name)?;

        let layer_name = meta_data.loading_info.layer_name.clone();

        let meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        Ok(web::Json(MetaDataSuggestion {
            main_file,
            layer_name,
            meta_data: meta_data.into(),
        }))
    } else {
        // … everything else is treated as raster data read via GDAL (band 1)
        let mut gdal_params =
            gdal_parameters_from_dataset(&dataset, 1, &main_file_path, None, None)?;
        // store the file path relative to the root path if possible
        if let Ok(relative_path) = gdal_params.file_path.strip_prefix(root_path) {
            gdal_params.file_path = relative_path.to_path_buf();
        }
        let result_descriptor = raster_descriptor_from_dataset(&dataset, 1)?;

        Ok(web::Json(MetaDataSuggestion {
            main_file,
            layer_name: String::new(),
            meta_data: MetaDataDefinition::GdalMetaDataList(GdalMetaDataList {
                result_descriptor: result_descriptor.into(),
                params: vec![GdalLoadingInfoTemporalSlice {
                    time: TimeInterval::default().into(),
                    params: Some(gdal_params.into()),
                    cache_ttl: CacheTtlSeconds::default().into(),
                }],
            }),
        }))
    }
}
781

782
fn suggest_main_file(upload: &Upload) -> Option<String> {
1✔
783
    let known_extensions = ["csv", "shp", "json", "geojson", "gpkg", "sqlite"]; // TODO: rasters
1✔
784

1✔
785
    if upload.files.len() == 1 {
1✔
786
        return Some(upload.files[0].name.clone());
1✔
787
    }
×
788

×
789
    let mut sorted_files = upload.files.clone();
×
790
    sorted_files.sort_by(|a, b| b.byte_size.cmp(&a.byte_size));
×
791

792
    for file in sorted_files {
×
793
        if known_extensions.iter().any(|ext| file.name.ends_with(ext)) {
×
794
            return Some(file.name);
×
795
        }
×
796
    }
797
    None
×
798
}
1✔
799

800
#[allow(clippy::ref_option)]
801
fn select_layer_from_dataset<'a>(
8✔
802
    dataset: &'a Dataset,
8✔
803
    layer_name: &Option<String>,
8✔
804
) -> Result<Layer<'a>> {
8✔
805
    if let Some(ref layer_name) = layer_name {
8✔
806
        dataset.layer_by_name(layer_name).map_err(|_| {
×
807
            crate::error::Error::DatasetInvalidLayerName {
×
808
                layer_name: layer_name.clone(),
×
809
            }
×
810
        })
×
811
    } else {
812
        dataset
8✔
813
            .layer(0)
8✔
814
            .map_err(|_| crate::error::Error::DatasetHasNoAutoImportableLayer)
8✔
815
    }
816
}
8✔
817

818
/// Opens `main_file_path` with GDAL and auto-detects an OGR meta data
/// definition for the given (optional) layer.
///
/// Convenience wrapper around
/// `auto_detect_vector_meta_data_definition_from_dataset` that performs the
/// dataset open itself.
#[allow(clippy::ref_option)]
fn auto_detect_vector_meta_data_definition(
    main_file_path: &Path,
    layer_name: &Option<String>,
) -> Result<StaticMetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>> {
    let dataset = gdal_open_dataset(main_file_path)?;

    auto_detect_vector_meta_data_definition_from_dataset(&dataset, main_file_path, layer_name)
}
827

828
/// Auto-detects an OGR source meta data definition (loading info + result
/// descriptor) for a layer of an already opened GDAL `dataset`.
///
/// Detection steps:
/// 1. select the layer (by `layer_name` or the first one),
/// 2. classify the attribute columns by type,
/// 3. detect the geometry type; if none is found (`VectorDataType::Data`),
///    retry with GDAL open options that hint at likely x/y or WKT columns,
/// 4. guess the time attribute(s) from well-known column names.
#[allow(clippy::ref_option)]
fn auto_detect_vector_meta_data_definition_from_dataset(
    dataset: &Dataset,
    main_file_path: &Path,
    layer_name: &Option<String>,
) -> Result<StaticMetaData<OgrSourceDataset, VectorResultDescriptor, VectorQueryRectangle>> {
    let layer = select_layer_from_dataset(dataset, layer_name)?;

    let columns_map = detect_columns(&layer);
    let columns_vecs = column_map_to_column_vecs(&columns_map);

    let mut geometry = detect_vector_geometry(&layer);
    // x/y stay empty/None unless the re-open below finds coordinate columns
    let mut x = String::new();
    let mut y: Option<String> = None;

    if geometry.data_type == VectorDataType::Data {
        // help Gdal detecting geometry
        if let Some(auto_detect) = gdal_autodetect(main_file_path, &columns_vecs.text) {
            let layer = select_layer_from_dataset(&auto_detect.dataset, layer_name)?;
            geometry = detect_vector_geometry(&layer);
            // only adopt the candidate columns if the hint actually produced a geometry
            if geometry.data_type != VectorDataType::Data {
                x = auto_detect.x;
                y = auto_detect.y;
            }
        }
    }

    let time = detect_time_type(&columns_vecs);

    Ok(StaticMetaData::<_, _, VectorQueryRectangle> {
        loading_info: OgrSourceDataset {
            file_name: main_file_path.into(),
            layer_name: geometry.layer_name.unwrap_or_else(|| layer.name()),
            data_type: Some(geometry.data_type),
            time,
            default_geometry: None,
            columns: Some(OgrSourceColumnSpec {
                format_specifics: None,
                x,
                y,
                int: columns_vecs.int,
                float: columns_vecs.float,
                text: columns_vecs.text,
                bool: vec![],
                // detected date columns are exposed as datetime attributes
                datetime: columns_vecs.date,
                rename: None,
            }),
            force_ogr_time_filter: false,
            force_ogr_spatial_filter: false,
            on_error: OgrSourceErrorSpec::Ignore,
            sql_query: None,
            attribute_query: None,
            cache_ttl: CacheTtlSeconds::default(),
        },
        result_descriptor: VectorResultDescriptor {
            data_type: geometry.data_type,
            spatial_reference: geometry.spatial_reference,
            columns: columns_map
                .into_iter()
                .filter_map(|(k, v)| {
                    v.try_into()
                        .map(|v| {
                            (
                                k,
                                VectorColumnInfo {
                                    data_type: v,
                                    measurement: Measurement::Unitless,
                                },
                            )
                        })
                        .ok()
                }) // ignore all columns here that don't have a corresponding type in our collections
                .collect(),
            time: None,
            bbox: None,
        },
        phantom: Default::default(),
    })
}
907

908
/// Creates a GDAL dataset with auto-detect open options based on the available
/// text `columns`.
///
/// First tries well-known x/y coordinate column pairs, then well-known WKT
/// geometry column names (all matched case-insensitively). Returns `None` if
/// no candidate column is present or the re-open with hints fails.
///
/// NOTE(review): the function returns as soon as the *first* x/y pair is
/// matched — if opening the dataset with that hint fails, the remaining
/// candidates (including the geometry-column fallback) are NOT tried.
fn gdal_autodetect(path: &Path, columns: &[String]) -> Option<GdalAutoDetect> {
    // lower-cased copies for case-insensitive matching; originals are kept to
    // pass the exact column spelling to GDAL
    let columns_lower = columns.iter().map(|s| s.to_lowercase()).collect::<Vec<_>>();

    // TODO: load candidates from config
    let xy = [("x", "y"), ("lon", "lat"), ("longitude", "latitude")];

    for (x, y) in xy {
        let mut found_x = None;
        let mut found_y = None;

        for (column_lower, column) in columns_lower.iter().zip(columns) {
            if x == column_lower {
                found_x = Some(column);
            }

            if y == column_lower {
                found_y = Some(column);
            }

            // both halves of the pair found: re-open with coordinate hints
            if let (Some(x), Some(y)) = (found_x, found_y) {
                let mut dataset_options = DatasetOptions::default();

                let open_opts = &[
                    &format!("X_POSSIBLE_NAMES={x}"),
                    &format!("Y_POSSIBLE_NAMES={y}"),
                    "AUTODETECT_TYPE=YES",
                ];

                dataset_options.open_options = Some(open_opts);

                return gdal_open_dataset_ex(path, dataset_options)
                    .ok()
                    .map(|dataset| GdalAutoDetect {
                        dataset,
                        x: x.clone(),
                        y: Some(y.clone()),
                    });
            }
        }
    }

    // fallback: a single column carrying the whole geometry (e.g. WKT)
    // TODO: load candidates from config
    let geoms = ["geom", "wkt"];
    for geom in geoms {
        for (column_lower, column) in columns_lower.iter().zip(columns) {
            if geom == column_lower {
                let mut dataset_options = DatasetOptions::default();

                let open_opts = &[
                    &format!("GEOM_POSSIBLE_NAMES={column}"),
                    "AUTODETECT_TYPE=YES",
                ];

                dataset_options.open_options = Some(open_opts);

                return gdal_open_dataset_ex(path, dataset_options)
                    .ok()
                    .map(|dataset| GdalAutoDetect {
                        dataset,
                        x: geom.to_owned(),
                        y: None,
                    });
            }
        }
    }

    None
}
977

978
fn detect_time_type(columns: &Columns) -> OgrSourceDatasetTimeType {
7✔
979
    // TODO: load candidate names from config
7✔
980
    let known_start = [
7✔
981
        "start",
7✔
982
        "time",
7✔
983
        "begin",
7✔
984
        "date",
7✔
985
        "time_start",
7✔
986
        "start time",
7✔
987
        "date_start",
7✔
988
        "start date",
7✔
989
        "datetime",
7✔
990
        "date_time",
7✔
991
        "date time",
7✔
992
        "event",
7✔
993
        "timestamp",
7✔
994
        "time_from",
7✔
995
        "t1",
7✔
996
        "t",
7✔
997
    ];
7✔
998
    let known_end = [
7✔
999
        "end",
7✔
1000
        "stop",
7✔
1001
        "time2",
7✔
1002
        "date2",
7✔
1003
        "time_end",
7✔
1004
        "time_stop",
7✔
1005
        "time end",
7✔
1006
        "time stop",
7✔
1007
        "end time",
7✔
1008
        "stop time",
7✔
1009
        "date_end",
7✔
1010
        "date_stop",
7✔
1011
        "date end",
7✔
1012
        "date stop",
7✔
1013
        "end date",
7✔
1014
        "stop date",
7✔
1015
        "time_to",
7✔
1016
        "t2",
7✔
1017
    ];
7✔
1018
    let known_duration = ["duration", "length", "valid for", "valid_for"];
7✔
1019

7✔
1020
    let mut start = None;
7✔
1021
    let mut end = None;
7✔
1022
    for column in &columns.date {
11✔
1023
        if known_start.contains(&column.as_ref()) && start.is_none() {
7✔
1024
            start = Some(column);
4✔
1025
        } else if known_end.contains(&column.as_ref()) && end.is_none() {
4✔
1026
            end = Some(column);
3✔
1027
        }
3✔
1028

1029
        if start.is_some() && end.is_some() {
7✔
1030
            break;
3✔
1031
        }
4✔
1032
    }
1033

1034
    let duration = columns
7✔
1035
        .int
7✔
1036
        .iter()
7✔
1037
        .find(|c| known_duration.contains(&c.as_ref()));
7✔
1038

7✔
1039
    match (start, end, duration) {
7✔
1040
        (Some(start), Some(end), _) => OgrSourceDatasetTimeType::StartEnd {
3✔
1041
            start_field: start.clone(),
3✔
1042
            start_format: OgrSourceTimeFormat::Auto,
3✔
1043
            end_field: end.clone(),
3✔
1044
            end_format: OgrSourceTimeFormat::Auto,
3✔
1045
        },
3✔
1046
        (Some(start), None, Some(duration)) => OgrSourceDatasetTimeType::StartDuration {
1✔
1047
            start_field: start.clone(),
1✔
1048
            start_format: OgrSourceTimeFormat::Auto,
1✔
1049
            duration_field: duration.clone(),
1✔
1050
        },
1✔
1051
        (Some(start), None, None) => OgrSourceDatasetTimeType::Start {
×
1052
            start_field: start.clone(),
×
1053
            start_format: OgrSourceTimeFormat::Auto,
×
1054
            duration: OgrSourceDurationSpec::Zero,
×
1055
        },
×
1056
        _ => OgrSourceDatasetTimeType::None,
3✔
1057
    }
1058
}
7✔
1059

1060
fn detect_vector_geometry(layer: &Layer) -> DetectedGeometry {
8✔
1061
    for g in layer.defn().geom_fields() {
8✔
1062
        if let Ok(data_type) = VectorDataType::try_from_ogr_type_code(g.field_type()) {
7✔
1063
            return DetectedGeometry {
7✔
1064
                layer_name: Some(layer.name()),
7✔
1065
                data_type,
7✔
1066
                spatial_reference: g
7✔
1067
                    .spatial_ref()
7✔
1068
                    .context(error::Gdal)
7✔
1069
                    .and_then(|s| {
7✔
1070
                        let s: Result<SpatialReference> = s.try_into().map_err(Into::into);
6✔
1071
                        s
6✔
1072
                    })
7✔
1073
                    .map(Into::into)
7✔
1074
                    .unwrap_or(SpatialReferenceOption::Unreferenced),
7✔
1075
            };
7✔
1076
        }
×
1077
    }
1078

1079
    // fallback type if no geometry was found
1080
    DetectedGeometry {
1✔
1081
        layer_name: Some(layer.name()),
1✔
1082
        data_type: VectorDataType::Data,
1✔
1083
        spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1084
    }
1✔
1085
}
8✔
1086

1087
/// Result of re-opening a dataset with geometry auto-detect hints.
struct GdalAutoDetect {
    // the dataset re-opened with X/Y_POSSIBLE_NAMES or GEOM_POSSIBLE_NAMES hints
    dataset: Dataset,
    // name of the x-coordinate (or single geometry/WKT) column
    x: String,
    // name of the y-coordinate column; `None` for single-column geometries
    y: Option<String>,
}
1092

1093
/// Geometry information detected for a single layer.
struct DetectedGeometry {
    // name of the layer the geometry was found in
    layer_name: Option<String>,
    // detected vector type; `VectorDataType::Data` means "no geometry found"
    data_type: VectorDataType,
    // detected SRS, `Unreferenced` if none could be determined
    spatial_reference: SpatialReferenceOption,
}
1098

1099
/// Column names of a layer, grouped by their detected data type.
struct Columns {
    int: Vec<String>,
    float: Vec<String>,
    text: Vec<String>,
    date: Vec<String>,
}
1105

1106
/// Coarse classification of an OGR attribute field's data type.
enum ColumnDataType {
    Int,
    Float,
    Text,
    Date,
    // OGR field types without a counterpart in our type system
    Unknown,
}
1113

1114
impl TryFrom<ColumnDataType> for FeatureDataType {
    type Error = error::Error;

    /// Maps a detected column type to the engine's `FeatureDataType`.
    /// `Unknown` has no counterpart and yields an error.
    fn try_from(value: ColumnDataType) -> Result<Self, Self::Error> {
        match value {
            ColumnDataType::Int => Ok(Self::Int),
            ColumnDataType::Float => Ok(Self::Float),
            ColumnDataType::Text => Ok(Self::Text),
            ColumnDataType::Date => Ok(Self::DateTime),
            ColumnDataType::Unknown => Err(error::Error::NoFeatureDataTypeForColumnDataType),
        }
    }
}
1127

1128
impl TryFrom<ColumnDataType> for crate::api::model::datatypes::FeatureDataType {
    type Error = error::Error;

    /// Same mapping as for the engine type, but targeting the API model's
    /// `FeatureDataType`; `Unknown` yields an error.
    fn try_from(value: ColumnDataType) -> Result<Self, Self::Error> {
        match value {
            ColumnDataType::Int => Ok(Self::Int),
            ColumnDataType::Float => Ok(Self::Float),
            ColumnDataType::Text => Ok(Self::Text),
            ColumnDataType::Date => Ok(Self::DateTime),
            ColumnDataType::Unknown => Err(error::Error::NoFeatureDataTypeForColumnDataType),
        }
    }
}
1141

1142
fn detect_columns(layer: &Layer) -> HashMap<String, ColumnDataType> {
7✔
1143
    let mut columns = HashMap::default();
7✔
1144

1145
    for field in layer.defn().fields() {
18✔
1146
        let field_type = field.field_type();
18✔
1147

1148
        let data_type = match field_type {
18✔
1149
            OGRFieldType::OFTInteger | OGRFieldType::OFTInteger64 => ColumnDataType::Int,
3✔
1150
            OGRFieldType::OFTReal => ColumnDataType::Float,
1✔
1151
            OGRFieldType::OFTString => ColumnDataType::Text,
7✔
1152
            OGRFieldType::OFTDate | OGRFieldType::OFTDateTime => ColumnDataType::Date,
7✔
1153
            _ => ColumnDataType::Unknown,
×
1154
        };
1155

1156
        columns.insert(field.name(), data_type);
18✔
1157
    }
1158

1159
    columns
7✔
1160
}
7✔
1161

1162
fn column_map_to_column_vecs(columns: &HashMap<String, ColumnDataType>) -> Columns {
7✔
1163
    let mut int = Vec::new();
7✔
1164
    let mut float = Vec::new();
7✔
1165
    let mut text = Vec::new();
7✔
1166
    let mut date = Vec::new();
7✔
1167

1168
    for (k, v) in columns {
25✔
1169
        match v {
18✔
1170
            ColumnDataType::Int => int.push(k.clone()),
3✔
1171
            ColumnDataType::Float => float.push(k.clone()),
1✔
1172
            ColumnDataType::Text => text.push(k.clone()),
7✔
1173
            ColumnDataType::Date => date.push(k.clone()),
7✔
1174
            ColumnDataType::Unknown => {}
×
1175
        }
1176
    }
1177

1178
    Columns {
7✔
1179
        int,
7✔
1180
        float,
7✔
1181
        text,
7✔
1182
        date,
7✔
1183
    }
7✔
1184
}
7✔
1185

1186
/// Delete a dataset
1187
#[utoipa::path(
4✔
1188
    tag = "Datasets",
4✔
1189
    delete,
4✔
1190
    path = "/dataset/{dataset}",
4✔
1191
    responses(
4✔
1192
        (status = 200, description = "OK"),
4✔
1193
        (status = 400, description = "Bad request", body = ErrorResponse, examples(
4✔
1194
            ("Referenced an unknown dataset" = (value = json!({
4✔
1195
                "error": "UnknownDatasetName",
4✔
1196
                "message": "Unknown dataset name"
4✔
1197
            }))),
4✔
1198
            ("Given dataset can only be deleted by owner" = (value = json!({
4✔
1199
                "error": "OperationRequiresOwnerPermission",
4✔
1200
                "message": "Operation requires owner permission"
4✔
1201
            })))
4✔
1202
        )),
4✔
1203
        (status = 401, response = crate::api::model::responses::UnauthorizedUserResponse)
4✔
1204
    ),
4✔
1205
    params(
4✔
1206
        ("dataset" = DatasetName, description = "Dataset id")
4✔
1207
    ),
4✔
1208
    security(
4✔
1209
        ("session_token" = [])
4✔
1210
    )
4✔
1211
)]
4✔
1212
pub async fn delete_dataset_handler<C: ApplicationContext>(
3✔
1213
    dataset: web::Path<DatasetName>,
3✔
1214
    session: C::Session,
3✔
1215
    app_ctx: web::Data<C>,
3✔
1216
) -> Result<HttpResponse> {
3✔
1217
    let session_ctx = app_ctx.session_context(session).db();
3✔
1218

3✔
1219
    let real_dataset = dataset.into_inner();
3✔
1220

1221
    let dataset_id = session_ctx
3✔
1222
        .resolve_dataset_name_to_id(&real_dataset)
3✔
1223
        .await?;
3✔
1224

1225
    // handle the case where the dataset name is not known
1226
    let dataset_id = dataset_id.ok_or(error::Error::UnknownDatasetName {
3✔
1227
        dataset_name: real_dataset.to_string(),
3✔
1228
    })?;
3✔
1229

1230
    session_ctx.delete_dataset(dataset_id).await?;
3✔
1231

1232
    Ok(actix_web::HttpResponse::Ok().finish())
3✔
1233
}
3✔
1234

1235
/// Response body listing the layer names of a file in a volume.
#[derive(Deserialize, Serialize, ToSchema, ToResponse)]
pub struct VolumeFileLayersResponse {
    layers: Vec<String>,
}
1239

1240
/// List the layers of a file in a volume.
1241
#[utoipa::path(
4✔
1242
    tag = "Datasets",
4✔
1243
    get,
4✔
1244
    path = "/dataset/volumes/{volume_name}/files/{file_name}/layers",
4✔
1245
    responses(
4✔
1246
        (status = 200, body = VolumeFileLayersResponse,
4✔
1247
             example = json!({"layers": ["layer1", "layer2"]}))
4✔
1248
    ),
4✔
1249
    params(
4✔
1250
        ("volume_name" = VolumeName, description = "Volume name"),
4✔
1251
        ("file_name" = String, description = "File name")
4✔
1252
    ),
4✔
1253
    security(
4✔
1254
        ("session_token" = [])
4✔
1255
    )
4✔
1256
)]
4✔
1257
pub async fn list_volume_file_layers_handler<C: ApplicationContext>(
1✔
1258
    path: web::Path<(VolumeName, String)>,
1✔
1259
    session: C::Session,
1✔
1260
    app_ctx: web::Data<C>,
1✔
1261
) -> Result<impl Responder> {
1✔
1262
    let (volume_name, file_name) = path.into_inner();
1✔
1263

1✔
1264
    let session_ctx = app_ctx.session_context(session);
1✔
1265
    let volumes = session_ctx.volumes()?;
1✔
1266

1267
    let volume = volumes.iter().find(|v| v.name == volume_name.0).ok_or(
1✔
1268
        crate::error::Error::UnknownVolumeName {
1✔
1269
            volume_name: volume_name.0.clone(),
1✔
1270
        },
1✔
1271
    )?;
1✔
1272

1273
    let Some(volume_path) = volume.path.as_ref() else {
1✔
1274
        return Err(crate::error::Error::CannotAccessVolumePath {
×
1275
            volume_name: volume_name.0.clone(),
×
1276
        });
×
1277
    };
1278

1279
    let file_path = path_with_base_path(Path::new(volume_path), Path::new(&file_name))?;
1✔
1280

1281
    let layers = crate::util::spawn_blocking(move || {
1✔
1282
        let dataset = gdal_open_dataset(&file_path)?;
1✔
1283

1284
        // TODO: hide system/internal layer like "layer_styles"
1285
        Result::<_, Error>::Ok(dataset.layers().map(|l| l.name()).collect::<Vec<_>>())
3✔
1286
    })
1✔
1287
    .await??;
1✔
1288

1289
    Ok(web::Json(VolumeFileLayersResponse { layers }))
1✔
1290
}
1✔
1291

1292
/// Creates a new dataset referencing files.
1293
/// Users can reference previously uploaded files.
1294
/// Admins can reference files from a volume.
1295
#[utoipa::path(
4✔
1296
    tag = "Datasets",
4✔
1297
    post,
4✔
1298
    path = "/dataset", 
4✔
1299
    request_body = CreateDataset,
4✔
1300
    responses(
4✔
1301
        (status = 200, response = DatasetNameResponse),
4✔
1302
    ),
4✔
1303
    security(
4✔
1304
        ("session_token" = [])
4✔
1305
    )
4✔
1306
)]
4✔
1307
async fn create_dataset_handler<C: ProApplicationContext>(
6✔
1308
    session: C::Session,
6✔
1309
    app_ctx: web::Data<C>,
6✔
1310
    create: web::Json<CreateDataset>,
6✔
1311
) -> Result<web::Json<DatasetNameResponse>, CreateDatasetError>
6✔
1312
where
6✔
1313
    <<C as ApplicationContext>::SessionContext as SessionContext>::GeoEngineDB: ProGeoEngineDb,
6✔
1314
{
6✔
1315
    let create = create.into_inner();
6✔
1316
    match create {
6✔
1317
        CreateDataset {
1318
            data_path: DataPath::Volume(upload),
4✔
1319
            definition,
4✔
1320
        } => create_system_dataset(session, app_ctx, upload, definition).await,
4✔
1321
        CreateDataset {
1322
            data_path: DataPath::Upload(volume),
2✔
1323
            definition,
2✔
1324
        } => create_upload_dataset(session, app_ctx, volume, definition).await,
2✔
1325
    }
1326
}
6✔
1327

1328
/// Creates a system (volume-backed) dataset and makes it readable by all users.
///
/// Resolves `volume_name` against the configured volumes, rewrites the meta
/// data's file paths relative to the volume, stores the dataset, and grants
/// read permission to both the registered-user and the anonymous role.
async fn create_system_dataset<C: ProApplicationContext>(
    session: C::Session,
    app_ctx: web::Data<C>,
    volume_name: VolumeName,
    mut definition: DatasetDefinition,
) -> Result<web::Json<DatasetNameResponse>, CreateDatasetError>
where
    <<C as ApplicationContext>::SessionContext as SessionContext>::GeoEngineDB: ProGeoEngineDb,
{
    // look up the volume's root path in the configuration
    let volumes = get_config_element::<Data>()
        .context(CannotAccessConfig)?
        .volumes;
    let volume_path = volumes
        .get(&volume_name)
        .ok_or(CreateDatasetError::UnknownVolume)?;
    let volume = Volume {
        name: volume_name,
        path: volume_path.clone(),
    };

    // make the meta data's file references point into the volume
    adjust_meta_data_path(&mut definition.meta_data, &volume)
        .context(CannotResolveUploadFilePath)?;

    let db = app_ctx.session_context(session).db();

    let dataset = db
        .add_dataset(definition.properties.into(), definition.meta_data.into())
        .await
        .context(CannotCreateDataset)?;

    // system datasets are readable by every registered user …
    db.add_permission(
        Role::registered_user_role_id(),
        dataset.id,
        Permission::Read,
    )
    .await
    .boxed_context(crate::error::PermissionDb)
    .context(DatabaseAccess)?;

    // … and by anonymous sessions
    db.add_permission(Role::anonymous_role_id(), dataset.id, Permission::Read)
        .await
        .boxed_context(crate::error::PermissionDb)
        .context(DatabaseAccess)?;

    Ok(web::Json(dataset.name.into()))
}
1374

1375
#[cfg(test)]
1376
mod tests {
1377
    use super::*;
1378
    use crate::api::model::datatypes::NamedData;
1379
    use crate::api::model::responses::datasets::DatasetNameResponse;
1380
    use crate::api::model::responses::IdResponse;
1381
    use crate::api::model::services::{DatasetDefinition, Provenance};
1382
    use crate::contexts::{Session, SessionId};
1383
    use crate::datasets::storage::DatasetStore;
1384
    use crate::datasets::upload::{UploadId, VolumeName};
1385
    use crate::datasets::DatasetIdAndName;
1386
    use crate::error::Result;
1387
    use crate::pro::contexts::ProPostgresContext;
1388
    use crate::pro::ge_context;
1389
    use crate::pro::users::UserAuth;
1390
    use crate::pro::util::tests::admin_login;
1391
    use crate::projects::{PointSymbology, RasterSymbology, Symbology};
1392
    use crate::test_data;
1393
    use crate::util::tests::{
1394
        add_pro_file_definition_to_datasets, read_body_json, read_body_string, send_test_request,
1395
        MockQueryContext, SetMultipartBody, TestDataUploads,
1396
    };
1397
    use actix_web;
1398
    use actix_web::http::header;
1399
    use actix_web_httpauth::headers::authorization::Bearer;
1400
    use futures::TryStreamExt;
1401
    use geoengine_datatypes::collections::{
1402
        GeometryCollection, MultiPointCollection, VectorDataType,
1403
    };
1404
    use geoengine_datatypes::operations::image::{RasterColorizer, RgbaColor};
1405
    use geoengine_datatypes::primitives::{BoundingBox2D, ColumnSelection, SpatialResolution};
1406
    use geoengine_datatypes::raster::{GridShape2D, TilingSpecification};
1407
    use geoengine_datatypes::spatial_reference::SpatialReferenceOption;
1408
    use geoengine_operators::engine::{
1409
        ExecutionContext, InitializedVectorOperator, QueryProcessor, StaticMetaData,
1410
        VectorOperator, VectorResultDescriptor, WorkflowOperatorPath,
1411
    };
1412
    use geoengine_operators::source::{
1413
        OgrSource, OgrSourceDataset, OgrSourceErrorSpec, OgrSourceParameters,
1414
    };
1415
    use geoengine_operators::util::gdal::create_ndvi_meta_data;
1416
    use serde_json::{json, Value};
1417
    use tokio_postgres::NoTls;
1418

1419
    #[ge_context::test]
2✔
1420
    #[allow(clippy::too_many_lines)]
1421
    async fn test_list_datasets(app_ctx: ProPostgresContext<NoTls>) {
1✔
1422
        let session = admin_login(&app_ctx).await;
1✔
1423
        let ctx = app_ctx.session_context(session.clone());
1✔
1424

1✔
1425
        let descriptor = VectorResultDescriptor {
1✔
1426
            data_type: VectorDataType::MultiPoint,
1✔
1427
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
1428
            columns: Default::default(),
1✔
1429
            time: None,
1✔
1430
            bbox: None,
1✔
1431
        };
1✔
1432

1✔
1433
        let ds = AddDataset {
1✔
1434
            name: Some(DatasetName::new(None, "My_Dataset")),
1✔
1435
            display_name: "OgrDataset".to_string(),
1✔
1436
            description: "My Ogr dataset".to_string(),
1✔
1437
            source_operator: "OgrSource".to_string(),
1✔
1438
            symbology: None,
1✔
1439
            provenance: None,
1✔
1440
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
1441
        };
1✔
1442

1✔
1443
        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
1✔
1444
            loading_info: OgrSourceDataset {
1✔
1445
                file_name: Default::default(),
1✔
1446
                layer_name: String::new(),
1✔
1447
                data_type: None,
1✔
1448
                time: Default::default(),
1✔
1449
                default_geometry: None,
1✔
1450
                columns: None,
1✔
1451
                force_ogr_time_filter: false,
1✔
1452
                force_ogr_spatial_filter: false,
1✔
1453
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1454
                sql_query: None,
1✔
1455
                attribute_query: None,
1✔
1456
                cache_ttl: CacheTtlSeconds::default(),
1✔
1457
            },
1✔
1458
            result_descriptor: descriptor.clone(),
1✔
1459
            phantom: Default::default(),
1✔
1460
        });
1✔
1461

1✔
1462
        let db = ctx.db();
1✔
1463
        let DatasetIdAndName { id: id1, name: _ } = db.add_dataset(ds.into(), meta).await.unwrap();
1✔
1464

1✔
1465
        let ds = AddDataset {
1✔
1466
            name: Some(DatasetName::new(None, "My_Dataset2")),
1✔
1467
            display_name: "OgrDataset2".to_string(),
1✔
1468
            description: "My Ogr dataset2".to_string(),
1✔
1469
            source_operator: "OgrSource".to_string(),
1✔
1470
            symbology: Some(Symbology::Point(PointSymbology::default())),
1✔
1471
            provenance: None,
1✔
1472
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
1473
        };
1✔
1474

1✔
1475
        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
1✔
1476
            loading_info: OgrSourceDataset {
1✔
1477
                file_name: Default::default(),
1✔
1478
                layer_name: String::new(),
1✔
1479
                data_type: None,
1✔
1480
                time: Default::default(),
1✔
1481
                default_geometry: None,
1✔
1482
                columns: None,
1✔
1483
                force_ogr_time_filter: false,
1✔
1484
                force_ogr_spatial_filter: false,
1✔
1485
                on_error: OgrSourceErrorSpec::Ignore,
1✔
1486
                sql_query: None,
1✔
1487
                attribute_query: None,
1✔
1488
                cache_ttl: CacheTtlSeconds::default(),
1✔
1489
            },
1✔
1490
            result_descriptor: descriptor,
1✔
1491
            phantom: Default::default(),
1✔
1492
        });
1✔
1493

1494
        let DatasetIdAndName { id: id2, name: _ } = db.add_dataset(ds.into(), meta).await.unwrap();
1✔
1495

1✔
1496
        let req = actix_web::test::TestRequest::get()
1✔
1497
            .uri(&format!(
1✔
1498
                "/datasets?{}",
1✔
1499
                &serde_urlencoded::to_string([
1✔
1500
                    ("order", "NameAsc"),
1✔
1501
                    ("offset", "0"),
1✔
1502
                    ("limit", "2"),
1✔
1503
                ])
1✔
1504
                .unwrap()
1✔
1505
            ))
1✔
1506
            .append_header((header::CONTENT_LENGTH, 0))
1✔
1507
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
1✔
1508
        let res = send_test_request(req, app_ctx).await;
1✔
1509

1510
        assert_eq!(res.status(), 200);
1✔
1511

1512
        assert_eq!(
1✔
1513
            read_body_json(res).await,
1✔
1514
            json!([ {
1✔
1515
                "id": id1,
1✔
1516
                "name": "My_Dataset",
1✔
1517
                "displayName": "OgrDataset",
1✔
1518
                "description": "My Ogr dataset",
1✔
1519
                "tags": ["upload", "test"],
1✔
1520
                "sourceOperator": "OgrSource",
1✔
1521
                "resultDescriptor": {
1✔
1522
                    "type": "vector",
1✔
1523
                    "dataType": "MultiPoint",
1✔
1524
                    "spatialReference": "",
1✔
1525
                    "columns": {},
1✔
1526
                    "time": null,
1✔
1527
                    "bbox": null
1✔
1528
                },
1✔
1529
                "symbology": null
1✔
1530
            },{
1✔
1531
                "id": id2,
1✔
1532
                "name": "My_Dataset2",
1✔
1533
                "displayName": "OgrDataset2",
1✔
1534
                "description": "My Ogr dataset2",
1✔
1535
                "tags": ["upload", "test"],
1✔
1536
                "sourceOperator": "OgrSource",
1✔
1537
                "resultDescriptor": {
1✔
1538
                    "type": "vector",
1✔
1539
                    "dataType": "MultiPoint",
1✔
1540
                    "spatialReference": "",
1✔
1541
                    "columns": {},
1✔
1542
                    "time": null,
1✔
1543
                    "bbox": null
1✔
1544
                },
1✔
1545
                "symbology": {
1✔
1546
                    "type": "point",
1✔
1547
                    "radius": {
1✔
1548
                        "type": "static",
1✔
1549
                        "value": 10
1✔
1550
                    },
1✔
1551
                    "fillColor": {
1✔
1552
                        "type": "static",
1✔
1553
                        "color": [255, 255, 255, 255]
1✔
1554
                    },
1✔
1555
                    "stroke": {
1✔
1556
                        "width": {
1✔
1557
                            "type": "static",
1✔
1558
                            "value": 1
1✔
1559
                        },
1✔
1560
                        "color": {
1✔
1561
                            "type": "static",
1✔
1562
                            "color": [0, 0, 0, 255]
1✔
1563
                        }
1✔
1564
                    },
1✔
1565
                    "text": null
1✔
1566
                }
1✔
1567
            }])
1✔
1568
        );
1569
    }
1✔
1570

1571
    async fn upload_ne_10m_ports_files(
2✔
1572
        app_ctx: ProPostgresContext<NoTls>,
2✔
1573
        session_id: SessionId,
2✔
1574
    ) -> Result<UploadId> {
2✔
1575
        let files = vec![
2✔
1576
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp").to_path_buf(),
2✔
1577
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.shx").to_path_buf(),
2✔
1578
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.prj").to_path_buf(),
2✔
1579
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.dbf").to_path_buf(),
2✔
1580
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.cpg").to_path_buf(),
2✔
1581
        ];
2✔
1582

2✔
1583
        let req = actix_web::test::TestRequest::post()
2✔
1584
            .uri("/upload")
2✔
1585
            .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string())))
2✔
1586
            .set_multipart_files(&files);
2✔
1587
        let res = send_test_request(req, app_ctx).await;
2✔
1588
        assert_eq!(res.status(), 200);
2✔
1589

1590
        let upload: IdResponse<UploadId> = actix_web::test::read_body_json(res).await;
2✔
1591
        let root = upload.id.root_path()?;
2✔
1592

1593
        for file in files {
12✔
1594
            let file_name = file.file_name().unwrap();
10✔
1595
            assert!(root.join(file_name).exists());
10✔
1596
        }
1597

1598
        Ok(upload.id)
2✔
1599
    }
2✔
1600

1601
    pub async fn construct_dataset_from_upload(
2✔
1602
        app_ctx: ProPostgresContext<NoTls>,
2✔
1603
        upload_id: UploadId,
2✔
1604
        session_id: SessionId,
2✔
1605
    ) -> DatasetName {
2✔
1606
        let s = json!({
2✔
1607
            "dataPath": {
2✔
1608
                "upload": upload_id
2✔
1609
            },
2✔
1610
            "definition": {
2✔
1611
                "properties": {
2✔
1612
                    "name": null,
2✔
1613
                    "displayName": "Uploaded Natural Earth 10m Ports",
2✔
1614
                    "description": "Ports from Natural Earth",
2✔
1615
                    "sourceOperator": "OgrSource"
2✔
1616
                },
2✔
1617
                "metaData": {
2✔
1618
                    "type": "OgrMetaData",
2✔
1619
                    "loadingInfo": {
2✔
1620
                        "fileName": "ne_10m_ports.shp",
2✔
1621
                        "layerName": "ne_10m_ports",
2✔
1622
                        "dataType": "MultiPoint",
2✔
1623
                        "time": {
2✔
1624
                            "type": "none"
2✔
1625
                        },
2✔
1626
                        "columns": {
2✔
1627
                            "x": "",
2✔
1628
                            "y": null,
2✔
1629
                            "float": ["natlscale"],
2✔
1630
                            "int": ["scalerank"],
2✔
1631
                            "text": ["featurecla", "name", "website"],
2✔
1632
                            "bool": [],
2✔
1633
                            "datetime": []
2✔
1634
                        },
2✔
1635
                        "forceOgrTimeGilter": false,
2✔
1636
                        "onError": "ignore",
2✔
1637
                        "provenance": null
2✔
1638
                    },
2✔
1639
                    "resultDescriptor": {
2✔
1640
                        "dataType": "MultiPoint",
2✔
1641
                        "spatialReference": "EPSG:4326",
2✔
1642
                        "columns": {
2✔
1643
                            "website": {
2✔
1644
                                "dataType": "text",
2✔
1645
                                "measurement": {
2✔
1646
                                    "type": "unitless"
2✔
1647
                                }
2✔
1648
                            },
2✔
1649
                            "name": {
2✔
1650
                                "dataType": "text",
2✔
1651
                                "measurement": {
2✔
1652
                                    "type": "unitless"
2✔
1653
                                }
2✔
1654
                            },
2✔
1655
                            "natlscale": {
2✔
1656
                                "dataType": "float",
2✔
1657
                                "measurement": {
2✔
1658
                                    "type": "unitless"
2✔
1659
                                }
2✔
1660
                            },
2✔
1661
                            "scalerank": {
2✔
1662
                                "dataType": "int",
2✔
1663
                                "measurement": {
2✔
1664
                                    "type": "unitless"
2✔
1665
                                }
2✔
1666
                            },
2✔
1667
                            "featurecla": {
2✔
1668
                                "dataType": "text",
2✔
1669
                                "measurement": {
2✔
1670
                                    "type": "unitless"
2✔
1671
                                }
2✔
1672
                            }
2✔
1673
                        }
2✔
1674
                    }
2✔
1675
                }
2✔
1676
            }
2✔
1677
        });
2✔
1678

2✔
1679
        let req = actix_web::test::TestRequest::post()
2✔
1680
            .uri("/dataset")
2✔
1681
            .append_header((header::CONTENT_LENGTH, 0))
2✔
1682
            .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string())))
2✔
1683
            .set_json(s);
2✔
1684
        let res = send_test_request(req, app_ctx).await;
2✔
1685
        assert_eq!(res.status(), 200, "response: {res:?}");
2✔
1686

1687
        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
2✔
1688
        dataset_name
2✔
1689
    }
2✔
1690

1691
    async fn make_ogr_source<C: ExecutionContext>(
1✔
1692
        exe_ctx: &C,
1✔
1693
        named_data: NamedData,
1✔
1694
    ) -> Result<Box<dyn InitializedVectorOperator>> {
1✔
1695
        OgrSource {
1✔
1696
            params: OgrSourceParameters {
1✔
1697
                data: named_data.into(),
1✔
1698
                attribute_projection: None,
1✔
1699
                attribute_filters: None,
1✔
1700
            },
1✔
1701
        }
1✔
1702
        .boxed()
1✔
1703
        .initialize(WorkflowOperatorPath::initialize_root(), exe_ctx)
1✔
1704
        .await
1✔
1705
        .map_err(Into::into)
1✔
1706
    }
1✔
1707

1708
    #[ge_context::test]
2✔
1709
    async fn it_creates_system_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
1710
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
1711

1✔
1712
        let volume = VolumeName("test_data".to_string());
1✔
1713

1✔
1714
        let mut meta_data = create_ndvi_meta_data();
1✔
1715

1✔
1716
        // make path relative to volume
1✔
1717
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();
1✔
1718

1✔
1719
        let create = CreateDataset {
1✔
1720
            data_path: DataPath::Volume(volume.clone()),
1✔
1721
            definition: DatasetDefinition {
1✔
1722
                properties: AddDataset {
1✔
1723
                    name: None,
1✔
1724
                    display_name: "ndvi".to_string(),
1✔
1725
                    description: "ndvi".to_string(),
1✔
1726
                    source_operator: "GdalSource".to_string(),
1✔
1727
                    symbology: None,
1✔
1728
                    provenance: None,
1✔
1729
                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
1730
                },
1✔
1731
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
1✔
1732
            },
1✔
1733
        };
1✔
1734

1735
        // create via admin session
1736
        let req = actix_web::test::TestRequest::post()
1✔
1737
            .uri("/dataset")
1✔
1738
            .append_header((header::CONTENT_LENGTH, 0))
1✔
1739
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
1740
            .append_header((header::CONTENT_TYPE, "application/json"))
1✔
1741
            .set_payload(serde_json::to_string(&create)?);
1✔
1742
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
1743
        assert_eq!(res.status(), 200);
1✔
1744

1745
        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
1✔
1746

1747
        // assert dataset is accessible via regular session
1748
        let req = actix_web::test::TestRequest::get()
1✔
1749
            .uri(&format!("/dataset/{dataset_name}"))
1✔
1750
            .append_header((header::CONTENT_LENGTH, 0))
1✔
1751
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
1752
            .append_header((header::CONTENT_TYPE, "application/json"))
1✔
1753
            .set_payload(serde_json::to_string(&create)?);
1✔
1754

1755
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
1756
        assert_eq!(res.status(), 200);
1✔
1757

1758
        Ok(())
1✔
1759
    }
1✔
1760

1761
    #[test]
    // Auto-detection on the Natural Earth ports shapefile must yield the
    // expected OGR loading info (column types) and vector result descriptor.
    fn it_auto_detects() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp"),
            &None,
        )
        .unwrap();
        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        // detection order of text columns is not deterministic — sort before
        // comparing against the literal expectation below
        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.text.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp").into(),
                    layer_name: "ne_10m_ports".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    // the ports file carries no time columns
                    time: OgrSourceDatasetTimeType::None,
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        int: vec!["scalerank".to_string()],
                        float: vec!["natlscale".to_string()],
                        text: vec![
                            "featurecla".to_string(),
                            "name".to_string(),
                            "website".to_string(),
                        ],
                        bool: vec![],
                        datetime: vec![],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    // entry order is irrelevant: collected into a map
                    columns: [
                        (
                            "name".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "scalerank".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Int,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "website".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "natlscale".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Float,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "featurecla".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            })
        );
    }
1✔
1858

1859
    #[test]
    // A GeoJSON file with ISO time columns must be detected as a
    // start/end-typed dataset with `Auto` time formats.
    fn it_detects_time_json() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_iso_time.json"),
            &None,
        )
        .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        // detection order of datetime columns is not deterministic — sort
        // before comparing against the literal expectation below
        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.datetime.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_iso_time.json").into(),
                    layer_name: "points_with_iso_time".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    // both a start and an end column were found
                    time: OgrSourceDatasetTimeType::StartEnd {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        end_field: "time_end".to_owned(),
                        end_format: OgrSourceTimeFormat::Auto,
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec![],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_end".to_owned(), "time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "time_end".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default()
            })
        );
    }
1✔
1937

1938
    #[test]
    // A GeoPackage with native datetime columns must be detected as a
    // start/end-typed dataset, same expectation as the GeoJSON variant.
    fn it_detects_time_gpkg() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_time.gpkg"),
            &None,
        )
        .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        // detection order of datetime columns is not deterministic — sort
        // before comparing against the literal expectation below
        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.datetime.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_time.gpkg").into(),
                    layer_name: "points_with_time".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartEnd {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        end_field: "time_end".to_owned(),
                        end_format: OgrSourceTimeFormat::Auto,
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec![],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_end".to_owned(), "time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "time_end".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            })
        );
    }
1✔
2016

2017
    #[test]
    // A shapefile with date columns must be detected as a start/end-typed
    // dataset with `Auto` time formats.
    fn it_detects_time_shp() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_date.shp"),
            &None,
        )
        .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        // detection order of datetime columns is not deterministic — sort
        // before comparing against the literal expectation below
        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.datetime.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_date.shp").into(),
                    layer_name: "points_with_date".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartEnd {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        end_field: "time_end".to_owned(),
                        end_format: OgrSourceTimeFormat::Auto,
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        int: vec![],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_end".to_owned(), "time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    // entry order is irrelevant: collected into a map
                    columns: [
                        (
                            "time_end".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default(),
            })
        );
    }
1✔
2095

2096
    #[test]
    // A file with a start column and an integer duration column must be
    // detected as a start+duration-typed dataset.
    fn it_detects_time_start_duration() {
        let meta_data = auto_detect_vector_meta_data_definition(
            test_data!("vector/data/points_with_iso_start_duration.json"),
            &None,
        )
        .unwrap();

        // only one datetime column is detected here, so no sorting is needed
        let meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/points_with_iso_start_duration.json").into(),
                    layer_name: "points_with_iso_start_duration".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::StartDuration {
                        start_field: "time_start".to_owned(),
                        start_format: OgrSourceTimeFormat::Auto,
                        duration_field: "duration".to_owned(),
                    },
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        x: String::new(),
                        y: None,
                        float: vec![],
                        // the duration column is an ordinary int attribute
                        int: vec!["duration".to_owned()],
                        text: vec![],
                        bool: vec![],
                        datetime: vec!["time_start".to_owned()],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    spatial_reference: SpatialReference::epsg_4326().into(),
                    columns: [
                        (
                            "time_start".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::DateTime,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "duration".to_owned(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Int,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default()
            })
        );
    }
1✔
2166

2167
    #[test]
    // A lon/lat CSV must be detected with x/y coordinate columns; CSV columns
    // are untyped, so everything is read as text and the dataset carries no
    // spatial reference.
    fn it_detects_csv() {
        let meta_data =
            auto_detect_vector_meta_data_definition(test_data!("vector/data/lonlat.csv"), &None)
                .unwrap();

        let mut meta_data = crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data);

        // detection order of text columns is not deterministic — sort before
        // comparing against the literal expectation below
        if let crate::datasets::storage::MetaDataDefinition::OgrMetaData(meta_data) = &mut meta_data
        {
            if let Some(columns) = &mut meta_data.loading_info.columns {
                columns.text.sort();
            }
        }

        assert_eq!(
            meta_data,
            crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
                loading_info: OgrSourceDataset {
                    file_name: test_data!("vector/data/lonlat.csv").into(),
                    layer_name: "lonlat".to_string(),
                    data_type: Some(VectorDataType::MultiPoint),
                    time: OgrSourceDatasetTimeType::None,
                    default_geometry: None,
                    columns: Some(OgrSourceColumnSpec {
                        format_specifics: None,
                        // coordinate columns detected by name
                        x: "Longitude".to_string(),
                        y: Some("Latitude".to_string()),
                        float: vec![],
                        int: vec![],
                        text: vec![
                            "Latitude".to_string(),
                            "Longitude".to_string(),
                            "Name".to_string()
                        ],
                        bool: vec![],
                        datetime: vec![],
                        rename: None,
                    }),
                    force_ogr_time_filter: false,
                    force_ogr_spatial_filter: false,
                    on_error: OgrSourceErrorSpec::Ignore,
                    sql_query: None,
                    attribute_query: None,
                    cache_ttl: CacheTtlSeconds::default()
                },
                result_descriptor: VectorResultDescriptor {
                    data_type: VectorDataType::MultiPoint,
                    // CSV carries no CRS information
                    spatial_reference: SpatialReferenceOption::Unreferenced,
                    columns: [
                        (
                            "Latitude".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "Longitude".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        ),
                        (
                            "Name".to_string(),
                            VectorColumnInfo {
                                data_type: FeatureDataType::Text,
                                measurement: Measurement::Unitless
                            }
                        )
                    ]
                    .iter()
                    .cloned()
                    .collect(),
                    time: None,
                    bbox: None,
                },
                phantom: Default::default()
            })
        );
    }
1✔
2249

2250
    #[ge_context::test]
2✔
2251
    async fn get_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2252
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2253
        let ctx = app_ctx.session_context(session.clone());
1✔
2254

1✔
2255
        let descriptor = VectorResultDescriptor {
1✔
2256
            data_type: VectorDataType::Data,
1✔
2257
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
2258
            columns: Default::default(),
1✔
2259
            time: None,
1✔
2260
            bbox: None,
1✔
2261
        };
1✔
2262

1✔
2263
        let ds = AddDataset {
1✔
2264
            name: None,
1✔
2265
            display_name: "OgrDataset".to_string(),
1✔
2266
            description: "My Ogr dataset".to_string(),
1✔
2267
            source_operator: "OgrSource".to_string(),
1✔
2268
            symbology: None,
1✔
2269
            provenance: None,
1✔
2270
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
2271
        };
1✔
2272

1✔
2273
        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
1✔
2274
            loading_info: OgrSourceDataset {
1✔
2275
                file_name: Default::default(),
1✔
2276
                layer_name: String::new(),
1✔
2277
                data_type: None,
1✔
2278
                time: Default::default(),
1✔
2279
                default_geometry: None,
1✔
2280
                columns: None,
1✔
2281
                force_ogr_time_filter: false,
1✔
2282
                force_ogr_spatial_filter: false,
1✔
2283
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2284
                sql_query: None,
1✔
2285
                attribute_query: None,
1✔
2286
                cache_ttl: CacheTtlSeconds::default(),
1✔
2287
            },
1✔
2288
            result_descriptor: descriptor,
1✔
2289
            phantom: Default::default(),
1✔
2290
        });
1✔
2291

1✔
2292
        let db = ctx.db();
1✔
2293
        let DatasetIdAndName {
2294
            id,
1✔
2295
            name: dataset_name,
1✔
2296
        } = db.add_dataset(ds.into(), meta).await?;
1✔
2297

2298
        let req = actix_web::test::TestRequest::get()
1✔
2299
            .uri(&format!("/dataset/{dataset_name}"))
1✔
2300
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2301
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
1✔
2302
        let res = send_test_request(req, app_ctx).await;
1✔
2303

2304
        let res_status = res.status();
1✔
2305
        let res_body = serde_json::from_str::<Value>(&read_body_string(res).await).unwrap();
1✔
2306
        assert_eq!(res_status, 200, "{res_body}");
1✔
2307

2308
        assert_eq!(
1✔
2309
            res_body,
1✔
2310
            json!({
1✔
2311
                "name": dataset_name,
1✔
2312
                "id": id,
1✔
2313
                "displayName": "OgrDataset",
1✔
2314
                "description": "My Ogr dataset",
1✔
2315
                "resultDescriptor": {
1✔
2316
                    "type": "vector",
1✔
2317
                    "dataType": "Data",
1✔
2318
                    "spatialReference": "",
1✔
2319
                    "columns": {},
1✔
2320
                    "time": null,
1✔
2321
                    "bbox": null
1✔
2322
                },
1✔
2323
                "sourceOperator": "OgrSource",
1✔
2324
                "symbology": null,
1✔
2325
                "provenance": null,
1✔
2326
                "tags": ["upload", "test"],
1✔
2327
            })
1✔
2328
        );
1✔
2329

2330
        Ok(())
1✔
2331
    }
1✔
2332

2333
    #[ge_context::test]
2✔
2334
    #[allow(clippy::too_many_lines)]
2335
    async fn it_suggests_metadata(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2336
        let mut test_data = TestDataUploads::default(); // remember created folder and remove them on drop
1✔
2337

2338
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2339

1✔
2340
        let body = vec![(
1✔
2341
            "test.json",
1✔
2342
            r#"{
1✔
2343
                "type": "FeatureCollection",
1✔
2344
                "features": [
1✔
2345
                  {
1✔
2346
                    "type": "Feature",
1✔
2347
                    "geometry": {
1✔
2348
                      "type": "Point",
1✔
2349
                      "coordinates": [
1✔
2350
                        1,
1✔
2351
                        1
1✔
2352
                      ]
1✔
2353
                    },
1✔
2354
                    "properties": {
1✔
2355
                      "name": "foo",
1✔
2356
                      "id": 1
1✔
2357
                    }
1✔
2358
                  },
1✔
2359
                  {
1✔
2360
                    "type": "Feature",
1✔
2361
                    "geometry": {
1✔
2362
                      "type": "Point",
1✔
2363
                      "coordinates": [
1✔
2364
                        2,
1✔
2365
                        2
1✔
2366
                      ]
1✔
2367
                    },
1✔
2368
                    "properties": {
1✔
2369
                      "name": "bar",
1✔
2370
                      "id": 2
1✔
2371
                    }
1✔
2372
                  }
1✔
2373
                ]
1✔
2374
              }"#,
1✔
2375
        )];
1✔
2376

1✔
2377
        let req = actix_web::test::TestRequest::post()
1✔
2378
            .uri("/upload")
1✔
2379
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2380
            .set_multipart(body.clone());
1✔
2381

2382
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
2383

2384
        assert_eq!(res.status(), 200);
1✔
2385

2386
        let upload: IdResponse<UploadId> = actix_web::test::read_body_json(res).await;
1✔
2387
        test_data.uploads.push(upload.id);
1✔
2388

1✔
2389
        let upload_content =
1✔
2390
            std::fs::read_to_string(upload.id.root_path().unwrap().join("test.json")).unwrap();
1✔
2391

1✔
2392
        assert_eq!(&upload_content, body[0].1);
1✔
2393

2394
        let req = actix_web::test::TestRequest::post()
1✔
2395
            .uri("/dataset/suggest")
1✔
2396
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2397
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2398
            .set_json(SuggestMetaData {
1✔
2399
                data_path: DataPath::Upload(upload.id),
1✔
2400
                layer_name: None,
1✔
2401
                main_file: None,
1✔
2402
            });
1✔
2403
        let res = send_test_request(req, app_ctx).await;
1✔
2404

2405
        let res_status = res.status();
1✔
2406
        let res_body = read_body_string(res).await;
1✔
2407
        assert_eq!(res_status, 200, "{res_body}");
1✔
2408

2409
        assert_eq!(
1✔
2410
            serde_json::from_str::<serde_json::Value>(&res_body).unwrap(),
1✔
2411
            json!({
1✔
2412
              "mainFile": "test.json",
1✔
2413
              "layerName": "test",
1✔
2414
              "metaData": {
1✔
2415
                "type": "OgrMetaData",
1✔
2416
                "loadingInfo": {
1✔
2417
                  "fileName": format!("test_upload/{}/test.json", upload.id),
1✔
2418
                  "layerName": "test",
1✔
2419
                  "dataType": "MultiPoint",
1✔
2420
                  "time": {
1✔
2421
                    "type": "none"
1✔
2422
                  },
1✔
2423
                  "defaultGeometry": null,
1✔
2424
                  "columns": {
1✔
2425
                    "formatSpecifics": null,
1✔
2426
                    "x": "",
1✔
2427
                    "y": null,
1✔
2428
                    "int": [
1✔
2429
                      "id"
1✔
2430
                    ],
1✔
2431
                    "float": [],
1✔
2432
                    "text": [
1✔
2433
                      "name"
1✔
2434
                    ],
1✔
2435
                    "bool": [],
1✔
2436
                    "datetime": [],
1✔
2437
                    "rename": null
1✔
2438
                  },
1✔
2439
                  "forceOgrTimeFilter": false,
1✔
2440
                  "forceOgrSpatialFilter": false,
1✔
2441
                  "onError": "ignore",
1✔
2442
                  "sqlQuery": null,
1✔
2443
                  "attributeQuery": null,
1✔
2444
                  "cacheTtl": 0,
1✔
2445
                },
1✔
2446
                "resultDescriptor": {
1✔
2447
                  "dataType": "MultiPoint",
1✔
2448
                  "spatialReference": "EPSG:4326",
1✔
2449
                  "columns": {
1✔
2450
                    "id": {
1✔
2451
                      "dataType": "int",
1✔
2452
                      "measurement": {
1✔
2453
                        "type": "unitless"
1✔
2454
                      }
1✔
2455
                    },
1✔
2456
                    "name": {
1✔
2457
                      "dataType": "text",
1✔
2458
                      "measurement": {
1✔
2459
                        "type": "unitless"
1✔
2460
                      }
1✔
2461
                    }
1✔
2462
                  },
1✔
2463
                  "time": null,
1✔
2464
                  "bbox": null
1✔
2465
                }
1✔
2466
              }
1✔
2467
            })
1✔
2468
        );
1✔
2469

2470
        Ok(())
1✔
2471
    }
1✔
2472

2473
    #[ge_context::test]
2✔
2474
    async fn it_deletes_system_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2475
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2476
        let ctx = app_ctx.session_context(session.clone());
1✔
2477

1✔
2478
        let volume = VolumeName("test_data".to_string());
1✔
2479

1✔
2480
        let mut meta_data = create_ndvi_meta_data();
1✔
2481

1✔
2482
        // make path relative to volume
1✔
2483
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();
1✔
2484

1✔
2485
        let create = CreateDataset {
1✔
2486
            data_path: DataPath::Volume(volume.clone()),
1✔
2487
            definition: DatasetDefinition {
1✔
2488
                properties: AddDataset {
1✔
2489
                    name: None,
1✔
2490
                    display_name: "ndvi".to_string(),
1✔
2491
                    description: "ndvi".to_string(),
1✔
2492
                    source_operator: "GdalSource".to_string(),
1✔
2493
                    symbology: None,
1✔
2494
                    provenance: None,
1✔
2495
                    tags: None,
1✔
2496
                },
1✔
2497
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
1✔
2498
            },
1✔
2499
        };
1✔
2500

2501
        let req = actix_web::test::TestRequest::post()
1✔
2502
            .uri("/dataset")
1✔
2503
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2504
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2505
            .append_header((header::CONTENT_TYPE, "application/json"))
1✔
2506
            .set_payload(serde_json::to_string(&create)?);
1✔
2507
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
2508

2509
        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
1✔
2510

2511
        let db = ctx.db();
1✔
2512
        let dataset_id = db
1✔
2513
            .resolve_dataset_name_to_id(&dataset_name)
1✔
2514
            .await
1✔
2515
            .unwrap()
1✔
2516
            .unwrap();
1✔
2517
        assert!(db.load_dataset(&dataset_id).await.is_ok());
1✔
2518

2519
        let req = actix_web::test::TestRequest::delete()
1✔
2520
            .uri(&format!("/dataset/{dataset_name}"))
1✔
2521
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2522
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2523
            .append_header((header::CONTENT_TYPE, "application/json"));
1✔
2524

2525
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
2526

2527
        assert_eq!(res.status(), 200);
1✔
2528

2529
        assert!(db.load_dataset(&dataset_id).await.is_err());
1✔
2530

2531
        Ok(())
1✔
2532
    }
1✔
2533

2534
    #[ge_context::test]
2✔
2535
    async fn it_gets_loading_info(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2536
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2537
        let ctx = app_ctx.session_context(session.clone());
1✔
2538

1✔
2539
        let descriptor = VectorResultDescriptor {
1✔
2540
            data_type: VectorDataType::Data,
1✔
2541
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
2542
            columns: Default::default(),
1✔
2543
            time: None,
1✔
2544
            bbox: None,
1✔
2545
        };
1✔
2546

1✔
2547
        let ds = AddDataset {
1✔
2548
            name: None,
1✔
2549
            display_name: "OgrDataset".to_string(),
1✔
2550
            description: "My Ogr dataset".to_string(),
1✔
2551
            source_operator: "OgrSource".to_string(),
1✔
2552
            symbology: None,
1✔
2553
            provenance: None,
1✔
2554
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
2555
        };
1✔
2556

1✔
2557
        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
1✔
2558
            loading_info: OgrSourceDataset {
1✔
2559
                file_name: Default::default(),
1✔
2560
                layer_name: String::new(),
1✔
2561
                data_type: None,
1✔
2562
                time: Default::default(),
1✔
2563
                default_geometry: None,
1✔
2564
                columns: None,
1✔
2565
                force_ogr_time_filter: false,
1✔
2566
                force_ogr_spatial_filter: false,
1✔
2567
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2568
                sql_query: None,
1✔
2569
                attribute_query: None,
1✔
2570
                cache_ttl: CacheTtlSeconds::default(),
1✔
2571
            },
1✔
2572
            result_descriptor: descriptor,
1✔
2573
            phantom: Default::default(),
1✔
2574
        });
1✔
2575

1✔
2576
        let db = ctx.db();
1✔
2577
        let DatasetIdAndName {
2578
            id: _,
2579
            name: dataset_name,
1✔
2580
        } = db.add_dataset(ds.into(), meta).await?;
1✔
2581

2582
        let req = actix_web::test::TestRequest::get()
1✔
2583
            .uri(&format!("/dataset/{dataset_name}/loadingInfo"))
1✔
2584
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2585
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
1✔
2586
        let res = send_test_request(req, app_ctx).await;
1✔
2587

2588
        let res_status = res.status();
1✔
2589
        let res_body = serde_json::from_str::<Value>(&read_body_string(res).await).unwrap();
1✔
2590
        assert_eq!(res_status, 200, "{res_body}");
1✔
2591

2592
        assert_eq!(
1✔
2593
            res_body,
1✔
2594
            json!({
1✔
2595
                "loadingInfo":  {
1✔
2596
                    "attributeQuery": null,
1✔
2597
                    "cacheTtl": 0,
1✔
2598
                    "columns": null,
1✔
2599
                    "dataType": null,
1✔
2600
                    "defaultGeometry": null,
1✔
2601
                    "fileName": "",
1✔
2602
                    "forceOgrSpatialFilter": false,
1✔
2603
                    "forceOgrTimeFilter": false,
1✔
2604
                    "layerName": "",
1✔
2605
                    "onError": "ignore",
1✔
2606
                    "sqlQuery": null,
1✔
2607
                    "time":  {
1✔
2608
                        "type": "none"
1✔
2609
                    }
1✔
2610
                },
1✔
2611
                 "resultDescriptor":  {
1✔
2612
                    "bbox": null,
1✔
2613
                    "columns":  {},
1✔
2614
                    "dataType": "Data",
1✔
2615
                    "spatialReference": "",
1✔
2616
                    "time": null
1✔
2617
                },
1✔
2618
                "type": "OgrMetaData"
1✔
2619
            })
1✔
2620
        );
1✔
2621

2622
        Ok(())
1✔
2623
    }
1✔
2624

2625
    #[ge_context::test]
2✔
2626
    async fn it_updates_loading_info(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2627
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2628
        let ctx = app_ctx.session_context(session.clone());
1✔
2629

1✔
2630
        let descriptor = VectorResultDescriptor {
1✔
2631
            data_type: VectorDataType::Data,
1✔
2632
            spatial_reference: SpatialReferenceOption::Unreferenced,
1✔
2633
            columns: Default::default(),
1✔
2634
            time: None,
1✔
2635
            bbox: None,
1✔
2636
        };
1✔
2637

1✔
2638
        let ds = AddDataset {
1✔
2639
            name: None,
1✔
2640
            display_name: "OgrDataset".to_string(),
1✔
2641
            description: "My Ogr dataset".to_string(),
1✔
2642
            source_operator: "OgrSource".to_string(),
1✔
2643
            symbology: None,
1✔
2644
            provenance: None,
1✔
2645
            tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
2646
        };
1✔
2647

1✔
2648
        let meta = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
1✔
2649
            loading_info: OgrSourceDataset {
1✔
2650
                file_name: Default::default(),
1✔
2651
                layer_name: String::new(),
1✔
2652
                data_type: None,
1✔
2653
                time: Default::default(),
1✔
2654
                default_geometry: None,
1✔
2655
                columns: None,
1✔
2656
                force_ogr_time_filter: false,
1✔
2657
                force_ogr_spatial_filter: false,
1✔
2658
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2659
                sql_query: None,
1✔
2660
                attribute_query: None,
1✔
2661
                cache_ttl: CacheTtlSeconds::default(),
1✔
2662
            },
1✔
2663
            result_descriptor: descriptor.clone(),
1✔
2664
            phantom: Default::default(),
1✔
2665
        });
1✔
2666

1✔
2667
        let db = ctx.db();
1✔
2668
        let DatasetIdAndName {
2669
            id,
1✔
2670
            name: dataset_name,
1✔
2671
        } = db.add_dataset(ds.into(), meta).await?;
1✔
2672

2673
        let update = crate::datasets::storage::MetaDataDefinition::OgrMetaData(StaticMetaData {
1✔
2674
            loading_info: OgrSourceDataset {
1✔
2675
                file_name: "foo.bar".into(),
1✔
2676
                layer_name: "baz".to_string(),
1✔
2677
                data_type: None,
1✔
2678
                time: Default::default(),
1✔
2679
                default_geometry: None,
1✔
2680
                columns: None,
1✔
2681
                force_ogr_time_filter: false,
1✔
2682
                force_ogr_spatial_filter: false,
1✔
2683
                on_error: OgrSourceErrorSpec::Ignore,
1✔
2684
                sql_query: None,
1✔
2685
                attribute_query: None,
1✔
2686
                cache_ttl: CacheTtlSeconds::default(),
1✔
2687
            },
1✔
2688
            result_descriptor: descriptor,
1✔
2689
            phantom: Default::default(),
1✔
2690
        });
1✔
2691

1✔
2692
        let req = actix_web::test::TestRequest::put()
1✔
2693
            .uri(&format!("/dataset/{dataset_name}/loadingInfo"))
1✔
2694
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2695
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2696
            .set_json(update.clone());
1✔
2697

2698
        let res = send_test_request(req, app_ctx).await;
1✔
2699
        assert_eq!(res.status(), 200);
1✔
2700

2701
        let loading_info = db.load_loading_info(&id).await.unwrap();
1✔
2702

1✔
2703
        assert_eq!(loading_info, update);
1✔
2704

2705
        Ok(())
1✔
2706
    }
1✔
2707

2708
    #[ge_context::test]
2✔
2709
    async fn it_gets_updates_symbology(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2710
        let session = admin_login(&app_ctx).await;
1✔
2711
        let ctx = app_ctx.session_context(session.clone());
1✔
2712

2713
        let DatasetIdAndName {
2714
            id: dataset_id,
1✔
2715
            name: dataset_name,
1✔
2716
        } = add_pro_file_definition_to_datasets(&ctx.db(), test_data!("dataset_defs/ndvi.json"))
1✔
2717
            .await;
1✔
2718

2719
        let symbology = Symbology::Raster(RasterSymbology {
1✔
2720
            opacity: 1.0,
1✔
2721
            raster_colorizer: RasterColorizer::SingleBand {
1✔
2722
                band: 0,
1✔
2723
                band_colorizer: geoengine_datatypes::operations::image::Colorizer::linear_gradient(
1✔
2724
                    vec![
1✔
2725
                        (0.0, RgbaColor::white())
1✔
2726
                            .try_into()
1✔
2727
                            .expect("valid breakpoint"),
1✔
2728
                        (10_000.0, RgbaColor::black())
1✔
2729
                            .try_into()
1✔
2730
                            .expect("valid breakpoint"),
1✔
2731
                    ],
1✔
2732
                    RgbaColor::transparent(),
1✔
2733
                    RgbaColor::white(),
1✔
2734
                    RgbaColor::black(),
1✔
2735
                )
1✔
2736
                .expect("valid colorizer"),
1✔
2737
            },
1✔
2738
        });
1✔
2739

1✔
2740
        let req = actix_web::test::TestRequest::put()
1✔
2741
            .uri(&format!("/dataset/{dataset_name}/symbology"))
1✔
2742
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2743
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2744
            .set_json(symbology.clone());
1✔
2745
        let res = send_test_request(req, app_ctx).await;
1✔
2746

2747
        let res_status = res.status();
1✔
2748
        assert_eq!(res_status, 200);
1✔
2749

2750
        let dataset = ctx.db().load_dataset(&dataset_id).await?;
1✔
2751

2752
        assert_eq!(dataset.symbology, Some(symbology));
1✔
2753

2754
        Ok(())
1✔
2755
    }
1✔
2756

2757
    #[ge_context::test()]
2✔
2758
    async fn it_updates_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2759
        let session = admin_login(&app_ctx).await;
1✔
2760
        let ctx = app_ctx.session_context(session.clone());
1✔
2761

2762
        let DatasetIdAndName {
2763
            id: dataset_id,
1✔
2764
            name: dataset_name,
1✔
2765
        } = add_pro_file_definition_to_datasets(&ctx.db(), test_data!("dataset_defs/ndvi.json"))
1✔
2766
            .await;
1✔
2767

2768
        let update: UpdateDataset = UpdateDataset {
1✔
2769
            name: DatasetName::new(None, "new_name"),
1✔
2770
            display_name: "new display name".to_string(),
1✔
2771
            description: "new description".to_string(),
1✔
2772
            tags: vec!["foo".to_string(), "bar".to_string()],
1✔
2773
        };
1✔
2774

1✔
2775
        let req = actix_web::test::TestRequest::post()
1✔
2776
            .uri(&format!("/dataset/{dataset_name}"))
1✔
2777
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2778
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2779
            .set_json(update.clone());
1✔
2780
        let res = send_test_request(req, app_ctx).await;
1✔
2781

2782
        let res_status = res.status();
1✔
2783
        assert_eq!(res_status, 200);
1✔
2784

2785
        let dataset = ctx.db().load_dataset(&dataset_id).await?;
1✔
2786

2787
        assert_eq!(dataset.name, update.name);
1✔
2788
        assert_eq!(dataset.display_name, update.display_name);
1✔
2789
        assert_eq!(dataset.description, update.description);
1✔
2790
        assert_eq!(dataset.tags, Some(update.tags));
1✔
2791

2792
        Ok(())
1✔
2793
    }
1✔
2794

2795
    #[ge_context::test()]
2✔
2796
    async fn it_updates_provenance(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2797
        let session = admin_login(&app_ctx).await;
1✔
2798
        let ctx = app_ctx.session_context(session.clone());
1✔
2799

2800
        let DatasetIdAndName {
2801
            id: dataset_id,
1✔
2802
            name: dataset_name,
1✔
2803
        } = add_pro_file_definition_to_datasets(&ctx.db(), test_data!("dataset_defs/ndvi.json"))
1✔
2804
            .await;
1✔
2805

2806
        let provenances: Provenances = Provenances {
1✔
2807
            provenances: vec![Provenance {
1✔
2808
                citation: "foo".to_string(),
1✔
2809
                license: "bar".to_string(),
1✔
2810
                uri: "http://example.com".to_string(),
1✔
2811
            }],
1✔
2812
        };
1✔
2813

1✔
2814
        let req = actix_web::test::TestRequest::put()
1✔
2815
            .uri(&format!("/dataset/{dataset_name}/provenance"))
1✔
2816
            .append_header((header::CONTENT_LENGTH, 0))
1✔
2817
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
2818
            .set_json(provenances.clone());
1✔
2819
        let res = send_test_request(req, app_ctx).await;
1✔
2820

2821
        let res_status = res.status();
1✔
2822
        assert_eq!(res_status, 200);
1✔
2823

2824
        let dataset = ctx.db().load_dataset(&dataset_id).await?;
1✔
2825

2826
        assert_eq!(
1✔
2827
            dataset.provenance,
1✔
2828
            Some(
1✔
2829
                provenances
1✔
2830
                    .provenances
1✔
2831
                    .into_iter()
1✔
2832
                    .map(Into::into)
1✔
2833
                    .collect()
1✔
2834
            )
1✔
2835
        );
1✔
2836

2837
        Ok(())
1✔
2838
    }
1✔
2839

2840
    // TODO: better way to get to the root of the project
2841
    struct TestWorkdirChanger {
2842
        package_dir: &'static str,
2843
        modified: bool,
2844
    }
2845

2846
    impl TestWorkdirChanger {
2847
        fn go_to_workspace(package_dir: &'static str) -> Self {
1✔
2848
            let mut working_dir = std::env::current_dir().unwrap();
1✔
2849

1✔
2850
            if !working_dir.ends_with(package_dir) {
1✔
NEW
2851
                return Self {
×
NEW
2852
                    package_dir,
×
NEW
2853
                    modified: false,
×
NEW
2854
                };
×
2855
            }
1✔
2856

1✔
2857
            working_dir.pop();
1✔
2858

1✔
2859
            std::env::set_current_dir(working_dir).unwrap();
1✔
2860

1✔
2861
            Self {
1✔
2862
                package_dir,
1✔
2863
                modified: true,
1✔
2864
            }
1✔
2865
        }
1✔
2866
    }
2867

2868
    impl Drop for TestWorkdirChanger {
2869
        fn drop(&mut self) {
1✔
2870
            if !self.modified {
1✔
NEW
2871
                return;
×
2872
            }
1✔
2873

1✔
2874
            let mut working_dir = std::env::current_dir().unwrap();
1✔
2875
            working_dir.push(self.package_dir);
1✔
2876
            std::env::set_current_dir(working_dir).unwrap();
1✔
2877
        }
1✔
2878
    }
2879

2880
    #[ge_context::test(test_execution = "serial")]
2✔
2881
    async fn it_lists_layers(app_ctx: ProPostgresContext<NoTls>) {
1✔
2882
        let changed_workdir = TestWorkdirChanger::go_to_workspace("services");
1✔
2883

2884
        let session = admin_login(&app_ctx).await;
1✔
2885

2886
        let volume_name = "test_data";
1✔
2887
        let file_name = "vector%2Fdata%2Ftwo_layers.gpkg";
1✔
2888

1✔
2889
        let req = actix_web::test::TestRequest::get()
1✔
2890
            .uri(&format!(
1✔
2891
                "/dataset/volumes/{volume_name}/files/{file_name}/layers"
1✔
2892
            ))
1✔
2893
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
1✔
2894

2895
        let res = send_test_request(req, app_ctx).await;
1✔
2896

2897
        assert_eq!(res.status(), 200, "{res:?}");
1✔
2898

2899
        let layers: VolumeFileLayersResponse = actix_web::test::read_body_json(res).await;
1✔
2900

2901
        assert_eq!(
1✔
2902
            layers.layers,
1✔
2903
            vec![
1✔
2904
                "points_with_time".to_string(),
1✔
2905
                "points_with_time_and_more".to_string(),
1✔
2906
                "layer_styles".to_string() // TOOO: remove once internal/system layers are hidden
1✔
2907
            ]
1✔
2908
        );
1✔
2909

2910
        drop(changed_workdir);
1✔
2911
    }
1✔
2912

2913
    /// override the pixel size since this test was designed for 600 x 600 pixel tiles
2914
    fn create_dataset_tiling_specification() -> TilingSpecification {
1✔
2915
        TilingSpecification {
1✔
2916
            origin_coordinate: (0., 0.).into(),
1✔
2917
            tile_size_in_pixels: GridShape2D::new([600, 600]),
1✔
2918
        }
1✔
2919
    }
1✔
2920

2921
    #[ge_context::test(tiling_spec = "create_dataset_tiling_specification")]
2✔
2922
    async fn create_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2923
        let mut test_data = TestDataUploads::default(); // remember created folder and remove them on drop
1✔
2924

2925
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2926
        let ctx = app_ctx.session_context(session.clone());
1✔
2927

2928
        let upload_id = upload_ne_10m_ports_files(app_ctx.clone(), session.id()).await?;
1✔
2929
        test_data.uploads.push(upload_id);
1✔
2930

2931
        let dataset_name =
1✔
2932
            construct_dataset_from_upload(app_ctx.clone(), upload_id, session.id()).await;
1✔
2933
        let exe_ctx = ctx.execution_context()?;
1✔
2934

2935
        let source = make_ogr_source(
1✔
2936
            &exe_ctx,
1✔
2937
            geoengine_datatypes::dataset::NamedData::from(dataset_name).into(),
1✔
2938
        )
1✔
2939
        .await?;
1✔
2940

2941
        let query_processor = source.query_processor()?.multi_point().unwrap();
1✔
2942
        let query_ctx = ctx.mock_query_context()?;
1✔
2943

2944
        let query = query_processor
1✔
2945
            .query(
1✔
2946
                VectorQueryRectangle {
1✔
2947
                    spatial_bounds: BoundingBox2D::new((1.85, 50.88).into(), (4.82, 52.95).into())?,
1✔
2948
                    time_interval: Default::default(),
1✔
2949
                    spatial_resolution: SpatialResolution::new(1., 1.)?,
1✔
2950
                    attributes: ColumnSelection::all(),
1✔
2951
                },
1✔
2952
                &query_ctx,
1✔
2953
            )
1✔
2954
            .await
1✔
2955
            .unwrap();
1✔
2956

2957
        let result: Vec<MultiPointCollection> = query.try_collect().await?;
1✔
2958

2959
        let coords = result[0].coordinates();
1✔
2960
        assert_eq!(coords.len(), 10);
1✔
2961
        assert_eq!(
1✔
2962
            coords,
1✔
2963
            &[
1✔
2964
                [2.933_686_69, 51.23].into(),
1✔
2965
                [3.204_593_64_f64, 51.336_388_89].into(),
1✔
2966
                [4.651_413_428, 51.805_833_33].into(),
1✔
2967
                [4.11, 51.95].into(),
1✔
2968
                [4.386_160_188, 50.886_111_11].into(),
1✔
2969
                [3.767_373_38, 51.114_444_44].into(),
1✔
2970
                [4.293_757_362, 51.297_777_78].into(),
1✔
2971
                [1.850_176_678, 50.965_833_33].into(),
1✔
2972
                [2.170_906_949, 51.021_666_67].into(),
1✔
2973
                [4.292_873_969, 51.927_222_22].into(),
1✔
2974
            ]
1✔
2975
        );
1✔
2976

2977
        Ok(())
1✔
2978
    }
1✔
2979

2980
    #[ge_context::test]
2✔
2981
    async fn it_creates_volume_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
2982
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
2983

1✔
2984
        let volume = VolumeName("test_data".to_string());
1✔
2985

1✔
2986
        let mut meta_data = create_ndvi_meta_data();
1✔
2987

1✔
2988
        // make path relative to volume
1✔
2989
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();
1✔
2990

1✔
2991
        let create = CreateDataset {
1✔
2992
            data_path: DataPath::Volume(volume.clone()),
1✔
2993
            definition: DatasetDefinition {
1✔
2994
                properties: AddDataset {
1✔
2995
                    name: None,
1✔
2996
                    display_name: "ndvi".to_string(),
1✔
2997
                    description: "ndvi".to_string(),
1✔
2998
                    source_operator: "GdalSource".to_string(),
1✔
2999
                    symbology: None,
1✔
3000
                    provenance: None,
1✔
3001
                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
3002
                },
1✔
3003
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
1✔
3004
            },
1✔
3005
        };
1✔
3006

3007
        // create via admin session
3008
        let admin_session = admin_login(&app_ctx).await;
1✔
3009
        let req = actix_web::test::TestRequest::post()
1✔
3010
            .uri("/dataset")
1✔
3011
            .append_header((header::CONTENT_LENGTH, 0))
1✔
3012
            .append_header((
1✔
3013
                header::AUTHORIZATION,
1✔
3014
                Bearer::new(admin_session.id().to_string()),
1✔
3015
            ))
1✔
3016
            .append_header((header::CONTENT_TYPE, "application/json"))
1✔
3017
            .set_json(create);
1✔
3018
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
3019
        assert_eq!(res.status(), 200);
1✔
3020

3021
        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
1✔
3022

3023
        let req = actix_web::test::TestRequest::get()
1✔
3024
            .uri(&format!("/dataset/{dataset_name}"))
1✔
3025
            .append_header((header::CONTENT_LENGTH, 0))
1✔
3026
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())));
1✔
3027

3028
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
3029
        assert_eq!(res.status(), 200);
1✔
3030

3031
        Ok(())
1✔
3032
    }
1✔
3033

3034
    #[ge_context::test]
2✔
3035
    async fn it_deletes_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
3036
        let mut test_data = TestDataUploads::default(); // remember created folder and remove them on drop
1✔
3037

3038
        let session = app_ctx.create_anonymous_session().await.unwrap();
1✔
3039
        let session_id = session.id();
1✔
3040
        let ctx = app_ctx.session_context(session);
1✔
3041

3042
        let upload_id = upload_ne_10m_ports_files(app_ctx.clone(), session_id).await?;
1✔
3043
        test_data.uploads.push(upload_id);
1✔
3044

3045
        let dataset_name =
1✔
3046
            construct_dataset_from_upload(app_ctx.clone(), upload_id, session_id).await;
1✔
3047

3048
        let db = ctx.db();
1✔
3049
        let dataset_id = db
1✔
3050
            .resolve_dataset_name_to_id(&dataset_name)
1✔
3051
            .await
1✔
3052
            .unwrap()
1✔
3053
            .unwrap();
1✔
3054

1✔
3055
        assert!(db.load_dataset(&dataset_id).await.is_ok());
1✔
3056

3057
        let req = actix_web::test::TestRequest::delete()
1✔
3058
            .uri(&format!("/dataset/{dataset_name}"))
1✔
3059
            .append_header((header::CONTENT_LENGTH, 0))
1✔
3060
            .append_header((header::AUTHORIZATION, Bearer::new(session_id.to_string())))
1✔
3061
            .append_header((header::CONTENT_TYPE, "application/json"));
1✔
3062

3063
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
3064

3065
        assert_eq!(res.status(), 200, "response: {res:?}");
1✔
3066

3067
        assert!(db.load_dataset(&dataset_id).await.is_err());
1✔
3068

3069
        Ok(())
1✔
3070
    }
1✔
3071

3072
    #[ge_context::test]
2✔
3073
    async fn it_deletes_volume_dataset(app_ctx: ProPostgresContext<NoTls>) -> Result<()> {
1✔
3074
        let volume = VolumeName("test_data".to_string());
1✔
3075

1✔
3076
        let mut meta_data = create_ndvi_meta_data();
1✔
3077

1✔
3078
        // make path relative to volume
1✔
3079
        meta_data.params.file_path = "raster/modis_ndvi/MOD13A2_M_NDVI_%_START_TIME_%.TIFF".into();
1✔
3080

1✔
3081
        let create = CreateDataset {
1✔
3082
            data_path: DataPath::Volume(volume.clone()),
1✔
3083
            definition: DatasetDefinition {
1✔
3084
                properties: AddDataset {
1✔
3085
                    name: None,
1✔
3086
                    display_name: "ndvi".to_string(),
1✔
3087
                    description: "ndvi".to_string(),
1✔
3088
                    source_operator: "GdalSource".to_string(),
1✔
3089
                    symbology: None,
1✔
3090
                    provenance: None,
1✔
3091
                    tags: Some(vec!["upload".to_owned(), "test".to_owned()]),
1✔
3092
                },
1✔
3093
                meta_data: MetaDataDefinition::GdalMetaDataRegular(meta_data.into()),
1✔
3094
            },
1✔
3095
        };
1✔
3096

3097
        let session = admin_login(&app_ctx).await;
1✔
3098
        let ctx = app_ctx.session_context(session.clone());
1✔
3099

1✔
3100
        let db = ctx.db();
1✔
3101

3102
        let req = actix_web::test::TestRequest::post()
1✔
3103
            .uri("/dataset")
1✔
3104
            .append_header((header::CONTENT_LENGTH, 0))
1✔
3105
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
3106
            .append_header((header::CONTENT_TYPE, "application/json"))
1✔
3107
            .set_payload(serde_json::to_string(&create)?);
1✔
3108
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
3109

3110
        let DatasetNameResponse { dataset_name } = actix_web::test::read_body_json(res).await;
1✔
3111
        let dataset_id = db
1✔
3112
            .resolve_dataset_name_to_id(&dataset_name)
1✔
3113
            .await
1✔
3114
            .unwrap()
1✔
3115
            .unwrap();
1✔
3116

1✔
3117
        assert!(db.load_dataset(&dataset_id).await.is_ok());
1✔
3118

3119
        let req = actix_web::test::TestRequest::delete()
1✔
3120
            .uri(&format!("/dataset/{dataset_name}"))
1✔
3121
            .append_header((header::CONTENT_LENGTH, 0))
1✔
3122
            .append_header((header::AUTHORIZATION, Bearer::new(session.id().to_string())))
1✔
3123
            .append_header((header::CONTENT_TYPE, "application/json"));
1✔
3124

3125
        let res = send_test_request(req, app_ctx.clone()).await;
1✔
3126

3127
        assert_eq!(res.status(), 200);
1✔
3128

3129
        assert!(db.load_dataset(&dataset_id).await.is_err());
1✔
3130

3131
        Ok(())
1✔
3132
    }
1✔
3133
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc