• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

IQSS / dataverse / #23675

06 Nov 2024 04:11PM UTC coverage: 22.4% (+0.5%) from 21.856%
#23675

Pull #10945

github

stevenwinship
Merge branch 'develop' into 10888-add-api-for-comparing-dataset-versions
Pull Request #10945: Add API endpoint for comparing Dataset Versions

106 of 135 new or added lines in 4 files covered. (78.52%)

693 existing lines in 2 files now uncovered.

19361 of 86432 relevant lines covered (22.4%)

0.22 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.16
/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.s3.model.PartETag;
4
import edu.harvard.iq.dataverse.*;
5
import edu.harvard.iq.dataverse.DatasetLock.Reason;
6
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
7
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
8
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
9
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
10
import edu.harvard.iq.dataverse.authorization.DataverseRole;
11
import edu.harvard.iq.dataverse.authorization.Permission;
12
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
13
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
14
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
15
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
16
import edu.harvard.iq.dataverse.authorization.users.User;
17
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
18
import edu.harvard.iq.dataverse.dataaccess.*;
19
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
20
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
21
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
22
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
23
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
24
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
25
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
26
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
27
import edu.harvard.iq.dataverse.engine.command.Command;
28
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
29
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
30
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
31
import edu.harvard.iq.dataverse.engine.command.impl.*;
32
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
33
import edu.harvard.iq.dataverse.export.ExportService;
34
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
35
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
36
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
37
import edu.harvard.iq.dataverse.globus.GlobusUtil;
38
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
39
import edu.harvard.iq.dataverse.makedatacount.*;
40
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
41
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
42
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
43
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
44
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
45
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
46
import edu.harvard.iq.dataverse.search.IndexServiceBean;
47
import edu.harvard.iq.dataverse.settings.FeatureFlags;
48
import edu.harvard.iq.dataverse.settings.JvmSettings;
49
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
50
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
51
import edu.harvard.iq.dataverse.util.*;
52
import edu.harvard.iq.dataverse.util.bagit.OREMap;
53
import edu.harvard.iq.dataverse.util.json.*;
54
import edu.harvard.iq.dataverse.workflow.Workflow;
55
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
56
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
57
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
58
import jakarta.ejb.EJB;
59
import jakarta.ejb.EJBException;
60
import jakarta.inject.Inject;
61
import jakarta.json.*;
62
import jakarta.json.stream.JsonParsingException;
63
import jakarta.servlet.http.HttpServletRequest;
64
import jakarta.servlet.http.HttpServletResponse;
65
import jakarta.ws.rs.*;
66
import jakarta.ws.rs.container.ContainerRequestContext;
67
import jakarta.ws.rs.core.*;
68
import jakarta.ws.rs.core.Response.Status;
69
import org.apache.commons.lang3.StringUtils;
70
import org.eclipse.microprofile.openapi.annotations.Operation;
71
import org.eclipse.microprofile.openapi.annotations.media.Content;
72
import org.eclipse.microprofile.openapi.annotations.media.Schema;
73
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
74
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
75
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
76
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
77
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
78
import org.glassfish.jersey.media.multipart.FormDataParam;
79

80
import java.io.IOException;
81
import java.io.InputStream;
82
import java.net.URI;
83
import java.sql.Timestamp;
84
import java.text.MessageFormat;
85
import java.text.SimpleDateFormat;
86
import java.time.LocalDate;
87
import java.time.LocalDateTime;
88
import java.time.ZoneId;
89
import java.time.format.DateTimeFormatter;
90
import java.time.format.DateTimeParseException;
91
import java.util.*;
92
import java.util.Map.Entry;
93
import java.util.concurrent.ExecutionException;
94
import java.util.function.Predicate;
95
import java.util.logging.Level;
96
import java.util.logging.Logger;
97
import java.util.regex.Pattern;
98
import java.util.stream.Collectors;
99

100
import static edu.harvard.iq.dataverse.api.ApiConstants.*;
101
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
102
import edu.harvard.iq.dataverse.dataset.DatasetType;
103
import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean;
104
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
105
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
106
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
107
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
108

109
/**
 * API endpoints for datasets ("/api/datasets/..."): retrieval, version listing,
 * files, deletion/destroy, citation dates, dataverse links, and metadata export.
 */
@Path("datasets")
public class Datasets extends AbstractApiBean {

    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
    // NOTE(review): presumably matches datafile storage identifiers (11 hex chars,
    // a dash, 12 hex chars, then an optional extension) — confirm against the storage layer.
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");
    
    // Current user's HTTP session.
    @Inject DataverseSession session;

    @EJB
    DatasetServiceBean datasetService;

    @EJB
    DataverseServiceBean dataverseService;
    
    @EJB
    GlobusServiceBean globusService;

    @EJB
    UserNotificationServiceBean userNotificationService;
    
    @EJB
    PermissionServiceBean permissionService;
    
    @EJB
    AuthenticationServiceBean authenticationServiceBean;
    
    @EJB
    DDIExportServiceBean ddiExportService;

    @EJB
    MetadataBlockServiceBean metadataBlockService;
    
    @EJB
    DataFileServiceBean fileService;

    @EJB
    IngestServiceBean ingestService;

    @EJB
    EjbDataverseEngine commandEngine;
    
    @EJB
    IndexServiceBean indexService;

    @EJB
    S3PackageImporter s3PackageImporter;
     
    @EJB
    SettingsServiceBean settingsService;

    // TODO: Move to AbstractApiBean
    @EJB
    DatasetMetricsServiceBean datasetMetricsSvc;
    
    @EJB
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;

    @EJB
    EmbargoServiceBean embargoService;

    @EJB
    RetentionServiceBean retentionService;

    // Make Data Count (MDC) view/download logging.
    @Inject
    MakeDataCountLoggingServiceBean mdcLogService;
    
    @Inject
    DataverseRequestServiceBean dvRequestService;

    @Inject
    WorkflowServiceBean wfService;
    
    @Inject
    DataverseRoleServiceBean dataverseRoleService;

    @EJB
    DatasetVersionServiceBean datasetversionService;

    @Inject
    PrivateUrlServiceBean privateUrlService;

    @Inject
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;

    @Inject
    DatasetTypeServiceBean datasetTypeSvc;
196
    /**
     * Used to consolidate the way we parse and handle dataset versions.
     * A caller dispatches on the requested version label and invokes exactly
     * one of the handlers below.
     *
     * @param <T> the result type produced by each handler
     */
    public interface DsVersionHandler<T> {
        /** Invoked when the latest version (draft or published) is requested. */
        T handleLatest();
        /** Invoked when the draft version is requested. */
        T handleDraft();
        /** Invoked when an explicit major.minor version number is requested. */
        T handleSpecific( long major, long minor );
        /** Invoked when the latest published version is requested. */
        T handleLatestPublished();
    }
206
    
207
    @GET
208
    @AuthRequired
209
    @Path("{id}")
210
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response,  @QueryParam("returnOwners") boolean returnOwners) {
211
        return response( req -> {
×
212
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true)));
×
213
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
214
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
×
215
            //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
216
            if((latest != null) && latest.isReleased()) {
×
217
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
×
218
                mdcLogService.logEntry(entry);
×
219
            }
220
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
×
221
        }, getRequestUser(crc));
×
222
    }
223
    
224
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand 
225
    // to obtain the dataset that we are trying to export - which would handle
226
    // Auth in the process... For now, Auth isn't necessary - since export ONLY 
227
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
228
    @GET
229
    @Path("/export")
230
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" })
231
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
232

233
        try {
234
            Dataset dataset = datasetService.findByGlobalId(persistentId);
×
235
            if (dataset == null) {
×
236
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
237
            }
238
            
239
            ExportService instance = ExportService.getInstance();
×
240
            
241
            InputStream is = instance.getExport(dataset, exporter);
×
242
           
243
            String mediaType = instance.getMediaType(exporter);
×
244
            //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft 
245
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
×
246
            mdcLogService.logEntry(entry);
×
247
            
248
            return Response.ok()
×
249
                    .entity(is)
×
250
                    .type(mediaType).
×
251
                    build();
×
252
        } catch (Exception wr) {
×
253
            logger.warning(wr.getMessage());
×
254
            return error(Response.Status.FORBIDDEN, "Export Failed");
×
255
        }
256
    }
257

258
    @DELETE
259
    @AuthRequired
260
    @Path("{id}")
261
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
262
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
263
        // (and there's a comment that says "TODO: remove this command")
264
        // do we need an exposed API call for it? 
265
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, 
266
        // if the dataset only has 1 version... In other words, the functionality 
267
        // currently provided by this API is covered between the "deleteDraftVersion" and
268
        // "destroyDataset" API calls.  
269
        // (The logic below follows the current implementation of the underlying 
270
        // commands!)
271

272
        User u = getRequestUser(crc);
×
273
        return response( req -> {
×
274
            Dataset doomed = findDatasetOrDie(id);
×
275
            DatasetVersion doomedVersion = doomed.getLatestVersion();
×
276
            boolean destroy = false;
×
277
            
278
            if (doomed.getVersions().size() == 1) {
×
279
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
280
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
×
281
                }
282
                destroy = true;
×
283
            } else {
284
                if (!doomedVersion.isDraft()) {
×
285
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
×
286
                }
287
            }
288
            
289
            // Gather the locations of the physical files that will need to be 
290
            // deleted once the destroy command execution has been finalized:
291
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
×
292
            
293
            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));
×
294
            
295
            // If we have gotten this far, the destroy command has succeeded, 
296
            // so we can finalize it by permanently deleting the physical files:
297
            // (DataFileService will double-check that the datafiles no 
298
            // longer exist in the database, before attempting to delete 
299
            // the physical files)
300
            if (!deleteStorageLocations.isEmpty()) {
×
301
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
302
            }
303
            
304
            return ok("Dataset " + id + " deleted");
×
305
        }, u);
306
    }
307
        
308
    @DELETE
309
    @AuthRequired
310
    @Path("{id}/destroy")
311
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
312

313
        User u = getRequestUser(crc);
×
314
        return response(req -> {
×
315
            // first check if dataset is released, and if so, if user is a superuser
316
            Dataset doomed = findDatasetOrDie(id);
×
317

318
            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
319
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
×
320
            }
321

322
            // Gather the locations of the physical files that will need to be 
323
            // deleted once the destroy command execution has been finalized:
324
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
325

326
            execCommand(new DestroyDatasetCommand(doomed, req));
×
327

328
            // If we have gotten this far, the destroy command has succeeded, 
329
            // so we can finalize permanently deleting the physical files:
330
            // (DataFileService will double-check that the datafiles no 
331
            // longer exist in the database, before attempting to delete 
332
            // the physical files)
333
            if (!deleteStorageLocations.isEmpty()) {
×
334
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
335
            }
336

337
            return ok("Dataset " + id + " destroyed");
×
338
        }, u);
339
    }
340
    
341
    @DELETE
342
    @AuthRequired
343
    @Path("{id}/versions/{versionId}")
344
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
345
        if (!DS_VERSION_DRAFT.equals(versionId)) {
×
346
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
×
347
        }
348

349
        return response( req -> {
×
350
            Dataset dataset = findDatasetOrDie(id);
×
351
            DatasetVersion doomed = dataset.getLatestVersion();
×
352
            
353
            if (!doomed.isDraft()) {
×
354
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
×
355
            }
356
            
357
            // Gather the locations of the physical files that will need to be 
358
            // deleted once the destroy command execution has been finalized:
359
            
360
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
361
            
362
            execCommand( new DeleteDatasetVersionCommand(req, dataset));
×
363
            
364
            // If we have gotten this far, the delete command has succeeded - 
365
            // by either deleting the Draft version of a published dataset, 
366
            // or destroying an unpublished one. 
367
            // This means we can finalize permanently deleting the physical files:
368
            // (DataFileService will double-check that the datafiles no 
369
            // longer exist in the database, before attempting to delete 
370
            // the physical files)
371
            if (!deleteStorageLocations.isEmpty()) {
×
372
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
373
            }
374
            
375
            return ok("Draft version of dataset " + id + " deleted");
×
376
        }, getRequestUser(crc));
×
377
    }
378
        
379
    @DELETE
380
    @AuthRequired
381
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
382
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
383
                boolean index = true;
×
384
        return response(req -> {
×
385
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
386
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
387
        }, getRequestUser(crc));
×
388
    }
389
        
390
    @PUT
391
    @AuthRequired
392
    @Path("{id}/citationdate")
393
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
394
        return response( req -> {
×
395
            if ( dsfTypeName.trim().isEmpty() ){
×
396
                return badRequest("Please provide a dataset field type in the requst body.");
×
397
            }
398
            DatasetFieldType dsfType = null;
×
399
            if (!":publicationDate".equals(dsfTypeName)) {
×
400
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
401
                if (dsfType == null) {
×
402
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
403
                }
404
            }
405

406
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
407
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
408
        }, getRequestUser(crc));
×
409
    }
410
    
411
    @DELETE
412
    @AuthRequired
413
    @Path("{id}/citationdate")
414
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
415
        return response( req -> {
×
416
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
417
            return ok("Citation Date for dataset " + id + " set to default");
×
418
        }, getRequestUser(crc));
×
419
    }
420
    
421
    @GET
422
    @AuthRequired
423
    @Path("{id}/versions")
424
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {
425

426
        return response( req -> {
×
427
            Dataset dataset = findDatasetOrDie(id);
×
428
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;
×
429

430
            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
×
431
                                .stream()
×
432
                                .map( d -> json(d, deepLookup) )
×
433
                                .collect(toJsonArray()));
×
434
        }, getRequestUser(crc));
×
435
    }
436
    
437
    @GET
438
    @AuthRequired
439
    @Path("{id}/versions/{versionId}")
440
    public Response getVersion(@Context ContainerRequestContext crc,
441
                               @PathParam("id") String datasetId,
442
                               @PathParam("versionId") String versionId,
443
                               @QueryParam("excludeFiles") Boolean excludeFiles,
444
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
445
                               @QueryParam("returnOwners") boolean returnOwners,
446
                               @Context UriInfo uriInfo,
447
                               @Context HttpHeaders headers) {
448
        return response( req -> {
×
449
            
450
            //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. 
451
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;
×
452
            
453
            Dataset dataset = findDatasetOrDie(datasetId);
×
454
            DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, 
×
455
                                                                            versionId, 
456
                                                                            dataset, 
457
                                                                            uriInfo, 
458
                                                                            headers, 
459
                                                                            includeDeaccessioned,
460
                                                                            checkPerms);
461

462
            if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) {
×
463
                return notFound("Dataset version not found");
×
464
            }
465

466
            if (excludeFiles == null ? true : !excludeFiles) {
×
467
                requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId());
×
468
            }
469

470
            JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
×
471
                                                 null, 
472
                                                 excludeFiles == null ? true : !excludeFiles, 
×
473
                                                 returnOwners);
474
            return ok(jsonBuilder);
×
475

476
        }, getRequestUser(crc));
×
477
    }
478

479
    @GET
480
    @AuthRequired
481
    @Path("{id}/versions/{versionId}/files")
482
    public Response getVersionFiles(@Context ContainerRequestContext crc,
483
                                    @PathParam("id") String datasetId,
484
                                    @PathParam("versionId") String versionId,
485
                                    @QueryParam("limit") Integer limit,
486
                                    @QueryParam("offset") Integer offset,
487
                                    @QueryParam("contentType") String contentType,
488
                                    @QueryParam("accessStatus") String accessStatus,
489
                                    @QueryParam("categoryName") String categoryName,
490
                                    @QueryParam("tabularTagName") String tabularTagName,
491
                                    @QueryParam("searchText") String searchText,
492
                                    @QueryParam("orderCriteria") String orderCriteria,
493
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
494
                                    @Context UriInfo uriInfo,
495
                                    @Context HttpHeaders headers) {
496
        return response(req -> {
×
497
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned);
×
498
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
499
            try {
500
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
×
501
            } catch (IllegalArgumentException e) {
×
502
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
×
503
            }
×
504
            FileSearchCriteria fileSearchCriteria;
505
            try {
506
                fileSearchCriteria = new FileSearchCriteria(
×
507
                        contentType,
508
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
509
                        categoryName,
510
                        tabularTagName,
511
                        searchText
512
                );
513
            } catch (IllegalArgumentException e) {
×
514
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
515
            }
×
516
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
×
517
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
518
        }, getRequestUser(crc));
×
519
    }
520

521
    @GET
522
    @AuthRequired
523
    @Path("{id}/versions/{versionId}/files/counts")
524
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
525
                                         @PathParam("id") String datasetId,
526
                                         @PathParam("versionId") String versionId,
527
                                         @QueryParam("contentType") String contentType,
528
                                         @QueryParam("accessStatus") String accessStatus,
529
                                         @QueryParam("categoryName") String categoryName,
530
                                         @QueryParam("tabularTagName") String tabularTagName,
531
                                         @QueryParam("searchText") String searchText,
532
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
533
                                         @Context UriInfo uriInfo,
534
                                         @Context HttpHeaders headers) {
535
        return response(req -> {
×
536
            FileSearchCriteria fileSearchCriteria;
537
            try {
538
                fileSearchCriteria = new FileSearchCriteria(
×
539
                        contentType,
540
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
541
                        categoryName,
542
                        tabularTagName,
543
                        searchText
544
                );
545
            } catch (IllegalArgumentException e) {
×
546
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
547
            }
×
548
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
549
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
550
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
551
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
×
552
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
×
553
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
×
554
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
×
555
            return ok(jsonObjectBuilder);
×
556
        }, getRequestUser(crc));
×
557
    }
558

559
    @GET
560
    @AuthRequired
561
    @Path("{id}/dirindex")
562
    @Produces("text/html")
563
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
564

565
        folderName = folderName == null ? "" : folderName;
×
566
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
×
567
        
568
        DatasetVersion version;
569
        try {
570
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
571
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
572
        } catch (WrappedResponse wr) {
×
573
            return wr.getResponse();
×
574
        }
×
575
        
576
        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);
×
577
        
578
        // return "NOT FOUND" if there is no such folder in the dataset version:
579
        
580
        if ("".equals(output)) {
×
581
            return notFound("Folder " + folderName + " does not exist");
×
582
        }
583
        
584
        
585
        String indexFileName = folderName.equals("") ? ".index.html"
×
586
                : ".index-" + folderName.replace('/', '_') + ".html";
×
587
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");
×
588

589
        
590
        return Response.ok()
×
591
                .entity(output)
×
592
                //.type("application/html").
593
                .build();
×
594
    }
595
    
596
    @GET
597
    @AuthRequired
598
    @Path("{id}/versions/{versionId}/metadata")
599
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
600
        return response( req -> ok(
×
601
                    jsonByBlocks(
×
602
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
×
603
                                .getDatasetFields())), getRequestUser(crc));
×
604
    }
605
    
606
    @GET
607
    @AuthRequired
608
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
609
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
610
                                            @PathParam("id") String datasetId,
611
                                            @PathParam("versionNumber") String versionNumber,
612
                                            @PathParam("block") String blockName,
613
                                            @Context UriInfo uriInfo,
614
                                            @Context HttpHeaders headers) {
615
        
616
        return response( req -> {
×
617
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
×
618
            
619
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
620
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
×
621
                if ( p.getKey().getName().equals(blockName) ) {
×
622
                    return ok(json(p.getKey(), p.getValue()));
×
623
                }
624
            }
×
625
            return notFound("metadata block named " + blockName + " not found");
×
626
        }, getRequestUser(crc));
×
627
    }
628

629
    /**
630
     * Add Signposting
631
     * @param datasetId
632
     * @param versionId
633
     * @param uriInfo
634
     * @param headers
635
     * @return
636
     */
637
    @GET
638
    @AuthRequired
639
    @Path("{id}/versions/{versionId}/linkset")
640
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, 
641
           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
642
        if (DS_VERSION_DRAFT.equals(versionId)) {
×
643
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
×
644
        }
645
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
646
        try {
647
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
648
            return Response
×
649
                    .ok(Json.createObjectBuilder()
×
650
                            .add("linkset",
×
651
                                    new SignpostingResources(systemConfig, dsv,
652
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
×
653
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
×
654
                                                    .getJsonLinkset())
×
655
                            .build())
×
656
                    .type(MediaType.APPLICATION_JSON).build();
×
657
        } catch (WrappedResponse wr) {
×
658
            return wr.getResponse();
×
659
        }
660
    }
661

662
    @POST
663
    @AuthRequired
664
    @Path("{id}/modifyRegistration")
665
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
666
        return response( req -> {
×
667
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
668
            return ok("Dataset " + id + " target url updated");
×
669
        }, getRequestUser(crc));
×
670
    }
671
    
672
    @POST
673
    @AuthRequired
674
    @Path("/modifyRegistrationAll")
675
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
676
        return response( req -> {
×
677
            datasetService.findAll().forEach( ds -> {
×
678
                try {
679
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
680
                } catch (WrappedResponse ex) {
×
681
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
682
                }
×
683
            });
×
684
            return ok("Update All Dataset target url completed");
×
685
        }, getRequestUser(crc));
×
686
    }
687
    
688
    @POST
689
    @AuthRequired
690
    @Path("{id}/modifyRegistrationMetadata")
691
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {
692

693
        try {
694
            Dataset dataset = findDatasetOrDie(id);
×
695
            if (!dataset.isReleased()) {
×
696
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
×
697
            }
698
        } catch (WrappedResponse ex) {
×
699
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
700
        }
×
701

702
        return response(req -> {
×
703
            Dataset dataset = findDatasetOrDie(id);
×
704
            execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req));
×
705
            List<String> args = Arrays.asList(dataset.getIdentifier());
×
706
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
×
707
        }, getRequestUser(crc));
×
708
    }
709
    
710
    @POST
711
    @AuthRequired
712
    @Path("/modifyRegistrationPIDMetadataAll")
713
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
714
        return response( req -> {
×
715
            datasetService.findAll().forEach( ds -> {
×
716
                try {
717
                    logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier());
×
718
                    if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) {
×
719
                        if (ds.isReleased()) {
×
720
                            logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered");
×
721
                        }
722
                    } else {
723
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
×
724
                    }
725
                } catch (WrappedResponse ex) {
×
726
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
727
                }
×
728
            });
×
729
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
×
730
        }, getRequestUser(crc));
×
731
    }
732
  
733
    /**
     * Replaces the metadata of the dataset's draft version with the version
     * JSON in the request body. Only the special ":draft" version label is
     * accepted; if no draft exists yet, a new draft version is created from
     * the incoming JSON. Files cannot be added through this endpoint.
     *
     * @param crc       request context (authenticated user)
     * @param jsonBody  a dataset-version JSON object
     * @param id        dataset identifier (database id or persistent id)
     * @param versionId must equal DS_VERSION_DRAFT
     * @return the managed (persisted) version as JSON, or an error response
     */
    @PUT
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
        }
        
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
            
            // clear possibly stale fields from the incoming dataset version.
            // creation and modification dates are updated by the commands.
            incomingVersion.setId(null);
            incomingVersion.setVersionNumber(null);
            incomingVersion.setMinorVersionNumber(null);
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
            incomingVersion.setDataset(ds);
            incomingVersion.setCreateTime(null);
            incomingVersion.setLastUpdateTime(null);
            
            // File manipulation has its own APIs; reject version JSON that carries files.
            if (!incomingVersion.getFileMetadatas().isEmpty()){
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
            }
            
            // Decide between updating the existing draft and creating a new draft version.
            boolean updateDraft = ds.getLatestVersion().isDraft();
            
            DatasetVersion managedVersion;
            if (updateDraft) {
                // Copy the incoming fields and terms onto the existing draft, then
                // validate terms of use/access before running the update command.
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
                managedVersion = managedDataset.getOrCreateEditVersion();
            } else {
                // No existing draft: validate the incoming terms, then create a new
                // draft version from the incoming version object.
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
            }
            return ok( json(managedVersion, true) );
                    
        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );
            
        } catch (WrappedResponse ex) {
            return ex.getResponse();
            
        }
    }
794

795
    @GET
796
    @AuthRequired
797
    @Path("{id}/versions/{versionId}/metadata")
798
    @Produces("application/ld+json, application/json-ld")
799
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
800
        try {
801
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
802
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
×
803
            OREMap ore = new OREMap(dsv,
×
804
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
×
805
            return ok(ore.getOREMapBuilder(true));
×
806

807
        } catch (WrappedResponse ex) {
×
808
            ex.printStackTrace();
×
809
            return ex.getResponse();
×
810
        } catch (Exception jpe) {
×
811
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
×
812
            jpe.printStackTrace();
×
813
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
×
814
        }
815
    }
816

817
    /**
     * Convenience endpoint: OAI-ORE (JSON-LD) metadata for the latest version
     * of the dataset that the caller may access. Delegates to
     * getVersionJsonLDMetadata with the DS_VERSION_LATEST label.
     */
    @GET
    @AuthRequired
    @Path("{id}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
    }
824

825
    @PUT
826
    @AuthRequired
827
    @Path("{id}/metadata")
828
    @Consumes("application/ld+json, application/json-ld")
829
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
830

831
        try {
832
            Dataset ds = findDatasetOrDie(id);
×
833
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
834
            //Get draft state as of now
835

836
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
837
            //Get the current draft or create a new version to update
838
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
839
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
×
840
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
841
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
×
842
            if (!hasValidTerms) {
×
843
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
844
            }
845
            DatasetVersion managedVersion;
846
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
847
            managedVersion = managedDataset.getLatestVersion();
×
848
            String info = updateDraft ? "Version Updated" : "Version Created";
×
849
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
850

851
        } catch (WrappedResponse ex) {
×
852
            return ex.getResponse();
×
853
        } catch (JsonParsingException jpe) {
×
854
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
855
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
856
        }
857
    }
858

859
    @PUT
860
    @AuthRequired
861
    @Path("{id}/metadata/delete")
862
    @Consumes("application/ld+json, application/json-ld")
863
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
864
        try {
865
            Dataset ds = findDatasetOrDie(id);
×
866
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
867
            //Get draft state as of now
868

869
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
870
            //Get the current draft or create a new version to update
871
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
872
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
×
873
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
874
            DatasetVersion managedVersion;
875
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
876
            managedVersion = managedDataset.getLatestVersion();
×
877
            String info = updateDraft ? "Version Updated" : "Version Created";
×
878
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
879

880
        } catch (WrappedResponse ex) {
×
881
            ex.printStackTrace();
×
882
            return ex.getResponse();
×
883
        } catch (JsonParsingException jpe) {
×
884
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
885
            jpe.printStackTrace();
×
886
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
887
        }
888
    }
889

890
    @PUT
891
    @AuthRequired
892
    @Path("{id}/deleteMetadata")
893
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
894

895
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
896

897
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
898
    }
899

900
    /**
     * Removes the field values described by jsonBody from the dataset's edit
     * (draft) version. The body is either a single field object or a
     * {"fields": [...]} array. Each requested value must exist in the draft;
     * if any is not found, the whole request fails with 400 and nothing is
     * persisted. On success the draft is saved via UpdateDatasetVersionCommand
     * and the managed version is returned as JSON.
     *
     * @param jsonBody single field object or {"fields": [...]} array
     * @param id       dataset identifier (database id or persistent id)
     * @param req      the Dataverse request (user + source)
     * @return the updated version as JSON, or a 400/wrapped error response
     */
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
        try {

            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            //Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            List<DatasetField> fields = new LinkedList<>();
            DatasetField singleField = null;

            // Accept either a bare field object or a {"fields": [...]} wrapper.
            JsonArray fieldsJson = json.getJsonArray("fields");
            if (fieldsJson == null) {
                singleField = jsonParser().parseField(json, Boolean.FALSE);
                fields.add(singleField);
            } else {
                fields = jsonParser().parseMultipleFields(json);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            // Values matched for deletion are collected first and removed after
            // the scan of each value list, to avoid mutating a list mid-iteration.
            // NOTE(review): these accumulators are never cleared between fields in
            // the outer loop — verify that carry-over between fields is intended.
            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();

            for (DatasetField updateField : fields) {
                boolean found = false;
                // Locate the matching field (by type) in the draft version.
                for (DatasetField dsf : dsv.getDatasetFields()) {
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                // Controlled vocabulary: match requested values by their string value.
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    // NOTE(review): 'found' is not reset between iterations of this
                                    // loop, so once one value matches, later missing values are not
                                    // reported — confirm whether that is intended.
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
                                                found = true;
                                                controlledVocabularyItemsToRemove.add(existing);
                                            }
                                        }
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                        }
                                    }
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
                                        dsf.getControlledVocabularyValues().remove(remove);
                                    }

                                } else {
                                    // Single controlled vocabulary value: clear it if it matches.
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
                                        found = true;
                                        dsf.setSingleControlledVocabularyValue(null);
                                    }

                                }
                            } else {
                                if (!updateField.getDatasetFieldType().isCompound()) {
                                    // Primitive (non-compound) values: match by display value.
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
                                                    found = true;
                                                    // NOTE(review): this adds the incoming value (dfv), not the
                                                    // matched existing one (edsfv); the removal below then targets
                                                    // dsf's list — verify removal actually takes effect.
                                                    datasetFieldValueItemsToRemove.add(dfv);
                                                }
                                            }
                                            if (!found) {
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                            }
                                        }
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldValues().remove(remove);
                                        });

                                    } else {
                                        // Single primitive value: clear it if it matches.
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
                                            found = true;
                                            dsf.setSingleValue(null);
                                        }

                                    }
                                } else {
                                    // Compound values: compare by the joined display string of child fields.
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                        String deleteVal = getCompoundDisplayValue(dfcv);
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
                                            String existingString = getCompoundDisplayValue(existing);
                                            if (existingString.equals(deleteVal)) {
                                                found = true;
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
                                            }
                                        }
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
                                        });
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                        }
                                    }
                                }
                            }
                        } else {
                            // Field does not allow multiples: deleting it means clearing
                            // both possible single-value slots.
                            found = true;
                            dsf.setSingleValue(null);
                            dsf.setSingleControlledVocabularyValue(null);
                        }
                        break;
                    }
                }
                // The requested field type was not present in the draft at all.
                if (!found){
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                }
            }


            // Persist the edited draft and return the managed version.
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
            return ok(json(managedVersion, true));

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());

        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
            return ex.getResponse();

        }
    
    }
1031
    
1032
    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
1033
        String returnString = "";
×
1034
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
×
1035
            for (String value : dsf.getValues()) {
×
1036
                if (!(value == null)) {
×
1037
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
×
1038
                }
1039
            }
×
1040
        }
×
1041
        return returnString;
×
1042
    }
1043
    
1044
    @PUT
1045
    @AuthRequired
1046
    @Path("{id}/editMetadata")
1047
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
1048

1049
        Boolean replaceData = replace != null;
×
1050
        DataverseRequest req = null;
×
1051
        req = createDataverseRequest(getRequestUser(crc));
×
1052

1053
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
1054
    }
1055
    
1056
    
1057
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
1058
        try {
1059
           
1060
            Dataset ds = findDatasetOrDie(id);
×
1061
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1062
            //Get the current draft or create a new version to update
1063
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
1064
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
1065
            List<DatasetField> fields = new LinkedList<>();
×
1066
            DatasetField singleField = null;
×
1067
            
1068
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1069
            if (fieldsJson == null) {
×
1070
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
1071
                fields.add(singleField);
×
1072
            } else {
1073
                fields = jsonParser().parseMultipleFields(json);
×
1074
            }
1075
            
1076

1077
            String valdationErrors = validateDatasetFieldValues(fields);
×
1078

1079
            if (!valdationErrors.isEmpty()) {
×
1080
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
×
1081
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
×
1082
            }
1083

1084
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1085

1086
            //loop through the update fields     
1087
            // and compare to the version fields  
1088
            //if exist add/replace values
1089
            //if not add entire dsf
1090
            for (DatasetField updateField : fields) {
×
1091
                boolean found = false;
×
1092
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
1093
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
1094
                        found = true;
×
1095
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1096
                            List priorCVV = new ArrayList<>();
×
1097
                            String cvvDisplay = "";
×
1098

1099
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1100
                                cvvDisplay = dsf.getDisplayValue();
×
1101
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
×
1102
                                    priorCVV.add(cvvOld);
×
1103
                                }
×
1104
                            }
1105

1106
                            if (replaceData) {
×
1107
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1108
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
×
1109
                                    dsf.setDatasetFieldValues(new ArrayList<>());
×
1110
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
×
1111
                                    priorCVV.clear();
×
1112
                                    dsf.getControlledVocabularyValues().clear();
×
1113
                                } else {
1114
                                    dsf.setSingleValue("");
×
1115
                                    dsf.setSingleControlledVocabularyValue(null);
×
1116
                                }
1117
                              cvvDisplay="";
×
1118
                            }
1119
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1120
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1121
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1122
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
×
1123
                                            priorCVV.add(cvv);
×
1124
                                        }
1125
                                    }
×
1126
                                    dsf.setControlledVocabularyValues(priorCVV);
×
1127
                                } else {
1128
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1129
                                }
1130
                            } else {
1131
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
1132
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1133
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
1134
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
×
1135
                                                dfv.setDatasetField(dsf);
×
1136
                                                dsf.getDatasetFieldValues().add(dfv);
×
1137
                                            }
1138
                                        }
×
1139
                                    } else {
1140
                                        dsf.setSingleValue(updateField.getValue());
×
1141
                                    }
1142
                                } else {
1143
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
1144
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
×
1145
                                            dfcv.setParentDatasetField(dsf);
×
1146
                                            dsf.setDatasetVersion(dsv);
×
1147
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
×
1148
                                        }
1149
                                    }
×
1150
                                }
1151
                            }
1152
                        } else {
×
1153
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
×
1154
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
×
1155
                            }
1156
                        }
1157
                        break;
1158
                    }
1159
                }
×
1160
                if (!found) {
×
1161
                    updateField.setDatasetVersion(dsv);
×
1162
                    dsv.getDatasetFields().add(updateField);
×
1163
                }
1164
            }
×
1165
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1166

1167
            return ok(json(managedVersion, true));
×
1168

1169
        } catch (JsonParseException ex) {
×
1170
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1171
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1172

1173
        } catch (WrappedResponse ex) {
×
1174
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
1175
            return ex.getResponse();
×
1176

1177
        }
1178
    }
1179
    
1180
    private String validateDatasetFieldValues(List<DatasetField> fields) {
1181
        StringBuilder error = new StringBuilder();
×
1182

1183
        for (DatasetField dsf : fields) {
×
1184
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1185
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
×
1186
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1187
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) {
×
1188
                error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1189
            }
1190
        }
×
1191

1192
        if (!error.toString().isEmpty()) {
×
1193
            return (error.toString());
×
1194
        }
1195
        return "";
×
1196
    }
1197
    
1198
    /**
1199
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
1200
     */
1201
    @GET
1202
    @AuthRequired
1203
    @Path("{id}/actions/:publish")
1204
    @Deprecated
1205
    public Response publishDataseUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type ) {
1206
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
×
1207
        return publishDataset(crc, id, type, false);
×
1208
    }
1209

1210
    /**
     * Publishes the dataset identified by {@code id}.
     * <p>
     * The required {@code type} query parameter selects the release kind:
     * {@code major}, {@code minor}, or {@code updatecurrent} (the last updates
     * the already-published current version and is restricted to superusers).
     * When {@code assureIsIndexed} is true the request is rejected with 409 if
     * the dataset appears to have indexing pending. Publication is refused
     * with 409 if the latest version's Terms of Use and Access are invalid.
     *
     * @param crc           request context used to resolve the calling user
     * @param id            dataset id or persistent identifier
     * @param type          release kind: "major", "minor", or "updatecurrent"
     * @param mustBeIndexed when true, require indexing to be up to date first
     * @return 200/202 with the dataset JSON on success; 400/403/409/500 on the
     *         various failure paths described above
     */
    @POST
    @AuthRequired
    @Path("{id}/actions/:publish")
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
        try {
            if (type == null) {
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
            }
            boolean updateCurrent=false;
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            // Accept the type parameter case-insensitively.
            type = type.toLowerCase();
            boolean isMinor=false;
            switch (type) {
                case "minor":
                    isMinor = true;
                    break;
                case "major":
                    isMinor = false;
                    break;
                case "updatecurrent":
                    // Rewriting the current published version bypasses normal
                    // versioning, so it is restricted to superusers.
                    if (user.isSuperuser()) {
                        updateCurrent = true;
                    } else {
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
                    }
                    break;
                default:
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
            }

            Dataset ds = findDatasetOrDie(id);
            
            // Publication requires valid Terms of Use and Access on the latest version.
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
            if (!hasValidTerms) {
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
            }
            
            if (mustBeIndexed) {
                logger.fine("IT: " + ds.getIndexTime());
                logger.fine("MT: " + ds.getModificationTime());
                logger.fine("PIT: " + ds.getPermissionIndexTime());
                logger.fine("PMT: " + ds.getPermissionModificationTime());
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
                }
                /*
                 * Some calls, such as the /datasets/actions/:import* commands do not set the
                 * modification or permission modification times. The checks here are trying to
                 * see if indexing or permissionindexing could be pending, so they check to see
                 * if the relevant modification time is set and if so, whether the index is also
                 * set and if so, if it after the modification time. If the modification time is
                 * set and the index time is null or is before the mod time, the 409/conflict
                 * error is returned.
                 *
                 */
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
                }
            }
            if (updateCurrent) {
                /*
                 * Note: The code here mirrors that in the
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
                 * to the core logic (i.e. beyond updating the messaging about results) should
                 * be applied to the code there as well.
                 */
                String errorMsg = null;
                String successMsg = null;
                try {
                    // Curate (update-in-place) the already-published version.
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
                    ds = commandEngine.submit(cmd);
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");

                    // If configured, update archive copy as well
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
                    DatasetVersion updateVersion = ds.getLatestVersion();
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
                    if (archiveCommand != null) {
                        // Delete the record of any existing copy since it is now out of date/incorrect
                        updateVersion.setArchivalCopyLocation(null);
                        /*
                         * Then try to generate and submit an archival copy. Note that running this
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
                         * "The attribute [id] of class
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
                         * key column in the database. Updates are not allowed." To avoid that, and to
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
                         * pulled this out as a separate submit().
                         */
                        try {
                            updateVersion = commandEngine.submit(archiveCommand);
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
                            } else {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
                            }
                        } catch (CommandException ex) {
                            // Archiving is best-effort: a failure here only changes the
                            // success message, it does not fail the curate operation.
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
                            logger.severe(ex.getMessage());
                        }
                    }
                } catch (CommandException ex) {
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                    logger.severe(ex.getMessage());
                }
                if (errorMsg != null) {
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
                } else {
                    // 200 with the curated dataset plus a human-readable status_details.
                    return Response.ok(Json.createObjectBuilder()
                            .add("status", ApiConstants.STATUS_OK)
                            .add("status_details", successMsg)
                            .add("data", json(ds)).build())
                            .type(MediaType.APPLICATION_JSON)
                            .build();
                }
            } else {
                // Regular major/minor publication via the command engine.
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
                        createDataverseRequest(user),
                        isMinor));
                // 202 Accepted when a publication workflow was started; 200 otherwise.
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1336

1337
    /**
     * Releases a migrated dataset (superuser only). The JSON-LD request body
     * may carry a schema.org {@code datePublished}, which is applied as the
     * version release time (and as the dataset publication date for a v1.0
     * release). If the latest version has no version number yet, one is
     * assigned here (1.0 for a first release, otherwise the next minor or
     * major number). Finalization runs either through the default
     * PrePublishDataset workflow, if one is configured, or directly via
     * {@code FinalizeDatasetPublicationCommand}.
     *
     * @param crc                request context used to resolve the calling user
     * @param jsonldBody         JSON-LD metadata, read for datePublished
     * @param id                 dataset id or persistent identifier
     * @param contactPIDProvider when true, the PID provider is updated during
     *                           finalization (passed inverted as the command's
     *                           "no-op PID" flag)
     * @return 202 when a workflow was started, 200 on direct finalization,
     *         403 for non-superusers, 400 for an unusable publication date,
     *         500 on a command failure
     */
    @POST
    @AuthRequired
    @Path("{id}/actions/:releasemigrated")
    @Consumes("application/ld+json, application/json-ld")
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
            }

            Dataset ds = findDatasetOrDie(id);
            try {
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
                logger.fine("Submitted date: " + pubDate);
                LocalDateTime dateTime = null;
                if(!StringUtils.isEmpty(pubDate)) {
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
                    final Timestamp time = Timestamp.valueOf(dateTime);
                    //Set version release date
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
                }
                // dataset.getPublicationDateFormattedYYYYMMDD())
                // Assign a version number if not set
                if (ds.getLatestVersion().getVersionNumber() == null) {

                    if (ds.getVersions().size() == 1) {
                        // First Release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
                        // Minor update: same major number, minor incremented.
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
                    } else {
                        // major, non-first release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    }
                }
                if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) {
                    //Also set publication date if this is the first
                    if(dateTime != null) {
                      ds.setPublicationDate(Timestamp.valueOf(dateTime));
                    }
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
                    // is null, so set it here.
                    ds.setReleaseUser((AuthenticatedUser) user);
                }
            } catch (Exception e) {
                // NOTE(review): broad catch — any parsing/unboxing problem above is
                // reported as a bad publication date; the original cause is not chained.
                logger.fine(e.getMessage());
                throw new BadRequestException("Unable to set publication date ("
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
            }
            /*
             * Note: The code here mirrors that in the
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
             * to the core logic (i.e. beyond updating the messaging about results) should
             * be applied to the code there as well.
             */
            String errorMsg = null;
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);

            try {
                // ToDo - should this be in onSuccess()? May relate to todo above
                if (prePubWf.isPresent()) {
                    // Start the workflow, the workflow will call FinalizeDatasetPublication later
                    wfService.start(prePubWf.get(),
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
                            false);
                } else {
                    // No workflow configured: finalize publication directly.
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
                            createDataverseRequest(user), !contactPIDProvider);
                    ds = commandEngine.submit(cmd);
                }
            } catch (CommandException ex) {
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                logger.severe(ex.getMessage());
            }

            if (errorMsg != null) {
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
            } else {
                // 202 if the workflow will finalize asynchronously, 200 otherwise.
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
            }

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1427

1428
    @POST
1429
    @AuthRequired
1430
    @Path("{id}/move/{targetDataverseAlias}")
1431
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
1432
        try {
1433
            User u = getRequestUser(crc);
×
1434
            Dataset ds = findDatasetOrDie(id);
×
1435
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
×
1436
            if (target == null) {
×
1437
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
×
1438
            }
1439
            //Command requires Super user - it will be tested by the command
1440
            execCommand(new MoveDatasetCommand(
×
1441
                    createDataverseRequest(u), ds, target, force
×
1442
            ));
1443
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
×
1444
        } catch (WrappedResponse ex) {
×
1445
            if (ex.getCause() instanceof UnforcedCommandException) {
×
1446
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
×
1447
            } else {
1448
                return ex.getResponse();
×
1449
            }
1450
        }
1451
    }
1452

1453
    /**
     * Places an embargo on a set of files in a dataset.
     * <p>
     * The JSON body must contain {@code dateAvailable} (ISO local date, must
     * be in the future and within :MaxEmbargoDurationInMonths unless that
     * setting is -1), {@code reason}, and {@code fileIds}. The caller must
     * have EditDataset permission; only a superuser may embargo files that
     * are already released. Existing embargoes on the affected files are
     * replaced, and any embargo left with no referencing files is deleted.
     *
     * @param crc      request context used to resolve the calling user
     * @param id       dataset id or persistent identifier
     * @param jsonBody embargo definition as described above
     * @return 200 on success; 400/401/403/409 on the various failure paths
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:set-embargo")
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
        
        // Embargo changes are refused while the Terms of Use and Access are invalid.
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
        
        if (!hasValidTerms){
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        /*
         * This is only a pre-test - if there's no draft version, there are clearly no
         * files that a normal user can change. The converse is not true. A draft
         * version could contain only files that have already been released. Further, we
         * haven't checked the file list yet so the user could still be trying to change
         * released files even if there are some unreleased/draft-only files. Doing this
         * check here does avoid having to do further parsing for some error cases. It
         * also checks the user can edit this dataset, so we don't have to make that
         * check later.
         */

        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
        long maxEmbargoDurationInMonths = 0;
        try {
            maxEmbargoDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // NOTE(review): relies on Long.parseLong(null) throwing a
            // NumberFormatException whose message contains "null" to detect an
            // unset setting — fragile; a direct null check would be clearer.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
            }
        }
        if (maxEmbargoDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
        }

        JsonObject json = JsonUtil.getJsonObject(jsonBody);

        Embargo embargo = new Embargo();


        LocalDate currentDateTime = LocalDate.now();
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));

        // check :MaxEmbargoDurationInMonths if -1
        // A setting of -1 means no upper limit on the embargo end date.
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
        // dateAvailable is not in the past
        if (dateAvailable.isAfter(currentDateTime)){
            embargo.setDateAvailable(dateAvailable);
        } else {
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
        }

        // dateAvailable is within limits
        if (maxEmbargoDateTime != null){
            if (dateAvailable.isAfter(maxEmbargoDateTime)){
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
            }
        }

        embargo.setReason(json.getString("reason"));

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> filesToEmbargo = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            JsonArray fileIds = json.getJsonArray("fileIds");
            for (JsonValue jsv : fileIds) {
                try {
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
                    filesToEmbargo.add(dataFile);
                } catch (WrappedResponse ex) {
                    return ex.getResponse();
                }
            }
        }

        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(filesToEmbargo)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : filesToEmbargo) {
                // superuser can overrule an existing embargo, even on released files
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                // Report every offending file id so the client can correct the request.
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "You do not have permission to embargo the following files")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Attach the new embargo to the persistence context before wiring files to it.
            embargo=embargoService.merge(embargo);
            // Good request, so add the embargo. Track any existing embargoes so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : filesToEmbargo) {
                Embargo emb = datafile.getEmbargo();
                if (emb != null) {
                    emb.getDataFiles().remove(datafile);
                    if (emb.getDataFiles().isEmpty()) {
                        orphanedEmbargoes.add(emb);
                    }
                }
                // Save merges the datafile with an embargo into the context
                datafile.setEmbargo(embargo);
                fileService.save(datafile);
            }
            //Call service to get action logged
            // NOTE(review): the returned id is unused here; the call is kept for
            // its persistence/action-logging side effect.
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
            if (orphanedEmbargoes.size() > 0) {
                for (Embargo emb : orphanedEmbargoes) {
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
                }
            }
            //If superuser, report changes to any released files
            if (authenticatedUser.isSuperuser()) {
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
                if (!releasedFiles.isBlank()) {
                    actionLogSvc
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
                                            + releasedFiles + ".")
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
                }
            }
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
1608

1609
    @POST
1610
    @AuthRequired
1611
    @Path("{id}/files/actions/:unset-embargo")
1612
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1613

1614
        // user is authenticated
1615
        AuthenticatedUser authenticatedUser = null;
×
1616
        try {
1617
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1618
        } catch (WrappedResponse ex) {
×
1619
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1620
        }
×
1621

1622
        Dataset dataset;
1623
        try {
1624
            dataset = findDatasetOrDie(id);
×
1625
        } catch (WrappedResponse ex) {
×
1626
            return ex.getResponse();
×
1627
        }
×
1628

1629
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1630
        // check if files are unreleased(DRAFT?)
1631
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1632
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1633
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1634
        }
1635

1636
        // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
1637
        //Todo - is 400 right for embargoes not enabled
1638
        //Todo - handle getting Long for duration in one place (settings getLong method? or is that only in wrapper (view scoped)?
1639
        int maxEmbargoDurationInMonths = 0;
×
1640
        try {
1641
            maxEmbargoDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1642
        } catch (NumberFormatException nfe){
×
1643
            if (nfe.getMessage().contains("null")) {
×
1644
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1645
            }
1646
        }
×
1647
        if (maxEmbargoDurationInMonths == 0){
×
1648
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1649
        }
1650

1651
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1652

1653
        List<DataFile> datasetFiles = dataset.getFiles();
×
1654
        List<DataFile> embargoFilesToUnset = new LinkedList<>();
×
1655

1656
        // extract fileIds from json, find datafiles and add to list
1657
        if (json.containsKey("fileIds")){
×
1658
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1659
            for (JsonValue jsv : fileIds) {
×
1660
                try {
1661
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1662
                    embargoFilesToUnset.add(dataFile);
×
1663
                } catch (WrappedResponse ex) {
×
1664
                    return ex.getResponse();
×
1665
                }
×
1666
            }
×
1667
        }
1668

1669
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1670
        // check if files belong to dataset
1671
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
×
1672
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1673
            boolean badFiles = false;
×
1674
            for (DataFile datafile : embargoFilesToUnset) {
×
1675
                // superuser can overrule an existing embargo, even on released files
1676
                if (datafile.getEmbargo()==null || ((datafile.isReleased() && datafile.getEmbargo() != null) && !authenticatedUser.isSuperuser())) {
×
1677
                    restrictedFiles.add(datafile.getId());
×
1678
                    badFiles = true;
×
1679
                }
1680
            }
×
1681
            if (badFiles) {
×
1682
                return Response.status(Status.FORBIDDEN)
×
1683
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1684
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
×
1685
                                .add("files", restrictedFiles).build())
×
1686
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1687
            }
1688
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
1689
            // delete them if there are no files left that reference them.
1690
            for (DataFile datafile : embargoFilesToUnset) {
×
1691
                Embargo emb = datafile.getEmbargo();
×
1692
                if (emb != null) {
×
1693
                    emb.getDataFiles().remove(datafile);
×
1694
                    if (emb.getDataFiles().isEmpty()) {
×
1695
                        orphanedEmbargoes.add(emb);
×
1696
                    }
1697
                }
1698
                // Save merges the datafile with an embargo into the context
1699
                datafile.setEmbargo(null);
×
1700
                fileService.save(datafile);
×
1701
            }
×
1702
            if (orphanedEmbargoes.size() > 0) {
×
1703
                for (Embargo emb : orphanedEmbargoes) {
×
1704
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1705
                }
×
1706
            }
1707
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
1708
            if(!releasedFiles.isBlank()) {
×
1709
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
×
1710
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
1711
                actionLogSvc.log(removeRecord);
×
1712
            }
1713
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
×
1714
        } else {
1715
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1716
        }
1717
    }
1718

1719
    /**
     * API endpoint that sets (or replaces) a retention period on a set of files in a dataset.
     *
     * Expects a JSON body of the form
     * {@code {"dateUnavailable": "YYYY-MM-DD", "reason": "...", "fileIds": [..]}}
     * where {@code reason} is optional. Returns 200 on success; 400 for bad input or when
     * retentions are disabled; 401 when unauthenticated; 403 when the caller lacks permission;
     * 409 when the latest version's Terms of Use/Access are invalid.
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:set-retention")
    public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);

        if (!hasValidTerms){
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        // check if files are unreleased(DRAFT?)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if retentions are allowed(:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set(null) return 400
        long minRetentionDurationInMonths = 0;
        try {
            minRetentionDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // NOTE(review): relies on parseLong(null) producing a message containing "null";
            // fragile across JDK versions — consider an explicit null check on the setting.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
            }
        }
        if (minRetentionDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
        }

        JsonObject json;
        try {
            json = JsonUtil.getJsonObject(jsonBody);
        } catch (JsonException ex) {
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
        }

        Retention retention = new Retention();


        LocalDate currentDateTime = LocalDate.now();

        // Extract the dateUnavailable - check if specified and valid
        String dateUnavailableStr = "";
        LocalDate dateUnavailable;
        try {
            dateUnavailableStr = json.getString("dateUnavailable");
            dateUnavailable = LocalDate.parse(dateUnavailableStr);
        } catch (NullPointerException npex) {
            // JsonObject.getString throws NPE when the key is absent
            return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified");
        } catch (ClassCastException ccex) {
            // thrown when the value is present but not a JSON string
            return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string");
        } catch (DateTimeParseException dtpex) {
            return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr);
        }

        // check :MinRetentionDurationInMonths if -1
        // -1 means "no minimum"; otherwise the earliest allowed dateUnavailable is now + N months
        LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? LocalDate.now().plusMonths(minRetentionDurationInMonths) : null;
        // dateUnavailable is not in the past
        if (dateUnavailable.isAfter(currentDateTime)){
            retention.setDateUnavailable(dateUnavailable);
        } else {
            return error(Status.BAD_REQUEST, "Date unavailable can not be in the past");
        }

        // dateAvailable is within limits
        if (minRetentionDateTime != null){
            if (dateUnavailable.isBefore(minRetentionDateTime)){
                return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
            }
        }
        
        try {
            String reason = json.getString("reason");
            retention.setReason(reason);
        } catch (NullPointerException npex) {
            // ignoring; no reason specified is OK, it is optional
        } catch (ClassCastException ccex) {
            return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string");
        }


        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> filesToRetention = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            try {
                JsonArray fileIds = json.getJsonArray("fileIds");
                for (JsonValue jsv : fileIds) {
                    try {
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
                        filesToRetention.add(dataFile);
                    } catch (WrappedResponse ex) {
                        return ex.getResponse();
                    }
                }
            } catch (ClassCastException ccex) {
                return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings");
            } catch (NullPointerException npex) {
                return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified");
            }
        } else {
            return error(Status.BAD_REQUEST, "No fileIds specified");
        }

        List<Retention> orphanedRetentions = new ArrayList<Retention>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(filesToRetention)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : filesToRetention) {
                // superuser can overrule an existing retention, even on released files
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "You do not have permission to set a retention period for the following files")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            retention=retentionService.merge(retention);
            // Good request, so add the retention. Track any existing retentions so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : filesToRetention) {
                Retention ret = datafile.getRetention();
                if (ret != null) {
                    ret.getDataFiles().remove(datafile);
                    if (ret.getDataFiles().isEmpty()) {
                        orphanedRetentions.add(ret);
                    }
                }
                // Save merges the datafile with an retention into the context
                datafile.setRetention(retention);
                fileService.save(datafile);
            }
            //Call service to get action logged
            long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier());
            if (orphanedRetentions.size() > 0) {
                for (Retention ret : orphanedRetentions) {
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
                }
            }
            //If superuser, report changes to any released files
            if (authenticatedUser.isSuperuser()) {
                String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased())
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
                if (!releasedFiles.isBlank()) {
                    actionLogSvc
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo")
                                    .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) "
                                            + releasedFiles + ".")
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
                }
            }
            return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
1898

1899
    /**
     * API endpoint that removes the retention period from a set of files in a dataset.
     *
     * Expects a JSON body of the form {@code {"fileIds": [..]}}. Any {@link Retention}
     * left with no referencing files afterwards is deleted. Returns 200 on success;
     * 400 for bad input or when retentions are disabled; 401 when unauthenticated;
     * 403 when a listed file has no retention or the caller may not remove it.
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:unset-retention")
    public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        // check if files are unreleased(DRAFT?)
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if retentions are allowed(:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set(null) return 400
        int minRetentionDurationInMonths = 0;
        try {
            minRetentionDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // NOTE(review): relies on parseInt(null) producing a message containing "null";
            // fragile across JDK versions — consider an explicit null check on the setting.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
            }
        }
        if (minRetentionDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
        }

        JsonObject json;
        try {
            json = JsonUtil.getJsonObject(jsonBody);
        } catch (JsonException ex) {
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
        }

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> retentionFilesToUnset = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            try {
                JsonArray fileIds = json.getJsonArray("fileIds");
                for (JsonValue jsv : fileIds) {
                    try {
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
                        retentionFilesToUnset.add(dataFile);
                    } catch (WrappedResponse ex) {
                        return ex.getResponse();
                    }
                }
            } catch (ClassCastException ccex) {
                // "fileIds" present but not a JSON array
                return error(Status.BAD_REQUEST, "fileIds must be an array of id strings");
            } catch (NullPointerException npex) {
                return error(Status.BAD_REQUEST, "No fileIds specified");
            }
        } else {
            return error(Status.BAD_REQUEST, "No fileIds specified");
        }

        List<Retention> orphanedRetentions = new ArrayList<Retention>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(retentionFilesToUnset)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : retentionFilesToUnset) {
                // superuser can overrule an existing retention, even on released files
                if (datafile.getRetention()==null || ((datafile.isReleased() && datafile.getRetention() != null) && !authenticatedUser.isSuperuser())) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Good request, so remove the retention from the files. Track any existing retentions so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : retentionFilesToUnset) {
                Retention ret = datafile.getRetention();
                if (ret != null) {
                    ret.getDataFiles().remove(datafile);
                    if (ret.getDataFiles().isEmpty()) {
                        orphanedRetentions.add(ret);
                    }
                }
                // Save merges the datafile with an retention into the context
                datafile.setRetention(null);
                fileService.save(datafile);
            }
            if (orphanedRetentions.size() > 0) {
                for (Retention ret : orphanedRetentions) {
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
                }
            }
            // Audit-log removals that touched released files (these required superuser)
            String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
            if(!releasedFiles.isBlank()) {
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + ".");
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
                actionLogSvc.log(removeRecord);
            }
            return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
2019

2020
    @PUT
2021
    @AuthRequired
2022
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
2023
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
2024
        try {
2025
            User u = getRequestUser(crc);
×
2026
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
2027
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
2028
            if (linked == null){
×
2029
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
2030
            }
2031
            if (linking == null) {
×
2032
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
2033
            }
2034
            execCommand(new LinkDatasetCommand(
×
2035
                    createDataverseRequest(u), linking, linked
×
2036
            ));
2037
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
2038
        } catch (WrappedResponse ex) {
×
2039
            return ex.getResponse();
×
2040
        }
2041
    }
2042

2043
    /**
     * Redirects to the Terms tab of the dataset page when the given version uses
     * custom terms (i.e. has no standard license); returns 404 otherwise.
     */
    @GET
    @Path("{id}/versions/{versionId}/customlicense")
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        User user = session.getUser();
        String persistentId;
        try {
            // A non-null License means a standard license is in use, so there are
            // no custom terms to show.
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
            }
            // ":persistentId".substring(1) == "persistentId" — reads the query parameter
            // the caller used to address the dataset.
            persistentId = getRequestParameter(":persistentId".substring(1));
            if (versionId.equals(DS_VERSION_DRAFT)) {
                // Normalize the API draft-version token to the UI's "DRAFT" form.
                versionId = "DRAFT";
            }
        } catch (WrappedResponse wrappedResponse) {
            return wrappedResponse.getResponse();
        }
        // 303 See Other to the dataset page with the Terms tab preselected.
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
    }
2063

2064

2065
    @GET
2066
    @AuthRequired
2067
    @Path("{id}/links")
2068
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
2069
        try {
2070
            User u = getRequestUser(crc);
×
2071
            if (!u.isSuperuser()) {
×
2072
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
2073
            }
2074
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2075

2076
            long datasetId = dataset.getId();
×
2077
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
2078
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
2079
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
2080
                JsonObjectBuilder datasetBuilder = Json.createObjectBuilder();
×
2081
                datasetBuilder.add("id", dataverse.getId());
×
2082
                datasetBuilder.add("alias", dataverse.getAlias());
×
2083
                datasetBuilder.add("displayName", dataverse.getDisplayName());
×
2084
                dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build());
×
2085
            }
×
2086
            JsonObjectBuilder response = Json.createObjectBuilder();
×
2087
            response.add("id", datasetId);
×
2088
            response.add("identifier", dataset.getIdentifier());
×
2089
            response.add("linked-dataverses", dataversesThatLinkToThisDatasetIdBuilder);
×
2090
            return ok(response);
×
2091
        } catch (WrappedResponse wr) {
×
2092
            return wr.getResponse();
×
2093
        }
2094
    }
2095

2096
    /**
     * Add a given role assignment to a given user or group on a dataset.
     *
     * @param crc    request context carrying the acting user
     * @param ra     role assignment DTO (assignee identifier and role alias)
     * @param id     dataset id or persistent id
     * @param apiKey legacy API key query parameter (unused here; authentication
     *               is handled by {@code @AuthRequired})
     * @return 200 with the created assignment, or 400 when the assignee or role
     *         cannot be resolved, or the wrapped error from the command.
     */
    @POST
    @AuthRequired
    @Path("{identifier}/assignments")
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
        try {
            Dataset dataset = findDatasetOrDie(id);
            
            RoleAssignee assignee = findAssignee(ra.getAssignee());
            if (assignee == null) {
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
            }
            
            // Resolve the role alias by walking up the owner chain, starting at the
            // dataset's owning dataverse, until a dataverse offers a matching role.
            DataverseRole theRole;
            Dataverse dv = dataset.getOwner();
            theRole = null;
            while ((theRole == null) && (dv != null)) {
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
                    if (aRole.getAlias().equals(ra.getRole())) {
                        theRole = aRole;
                        break;
                    }
                }
                dv = dv.getOwner();
            }
            if (theRole == null) {
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
            }

            // No private-URL token is involved in an explicit role assignment.
            String privateUrlToken = null;
            return ok(
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
        } catch (WrappedResponse ex) {
            List<String> args = Arrays.asList(ex.getMessage());
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
            return ex.getResponse();
        }

    }
2141
    
2142
    @DELETE
2143
    @AuthRequired
2144
    @Path("{identifier}/assignments/{id}")
2145
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
2146
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
×
2147
        if (ra != null) {
×
2148
            try {
2149
                findDatasetOrDie(dsId);
×
2150
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
×
2151
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
×
2152
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
×
2153
            } catch (WrappedResponse ex) {
×
2154
                return ex.getResponse();
×
2155
            }
2156
        } else {
2157
            List<String> args = Arrays.asList(Long.toString(assignmentId));
×
2158
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
×
2159
        }
2160
    }
2161

2162
    /**
     * Lists all role assignments on a dataset as a JSON array.
     */
    @GET
    @AuthRequired
    @Path("{identifier}/assignments")
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
        // response(...) wraps command execution and maps WrappedResponse to an error Response.
        return response(req ->
                ok(execCommand(
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
    }
2171

2172
    @GET
2173
    @AuthRequired
2174
    @Path("{id}/privateUrl")
2175
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2176
        return response( req -> {
×
2177
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
2178
            return (privateUrl != null) ? ok(json(privateUrl))
×
2179
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
2180
        }, getRequestUser(crc));
×
2181
    }
2182

2183
    @POST
2184
    @AuthRequired
2185
    @Path("{id}/privateUrl")
2186
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
2187
        if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
×
2188
            throw new NotAcceptableException("Anonymized Access not enabled");
×
2189
        }
2190
        return response(req ->
×
2191
                ok(json(execCommand(
×
2192
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
×
2193
    }
2194

2195
    @DELETE
2196
    @AuthRequired
2197
    @Path("{id}/privateUrl")
2198
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2199
        return response( req -> {
×
2200
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2201
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
2202
            if (privateUrl != null) {
×
2203
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
2204
                return ok("Private URL deleted.");
×
2205
            } else {
2206
                return notFound("No Private URL to delete.");
×
2207
            }
2208
        }, getRequestUser(crc));
×
2209
    }
2210

2211
    @GET
2212
    @AuthRequired
2213
    @Path("{id}/thumbnail/candidates")
2214
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2215
        try {
2216
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2217
            boolean canUpdateThumbnail = false;
×
2218
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
×
2219
            if (!canUpdateThumbnail) {
×
2220
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
×
2221
            }
2222
            JsonArrayBuilder data = Json.createArrayBuilder();
×
2223
            boolean considerDatasetLogoAsCandidate = true;
×
2224
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
×
2225
                JsonObjectBuilder candidate = Json.createObjectBuilder();
×
2226
                String base64image = datasetThumbnail.getBase64image();
×
2227
                if (base64image != null) {
×
2228
                    logger.fine("found a candidate!");
×
2229
                    candidate.add("base64image", base64image);
×
2230
                }
2231
                DataFile dataFile = datasetThumbnail.getDataFile();
×
2232
                if (dataFile != null) {
×
2233
                    candidate.add("dataFileId", dataFile.getId());
×
2234
                }
2235
                data.add(candidate);
×
2236
            }
×
2237
            return ok(data);
×
2238
        } catch (WrappedResponse ex) {
×
2239
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
×
2240
        }
2241
    }
2242

2243
    @GET
2244
    @Produces({"image/png"})
2245
    @Path("{id}/thumbnail")
2246
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
2247
        try {
2248
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2249
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
×
2250
            if(is == null) {
×
2251
                return notFound("Thumbnail not available");
×
2252
            }
2253
            return Response.ok(is).build();
×
2254
        } catch (WrappedResponse wr) {
×
2255
            return notFound("Thumbnail not available");
×
2256
        }
2257
    }
2258

2259
    @GET
2260
    @Produces({ "image/png" })
2261
    @Path("{id}/logo")
2262
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
2263
        try {
2264
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2265
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
×
2266
            if (is == null) {
×
2267
                return notFound("Logo not available");
×
2268
            }
2269
            return Response.ok(is).build();
×
2270
        } catch (WrappedResponse wr) {
×
2271
            return notFound("Logo not available");
×
2272
        }
2273
    }
2274

2275
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
2276
    @POST
2277
    @AuthRequired
2278
    @Path("{id}/thumbnail/{dataFileId}")
2279
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
2280
        try {
2281
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
2282
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
2283
        } catch (WrappedResponse wr) {
×
2284
            return wr.getResponse();
×
2285
        }
2286
    }
2287

2288
    @POST
2289
    @AuthRequired
2290
    @Path("{id}/thumbnail")
2291
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2292
    @Produces("application/json")
2293
    @Operation(summary = "Uploads a logo for a dataset", 
2294
               description = "Uploads a logo for a dataset")
2295
    @APIResponse(responseCode = "200",
2296
               description = "Dataset logo uploaded successfully")
2297
    @Tag(name = "uploadDatasetLogo", 
2298
         description = "Uploads a logo for a dataset")
2299
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))          
2300
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
2301
        try {
2302
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
×
2303
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
2304
        } catch (WrappedResponse wr) {
×
2305
            return wr.getResponse();
×
2306
        }
2307
    }
2308

2309
    @DELETE
2310
    @AuthRequired
2311
    @Path("{id}/thumbnail")
2312
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2313
        try {
2314
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
2315
            return ok("Dataset thumbnail removed.");
×
2316
        } catch (WrappedResponse wr) {
×
2317
            return wr.getResponse();
×
2318
        }
2319
    }
2320

2321
    /**
     * Returns the rsync upload script for the given dataset and locks the
     * dataset for DCM upload. Deprecated along with the rest of the Data
     * Capture Module (DCM) integration.
     */
    @Deprecated(forRemoval = true, since = "2024-07-07")
    @GET
    @AuthRequired
    @Path("{identifier}/dataCaptureModule/rsync")
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
        // rsync must be explicitly enabled via the UploadMethods setting.
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
        }
        Dataset dataset = null;
        try {
            dataset = findDatasetOrDie(id);
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
            
            // Lock the dataset as soon as the script is handed out, so other
            // upload/publish actions cannot race with the DCM transfer.
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
            if (lock == null) {
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
            }
            // Script is returned as plain text, not JSON.
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (EJBException ex) {
            // NOTE(review): EJBException presumably wraps failures from the DCM
            // service call — confirm against RequestRsyncScriptCommand.
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
        }
    }
2348
    
2349
    /**
     * This api endpoint triggers the creation of a "package" file in a dataset
     * after that package has been moved onto the same filesystem via the Data Capture Module.
     * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file.
     * The "package" can be downloaded over RSAL.
     *
     * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly.
     *
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
     * But due to the possibly immense number of files (millions) the package approach was taken.
     * This is relevant because the posix ("file") code contains many remnants of that development work.
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
     * -MAD 4.9.1
     */
    @POST
    @AuthRequired
    @Path("{identifier}/dataCaptureModule/checksumValidation")
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
        // Only superusers may report checksum validation results.
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!authenticatedUser.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }
        // The DCM reports either "validation passed" or "validation failed".
        String statusMessageFromDcm = jsonFromDcm.getString("status");
        try {
            Dataset dataset = findDatasetOrDie(id);
            if ("validation passed".equals(statusMessageFromDcm)) {
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");

                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
                int totalSize = jsonFromDcm.getInt("totalSize");
                // NOTE(review): storageDriverType is null if the system property is
                // unset, in which case .equals("file") below throws NPE — consider
                // "file".equals(storageDriverType).
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
                
                if (storageDriverType.equals("file")) {
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());

                    ImportMode importMode = ImportMode.MERGE;
                    try {
                        // Kicks off a batch import job; the response carries the job id.
                        // NOTE(review): new Long(...) is deprecated since Java 9 —
                        // Long.valueOf(totalSize) would avoid the boxing constructor.
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, new Long(totalSize), importMode));
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
                        String message = jsonFromImportJobKickoff.getString("message");
                        JsonObjectBuilder job = Json.createObjectBuilder();
                        job.add("jobId", jobId);
                        job.add("message", message);
                        return ok(job);
                    } catch (WrappedResponse wr) {
                        String message = wr.getMessage();
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
                    }
                } else if(storageDriverType.equals(DataAccess.S3)) {
                    
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
                    try {
                        
                        //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize));
                        
                        if (packageFile == null) {
                            logger.log(Level.SEVERE, "S3 File package import failed.");
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
                        }
                        // Release the DcmUpload lock taken when the rsync script was issued.
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
                        if (dcmLock == null) {
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
                        } else {
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
                            dataset.removeLock(dcmLock);
                        }
                        
                        // update version using the command engine to enforce user permissions and constraints
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
                            try {
                                Command<Dataset> cmd;
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
                                commandEngine.submit(cmd);
                            } catch (CommandException ex) {
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
                            }
                        } else {
                            // Logged but not returned as an error; the import itself succeeded.
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
                                    + "single version in draft mode.";
                            logger.log(Level.SEVERE, constraintError);
                        }

                        JsonObjectBuilder job = Json.createObjectBuilder();
                        return ok(job);
                        
                    } catch (IOException e) {
                        String message = e.getMessage();
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
                    }
                } else {
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
                }
            } else if ("validation failed".equals(statusMessageFromDcm)) {
                // Notify everyone who can edit the dataset, plus all superusers.
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
                distinctAuthors.values().forEach((value) -> {
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
                });
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
                if (superUsers != null && !superUsers.isEmpty()) {
                    superUsers.forEach((au) -> {
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
                    });
                }
                return ok("User notified about checksum validation failure.");
            } else {
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
2469
    
2470

2471
    @POST
2472
    @AuthRequired
2473
    @Path("{id}/submitForReview")
2474
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2475
        try {
2476
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
2477
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2478
            
2479
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2480
            
2481
            result.add("inReview", inReview);
×
2482
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2483
            return ok(result);
×
2484
        } catch (WrappedResponse wr) {
×
2485
            return wr.getResponse();
×
2486
        }
2487
    }
2488

2489
    @POST
2490
    @AuthRequired
2491
    @Path("{id}/returnToAuthor")
2492
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2493

2494
        if (jsonBody == null || jsonBody.isEmpty()) {
×
2495
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2496
        }
2497
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2498
        try {
2499
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2500
            String reasonForReturn = null;
×
2501
            reasonForReturn = json.getString("reasonForReturn");
×
2502
            if ((reasonForReturn == null || reasonForReturn.isEmpty())
×
2503
                    && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) {
×
2504
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2505
            }
2506
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2507
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2508

2509
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2510
            result.add("inReview", false);
×
2511
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2512
            return ok(result);
×
2513
        } catch (WrappedResponse wr) {
×
2514
            return wr.getResponse();
×
2515
        }
2516
    }
2517

2518
    @GET
2519
    @AuthRequired
2520
    @Path("{id}/curationStatus")
2521
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2522
        try {
2523
            Dataset ds = findDatasetOrDie(idSupplied);
×
2524
            DatasetVersion dsv = ds.getLatestVersion();
×
2525
            User user = getRequestUser(crc);
×
2526
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
2527
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2528
            } else {
2529
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2530
            }
2531
        } catch (WrappedResponse wr) {
×
2532
            return wr.getResponse();
×
2533
        }
2534
    }
2535

2536
    @PUT
2537
    @AuthRequired
2538
    @Path("{id}/curationStatus")
2539
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2540
        Dataset ds = null;
×
2541
        User u = null;
×
2542
        try {
2543
            ds = findDatasetOrDie(idSupplied);
×
2544
            u = getRequestUser(crc);
×
2545
        } catch (WrappedResponse wr) {
×
2546
            return wr.getResponse();
×
2547
        }
×
2548
        try {
2549
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2550
            return ok("Curation Status updated");
×
2551
        } catch (WrappedResponse wr) {
×
2552
            // Just change to Bad Request and send
2553
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2554
        }
2555
    }
2556

2557
    @DELETE
2558
    @AuthRequired
2559
    @Path("{id}/curationStatus")
2560
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2561
        Dataset ds = null;
×
2562
        User u = null;
×
2563
        try {
2564
            ds = findDatasetOrDie(idSupplied);
×
2565
            u = getRequestUser(crc);
×
2566
        } catch (WrappedResponse wr) {
×
2567
            return wr.getResponse();
×
2568
        }
×
2569
        try {
2570
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2571
            return ok("Curation Status deleted");
×
2572
        } catch (WrappedResponse wr) {
×
2573
            //Just change to Bad Request and send
2574
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2575
        }
2576
    }
2577

2578
    @GET
2579
    @AuthRequired
2580
    @Path("{id}/uploadurls")
2581
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2582
        try {
2583
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2584

2585
            boolean canUpdateDataset = false;
×
2586
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2587
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2588
            if (!canUpdateDataset) {
×
2589
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2590
            }
2591
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2592
            if (s3io == null) {
×
2593
                return error(Response.Status.NOT_FOUND,
×
2594
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2595
            }
2596
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
2597
            if (maxSize != null) {
×
2598
                if(fileSize > maxSize) {
×
2599
                    return error(Response.Status.BAD_REQUEST,
×
2600
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2601
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2602
                }
2603
            }
2604
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
2605
            if (limit != null) {
×
2606
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
2607
                    return error(Response.Status.BAD_REQUEST,
×
2608
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2609
                                    "The remaing file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2610
                }
2611
            }
2612
            JsonObjectBuilder response = null;
×
2613
            String storageIdentifier = null;
×
2614
            try {
2615
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
2616
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2617

2618
            } catch (IOException io) {
×
2619
                logger.warning(io.getMessage());
×
2620
                throw new WrappedResponse(io,
×
2621
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request"));
×
2622
            }
×
2623

2624
            response.add("storageIdentifier", storageIdentifier);
×
2625
            return ok(response);
×
2626
        } catch (WrappedResponse wr) {
×
2627
            return wr.getResponse();
×
2628
        }
2629
    }
2630

2631
    @DELETE
2632
    @AuthRequired
2633
    @Path("mpupload")
2634
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2635
        try {
2636
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2637
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2638
            User user = session.getUser();
×
2639
            if (!user.isAuthenticated()) {
×
2640
                try {
2641
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2642
                } catch (WrappedResponse ex) {
×
2643
                    logger.info(
×
2644
                            "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
2645
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2646
                    throw ex;
×
2647
                }
×
2648
            }
2649
            boolean allowed = false;
×
2650
            if (dataset != null) {
×
2651
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2652
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2653
            } else {
2654
                /*
2655
                 * The only legitimate case where a global id won't correspond to a dataset is
2656
                 * for uploads during creation. Given that this call will still fail unless all
2657
                 * three parameters correspond to an active multipart upload, it should be safe
2658
                 * to allow the attempt for an authenticated user. If there are concerns about
2659
                 * permissions, one could check with the current design that the user is allowed
2660
                 * to create datasets in some dataverse that is configured to use the storage
2661
                 * provider specified in the storageidentifier, but testing for the ability to
2662
                 * create a dataset in a specific dataverse would requiring changing the design
2663
                 * somehow (e.g. adding the ownerId to this call).
2664
                 */
2665
                allowed = true;
×
2666
            }
2667
            if (!allowed) {
×
2668
                return error(Response.Status.FORBIDDEN,
×
2669
                        "You are not permitted to abort file uploads with the supplied parameters.");
2670
            }
2671
            try {
2672
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2673
            } catch (IOException io) {
×
2674
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2675
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2676
                logger.warning(io.getMessage());
×
2677
                throw new WrappedResponse(io,
×
2678
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2679
            }
×
2680
            return Response.noContent().build();
×
2681
        } catch (WrappedResponse wr) {
×
2682
            return wr.getResponse();
×
2683
        }
2684
    }
2685

2686
    @PUT
2687
    @AuthRequired
2688
    @Path("mpupload")
2689
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2690
        try {
2691
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2692
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2693
            User user = session.getUser();
×
2694
            if (!user.isAuthenticated()) {
×
2695
                try {
2696
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2697
                } catch (WrappedResponse ex) {
×
2698
                    logger.info(
×
2699
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2700
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2701
                    throw ex;
×
2702
                }
×
2703
            }
2704
            boolean allowed = false;
×
2705
            if (dataset != null) {
×
2706
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2707
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2708
            } else {
2709
                /*
2710
                 * The only legitimate case where a global id won't correspond to a dataset is
2711
                 * for uploads during creation. Given that this call will still fail unless all
2712
                 * three parameters correspond to an active multipart upload, it should be safe
2713
                 * to allow the attempt for an authenticated user. If there are concerns about
2714
                 * permissions, one could check with the current design that the user is allowed
2715
                 * to create datasets in some dataverse that is configured to use the storage
2716
                 * provider specified in the storageidentifier, but testing for the ability to
2717
                 * create a dataset in a specific dataverse would requiring changing the design
2718
                 * somehow (e.g. adding the ownerId to this call).
2719
                 */
2720
                allowed = true;
×
2721
            }
2722
            if (!allowed) {
×
2723
                return error(Response.Status.FORBIDDEN,
×
2724
                        "You are not permitted to complete file uploads with the supplied parameters.");
2725
            }
2726
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
2727
            logger.info("Etags: " + partETagBody);
×
2728
            try {
2729
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2730
                for (String partNo : object.keySet()) {
×
2731
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2732
                }
×
2733
                for (PartETag et : eTagList) {
×
2734
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2735
                }
×
2736
            } catch (JsonException je) {
×
2737
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2738
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2739
            }
×
2740
            try {
2741
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2742
            } catch (IOException io) {
×
2743
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2744
                logger.warning(io.getMessage());
×
2745
                try {
2746
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2747
                } catch (IOException e) {
×
2748
                    logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2749
                    logger.severe(io.getMessage());
×
2750
                }
×
2751

2752
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2753
            }
×
2754
            return ok("Multipart Upload completed");
×
2755
        } catch (WrappedResponse wr) {
×
2756
            return wr.getResponse();
×
2757
        }
2758
    }
2759

2760
    /**
     * Add a File to an existing Dataset.
     *
     * Accepts a multipart/form-data request. The file content may be supplied
     * either in the "file" part, or (for direct/remote uploads) by reference
     * via a "storageIdentifier" entry in the "jsonData" part.
     *
     * @param idSupplied dataset database id or persistent identifier
     * @param jsonData optional JSON with file metadata (description, tags,
     *        storageIdentifier, fileName, mimeType, ...)
     * @param fileInputStream the uploaded file content (may be absent when a
     *        storageIdentifier is supplied instead)
     * @param contentDispositionHeader form-data disposition for the "file" part;
     *        null signals that no file content was posted
     * @param formDataBodyPart the raw "file" body part, used to read its headers
     * @return 200 with the created file's metadata on success; 400/403/409/503
     *         with an error message otherwise
     */
    @POST
    @AuthRequired
    @Path("{id}/add")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a file for a dataset", 
               description = "Uploads a file for a dataset")
    @APIResponse(responseCode = "200",
               description = "File uploaded successfully to dataset")
    @Tag(name = "addFileToDataset", 
         description = "Uploads a file for a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
    public Response addFileToDataset(@Context ContainerRequestContext crc,
                    @PathParam("id") String idSupplied,
                    @FormDataParam("jsonData") String jsonData,
                    @FormDataParam("file") InputStream fileInputStream,
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
                    ){

        // HTTP upload can be disabled installation-wide; bail out early.
        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        //  
        // -------------------------------------
        Dataset dataset;
        
        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        
        //------------------------------------
        // (2a) Make sure dataset does not have package file
        // (package-file datasets cannot accept regular uploads)
        // --------------------------------------
        
        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN,
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }

        // (2b) Load up optional params via JSON
        //---------------------------------------
        OptionalFileParams optionalFileParams = null;
        msgt("(api) jsonData: " + jsonData);

        try {
            optionalFileParams = new OptionalFileParams(jsonData);
        } catch (DataFileTagException ex) {
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
        }
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
        }
        
        // -------------------------------------
        // (3) Get the file name and content type
        // -------------------------------------
        String newFilename = null;
        String newFileContentType = null;
        String newStorageIdentifier = null;
        if (null == contentDispositionHeader) {
            // No file content posted: the upload must be by reference, i.e. the
            // JSON params must carry a storageIdentifier pointing at already-stored
            // data (direct upload).
            if (optionalFileParams.hasStorageIdentifier()) {
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
                
                // The identifier must point at a store this dataset is allowed to use.
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
                    return error(BAD_REQUEST,
                            "Dataset store configuration does not allow provided storageIdentifier.");
                }
                if (optionalFileParams.hasFileName()) {
                    newFilename = optionalFileParams.getFileName();
                    if (optionalFileParams.hasMimetype()) {
                        newFileContentType = optionalFileParams.getMimeType();
                    }
                }
            } else {
                return error(BAD_REQUEST,
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
            }
        } else {
            newFilename = contentDispositionHeader.getFileName();
            // Let's see if the form data part has the mime (content) type specified.
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
            // because that defaults to "text/plain" when no "Content-Type:" header is
            // present. Instead we'll go through the headers, and see if "Content-Type:"
            // is there. If not, we'll default to "application/octet-stream" - the generic
            // unknown type. This will prompt the application to run type detection and
            // potentially find something more accurate.
            // newFileContentType = formDataBodyPart.getMediaType().toString();

            for (String header : formDataBodyPart.getHeaders().keySet()) {
                if (header.equalsIgnoreCase("Content-Type")) {
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
                }
            }
            if (newFileContentType == null) {
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
            }
        }


        //-------------------
        // (4) Create the AddReplaceFileHelper object
        //-------------------
        msg("ADD!");

        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
                ingestService,
                datasetService,
                fileService,
                permissionSvc,
                commandEngine,
                systemConfig);


        //-------------------
        // (5) Run "runAddFileByDataset" - performs the actual add, recording
        //     any errors on the helper rather than throwing.
        //-------------------
        addFileHelper.runAddFileByDataset(dataset,
                newFilename,
                newFileContentType,
                newStorageIdentifier,
                fileInputStream,
                optionalFileParams);


        if (addFileHelper.hasError()){
            //conflict response status added for 8859
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
            }
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
        } else {
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
            try {
                //msgt("as String: " + addFileHelper.getSuccessResult());
                /**
                 * @todo We need a consistent, sane way to communicate a human
                 * readable message to an API client suitable for human
                 * consumption. Imagine if the UI were built in Angular or React
                 * and we want to return a message from the API as-is to the
                 * user. Human readable.
                 */
                logger.fine("successMsg: " + successMsg);
                // If a file with identical content already exists, succeed but
                // surface the duplicate warning in the response message.
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
                } else {
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
                }

                //"Look at that!  You added a file! (hey hey, it may have worked)");
            } catch (NoFilesException ex) {
                // Should not happen after a successful add; treated as a serious error.
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");

            }
        }
        
    } // end: addFileToDataset
2946

2947

2948
    /**
2949
     * Clean storage of a Dataset
2950
     *
2951
     * @param idSupplied
2952
     * @return
2953
     */
2954
    @GET
2955
    @AuthRequired
2956
    @Path("{id}/cleanStorage")
2957
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2958
        // get user and dataset
2959
        User authUser = getRequestUser(crc);
×
2960

2961
        Dataset dataset;
2962
        try {
2963
            dataset = findDatasetOrDie(idSupplied);
×
2964
        } catch (WrappedResponse wr) {
×
2965
            return wr.getResponse();
×
2966
        }
×
2967
        
2968
        // check permissions
2969
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
2970
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
2971
        }
2972

2973
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
2974

2975
        // check if no legacy files are present
2976
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
2977
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
2978
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
2979
        }
2980

2981
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
2982
        List<String> deleted;
2983
        try {
2984
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
2985
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
2986
        } catch (IOException ex) {
×
2987
            logger.log(Level.SEVERE, null, ex);
×
2988
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
2989
        }
×
2990

2991
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
2992
        
2993
    }
2994

2995
    @GET
2996
    @AuthRequired
2997
    @Path("{id}/versions/{versionId1}/compare/{versionId2}")
2998
    public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id,
2999
                                      @PathParam("versionId1") String versionId1,
3000
                                      @PathParam("versionId2") String versionId2,
3001
                                      @Context UriInfo uriInfo, @Context HttpHeaders headers) {
3002
        try {
NEW
3003
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
NEW
3004
            DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers);
×
NEW
3005
            DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers);
×
NEW
3006
            return ok(DatasetVersion.compareVersions(dsv1, dsv2));
×
NEW
3007
        } catch (WrappedResponse wr) {
×
NEW
3008
            return wr.getResponse();
×
3009
        }
3010
    }
3011

3012
    private static Set<String> getDatasetFilenames(Dataset dataset) {
3013
        Set<String> files = new HashSet<>();
×
3014
        for (DataFile dataFile: dataset.getFiles()) {
×
3015
            String storageIdentifier = dataFile.getStorageIdentifier();
×
3016
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
3017
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
3018
            files.add(locationParts[locationParts.length-1]);
×
3019
        }
×
3020
        return files;
×
3021
    }
3022

3023
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
3024
        return f -> {
1✔
3025
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
3026
        };
3027
    }
3028

3029
    // Debug helper: routes an ad-hoc message to the class logger at FINE level
    // (formerly System.out).
    private void msg(String m) {
        //System.out.println(m);
        logger.fine(m);
    }
×
3033

3034
    // Debug helper: logs a horizontal separator line at FINE level.
    private void dashes() {
        msg("----------------");
    }
×
3037

3038
    // Debug helper: logs a message framed by separator lines ("titled" message).
    private void msgt(String m) {
        dashes();
        msg(m);
        dashes();
    }
×
3043

3044

3045
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
3046
            throws WrappedResponse {
UNCOV
3047
        switch (versionId) {
×
3048
            case DS_VERSION_LATEST:
UNCOV
3049
                return hdl.handleLatest();
×
3050
            case DS_VERSION_DRAFT:
UNCOV
3051
                return hdl.handleDraft();
×
3052
            case DS_VERSION_LATEST_PUBLISHED:
UNCOV
3053
                return hdl.handleLatestPublished();
×
3054
            default:
3055
                try {
3056
                    String[] versions = versionId.split("\\.");
×
UNCOV
3057
                    switch (versions.length) {
×
3058
                        case 1:
3059
                            return hdl.handleSpecific(Long.parseLong(versions[0]), (long) 0.0);
×
3060
                        case 2:
UNCOV
3061
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
3062
                        default:
UNCOV
3063
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3064
                    }
UNCOV
3065
                } catch (NumberFormatException nfe) {
×
3066
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3067
                }
3068
        }
3069
    }
3070

3071
    /*
3072
     * includeDeaccessioned default to false and checkPermsWhenDeaccessioned to false. Use it only when you are sure that the you don't need to work with
3073
     * a deaccessioned dataset.
3074
     */
3075
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, 
3076
                                                  String versionNumber, 
3077
                                                  final Dataset ds,
3078
                                                  UriInfo uriInfo, 
3079
                                                  HttpHeaders headers) throws WrappedResponse {
3080
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
UNCOV
3081
        boolean checkFilePerms = false;
×
UNCOV
3082
        boolean includeDeaccessioned = false;
×
UNCOV
3083
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
×
3084
    }
3085
    
3086
    /*
3087
     * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
3088
     */
3089
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3090
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
UNCOV
3091
        boolean checkPermsWhenDeaccessioned = true;
×
UNCOV
3092
        boolean bypassAccessCheck = false;
×
UNCOV
3093
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3094
    }
3095

3096
    /*
3097
     * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
3098
     */
3099
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3100
                                                  UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
UNCOV
3101
        boolean bypassAccessCheck = false;
×
UNCOV
3102
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3103
    }
3104

3105
    /*
3106
     * Will allow to define when the permissions should be checked when a deaccesioned dataset is requested. If the user doesn't have edit permissions will result in an error.
3107
     */
3108
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3109
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
3110
            boolean bypassAccessCheck)
3111
            throws WrappedResponse {
3112

UNCOV
3113
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
×
3114

UNCOV
3115
        if (dsv == null || dsv.getId() == null) {
×
3116
            throw new WrappedResponse(
×
UNCOV
3117
                    notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
×
3118
        }
3119
        if (dsv.isReleased()&& uriInfo!=null) {
×
3120
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
×
UNCOV
3121
            mdcLogService.logEntry(entry);
×
3122
        }
3123
        return dsv;
×
3124
    }
3125
 
3126
    @GET
3127
    @Path("{identifier}/locks")
3128
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3129

UNCOV
3130
        Dataset dataset = null;
×
3131
        try {
UNCOV
3132
            dataset = findDatasetOrDie(id);
×
3133
            Set<DatasetLock> locks;
UNCOV
3134
            if (lockType == null) {
×
3135
                locks = dataset.getLocks();
×
3136
            } else {
3137
                // request for a specific type lock:
3138
                DatasetLock lock = dataset.getLockFor(lockType);
×
3139

UNCOV
3140
                locks = new HashSet<>();
×
3141
                if (lock != null) {
×
UNCOV
3142
                    locks.add(lock);
×
3143
                }
3144
            }
3145
            
UNCOV
3146
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3147

UNCOV
3148
        } catch (WrappedResponse wr) {
×
3149
            return wr.getResponse();
×
3150
        }
3151
    }
3152

3153
    /**
     * Remove locks from a dataset (superuser only). With a "type" query
     * parameter, removes only that lock type; without it, removes all locks.
     * The dataset is re-fetched after each removal and reindexed at the end,
     * since removed locks can affect the dataset's search card.
     */
    @DELETE
    @AuthRequired
    @Path("{identifier}/locks")
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {

        return response(req -> {
            try {
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
                if (!user.isSuperuser()) {
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
                }
                Dataset dataset = findDatasetOrDie(id);
                
                if (lockType == null) {
                    // No type given: collect the distinct lock reasons first,
                    // then remove them one by one.
                    Set<DatasetLock.Reason> locks = new HashSet<>();
                    for (DatasetLock lock : dataset.getLocks()) {
                        locks.add(lock.getReason());
                    }
                    if (!locks.isEmpty()) {
                        for (DatasetLock.Reason locktype : locks) {
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
                            // refresh the dataset after each removal:
                            dataset = findDatasetOrDie(id);
                        }
                        // kick off dataset reindexing, in case the locks removed
                        // affected the search card:
                        indexService.asyncIndexDataset(dataset, true);
                        return ok("locks removed");
                    }
                    return ok("dataset not locked");
                }
                // request for a specific type lock:
                DatasetLock lock = dataset.getLockFor(lockType);
                if (lock != null) {
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
                    // refresh the dataset:
                    dataset = findDatasetOrDie(id);
                    // ... and kick off dataset reindexing, in case the lock removed
                    // affected the search card:
                    indexService.asyncIndexDataset(dataset, true);
                    return ok("lock type " + lock.getReason() + " removed");
                }
                return ok("no lock type " + lockType + " on the dataset");
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }

        }, getRequestUser(crc));

    }
3203
    
3204
    @POST
3205
    @AuthRequired
3206
    @Path("{identifier}/lock/{type}")
3207
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
UNCOV
3208
        return response(req -> {
×
3209
            try {
UNCOV
3210
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3211
                if (!user.isSuperuser()) {
×
UNCOV
3212
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3213
                }
3214
                Dataset dataset = findDatasetOrDie(id);
×
3215
                DatasetLock lock = dataset.getLockFor(lockType);
×
UNCOV
3216
                if (lock != null) {
×
3217
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
3218
                }
3219
                lock = new DatasetLock(lockType, user);
×
3220
                execCommand(new AddLockCommand(req, dataset, lock));
×
3221
                // refresh the dataset:
3222
                dataset = findDatasetOrDie(id);
×
3223
                // ... and kick of dataset reindexing:
UNCOV
3224
                indexService.asyncIndexDataset(dataset, true);
×
3225

UNCOV
3226
                return ok("dataset locked with lock type " + lockType);
×
3227
            } catch (WrappedResponse wr) {
×
UNCOV
3228
                return wr.getResponse();
×
3229
            }
3230

3231
        }, getRequestUser(crc));
×
3232
    }
3233
    
3234
    @GET
3235
    @AuthRequired
3236
    @Path("locks")
3237
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
3238
        // This API is here, under /datasets, and not under /admin, because we
3239
        // likely want it to be accessible to admin users who may not necessarily 
3240
        // have localhost access, that would be required to get to /api/admin in 
3241
        // most installations. It is still reasonable however to limit access to
3242
        // this api to admin users only.
3243
        AuthenticatedUser apiUser;
3244
        try {
UNCOV
3245
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3246
        } catch (WrappedResponse ex) {
×
UNCOV
3247
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3248
        }
×
3249
        if (!apiUser.isSuperuser()) {
×
3250
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3251
        }
3252
        
3253
        // Locks can be optinally filtered by type, user or both.
UNCOV
3254
        DatasetLock.Reason lockTypeValue = null;
×
UNCOV
3255
        AuthenticatedUser user = null; 
×
3256
        
3257
        // For the lock type, we use a QueryParam of type String, instead of 
3258
        // DatasetLock.Reason; that would be less code to write, but this way 
3259
        // we can check if the value passed matches a valid lock type ("reason") 
3260
        // and provide a helpful error message if it doesn't. If you use a 
3261
        // QueryParam of an Enum type, trying to pass an invalid value to it 
3262
        // results in a potentially confusing "404/NOT FOUND - requested 
3263
        // resource is not available".
UNCOV
3264
        if (lockType != null && !lockType.isEmpty()) {
×
3265
            try {
UNCOV
3266
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
3267
            } catch (IllegalArgumentException iax) {
×
UNCOV
3268
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
3269
                for (Reason r: Reason.values()) {
×
3270
                    reasonJoiner.add(r.name());
×
3271
                };
3272
                String errorMessage = "Invalid lock type value: " + lockType + 
×
3273
                        "; valid lock types: " + reasonJoiner.toString();
×
UNCOV
3274
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
3275
            }
×
3276
        }
3277
        
3278
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
UNCOV
3279
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
UNCOV
3280
            if (user == null) {
×
3281
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
3282
            }
3283
        }
3284
        
3285
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
UNCOV
3286
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
3287
                            
UNCOV
3288
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3289
    }   
3290
    
3291
    
3292
    @GET
3293
    @Path("{id}/makeDataCount/citations")
3294
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
3295
        
3296
        try {
UNCOV
3297
            Dataset dataset = findDatasetOrDie(idSupplied);
×
UNCOV
3298
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
UNCOV
3299
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
3300
            for (DatasetExternalCitations citation : externalCitations) {
×
3301
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
3302
                /**
3303
                 * In the future we can imagine storing and presenting more
3304
                 * information about the citation such as the title of the paper
3305
                 * and the names of the authors. For now, we'll at least give
3306
                 * the URL of the citation so people can click and find out more
3307
                 * about the citation.
3308
                 */
UNCOV
3309
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
UNCOV
3310
                datasetsCitations.add(candidateObj);
×
UNCOV
3311
            }
×
3312
            return ok(datasetsCitations);
×
3313

3314
        } catch (WrappedResponse wr) {
×
3315
            return wr.getResponse();
×
3316
        }
3317

3318
    }
3319

3320
    @GET
3321
    @Path("{id}/makeDataCount/{metric}")
3322
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
UNCOV
3323
        String nullCurrentMonth = null;
×
UNCOV
3324
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
3325
    }
3326

3327
    @GET
3328
    @AuthRequired
3329
    @Path("{identifier}/storagesize")
3330
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
UNCOV
3331
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
UNCOV
3332
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
3333
    }
3334

3335
    /**
     * Reports the total download size of the files in a dataset version,
     * optionally filtered by content type, access status, category, tabular tag
     * and search text, and scoped by a size mode (e.g. all vs. original vs.
     * archival — values of FileDownloadSizeMode).
     *
     * Returns 200 with {"message", "storageSize"}; 400 on an invalid
     * accessStatus or mode value; otherwise whatever error the dataset/version
     * lookup produces.
     */
    @GET
    @AuthRequired
    @Path("{identifier}/versions/{versionId}/downloadsize")
    public Response getDownloadSize(@Context ContainerRequestContext crc,
                                    @PathParam("identifier") String dvIdtf,
                                    @PathParam("versionId") String version,
                                    @QueryParam("contentType") String contentType,
                                    @QueryParam("accessStatus") String accessStatus,
                                    @QueryParam("categoryName") String categoryName,
                                    @QueryParam("tabularTagName") String tabularTagName,
                                    @QueryParam("searchText") String searchText,
                                    @QueryParam("mode") String mode,
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                    @Context UriInfo uriInfo,
                                    @Context HttpHeaders headers) {

        return response(req -> {
            // Validate the search criteria; an unknown accessStatus value makes
            // FileAccessStatus.valueOf throw, turned into a 400 below.
            FileSearchCriteria fileSearchCriteria;
            try {
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            // Validate the size mode; absent mode defaults to All.
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
            try {
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
            } catch (IllegalArgumentException e) {
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
            }
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
            jsonObjectBuilder.add("message", message);
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
            return ok(jsonObjectBuilder);
        }, getRequestUser(crc));
    }
3379

3380
    @GET
3381
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
3382
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
3383
        try {
UNCOV
3384
            Dataset dataset = findDatasetOrDie(idSupplied);
×
UNCOV
3385
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
UNCOV
3386
            MakeDataCountUtil.MetricType metricType = null;
×
3387
            try {
3388
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
3389
            } catch (IllegalArgumentException ex) {
×
UNCOV
3390
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
3391
            }
×
3392
            String monthYear = null;
×
3393
            if (yyyymm != null) {
×
3394
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
3395
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
3396
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
3397
            }
UNCOV
3398
            if (country != null) {
×
3399
                country = country.toLowerCase();
×
UNCOV
3400
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
3401
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3402
                }
3403
            }
3404
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
UNCOV
3405
            if (datasetMetrics == null) {
×
UNCOV
3406
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3407
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
3408
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3409
            }
3410
            Long viewsTotalRegular = null;
×
3411
            Long viewsUniqueRegular = null;
×
UNCOV
3412
            Long downloadsTotalRegular = null;
×
3413
            Long downloadsUniqueRegular = null;
×
3414
            Long viewsTotalMachine = null;
×
3415
            Long viewsUniqueMachine = null;
×
3416
            Long downloadsTotalMachine = null;
×
3417
            Long downloadsUniqueMachine = null;
×
3418
            Long viewsTotal = null;
×
3419
            Long viewsUnique = null;
×
3420
            Long downloadsTotal = null;
×
3421
            Long downloadsUnique = null;
×
3422
            switch (metricSupplied) {
×
3423
                case "viewsTotal":
3424
                    viewsTotal = datasetMetrics.getViewsTotal();
×
3425
                    break;
×
3426
                case "viewsTotalRegular":
3427
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
3428
                    break;
×
3429
                case "viewsTotalMachine":
3430
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
3431
                    break;
×
3432
                case "viewsUnique":
3433
                    viewsUnique = datasetMetrics.getViewsUnique();
×
3434
                    break;
×
3435
                case "viewsUniqueRegular":
3436
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
3437
                    break;
×
3438
                case "viewsUniqueMachine":
3439
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
3440
                    break;
×
3441
                case "downloadsTotal":
3442
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
3443
                    break;
×
3444
                case "downloadsTotalRegular":
3445
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
3446
                    break;
×
3447
                case "downloadsTotalMachine":
3448
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
3449
                    break;
×
3450
                case "downloadsUnique":
3451
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
3452
                    break;
×
3453
                case "downloadsUniqueRegular":
3454
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
3455
                    break;
×
3456
                case "downloadsUniqueMachine":
3457
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
3458
                    break;
×
3459
                default:
3460
                    break;
3461
            }
3462
            /**
3463
             * TODO: Think more about the JSON output and the API design.
3464
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3465
             * now, by country. We could return multiple metrics (viewsTotal,
3466
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3467
             */
UNCOV
3468
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
UNCOV
3469
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
UNCOV
3470
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3471
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3472
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3473
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3474
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3475
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3476
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3477
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3478
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3479
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3480
            return ok(jsonObjectBuilder);
×
3481
        } catch (WrappedResponse wr) {
×
3482
            return wr.getResponse();
×
3483
        } catch (Exception e) {
×
3484
            //bad date - caught in sanitize call
3485
            return error(BAD_REQUEST, e.getMessage());
×
3486
        }
3487
    }
3488
    
3489
    @GET
3490
    @AuthRequired
3491
    @Path("{identifier}/storageDriver")
3492
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3493
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3494
        
3495
        Dataset dataset; 
3496
        
3497
        try {
UNCOV
3498
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3499
        } catch (WrappedResponse ex) {
×
UNCOV
3500
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3501
        }
×
3502
        
3503
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3504
    }
3505
    
3506
    @PUT
3507
    @AuthRequired
3508
    @Path("{identifier}/storageDriver")
3509
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3510
            String storageDriverLabel,
3511
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3512
        
3513
        // Superuser-only:
3514
        AuthenticatedUser user;
3515
        try {
UNCOV
3516
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3517
        } catch (WrappedResponse ex) {
×
UNCOV
3518
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3519
        }
×
3520
        if (!user.isSuperuser()) {
×
3521
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3522
        }
3523

3524
        Dataset dataset;
3525

3526
        try {
UNCOV
3527
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3528
        } catch (WrappedResponse ex) {
×
UNCOV
3529
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3530
        }
×
3531
        
3532
        // We don't want to allow setting this to a store id that does not exist: 
3533
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
UNCOV
3534
            if (store.getKey().equals(storageDriverLabel)) {
×
UNCOV
3535
                dataset.setStorageDriverId(store.getValue());
×
3536
                datasetService.merge(dataset);
×
3537
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3538
            }
3539
        }
×
3540
        return error(Response.Status.BAD_REQUEST,
×
3541
                "No Storage Driver found for : " + storageDriverLabel);
3542
    }
3543
    
3544
    @DELETE
3545
    @AuthRequired
3546
    @Path("{identifier}/storageDriver")
3547
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3548
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3549
        
3550
        // Superuser-only:
3551
        AuthenticatedUser user;
3552
        try {
UNCOV
3553
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3554
        } catch (WrappedResponse ex) {
×
UNCOV
3555
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3556
        }
×
3557
        if (!user.isSuperuser()) {
×
3558
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3559
        }
3560

3561
        Dataset dataset;
3562

3563
        try {
UNCOV
3564
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3565
        } catch (WrappedResponse ex) {
×
UNCOV
3566
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3567
        }
×
3568
        
3569
        dataset.setStorageDriverId(null);
×
3570
        datasetService.merge(dataset);
×
UNCOV
3571
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3572
    }
3573

3574
    @GET
3575
    @AuthRequired
3576
    @Path("{identifier}/curationLabelSet")
3577
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3578
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3579

3580
        try {
UNCOV
3581
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3582
            if (!user.isSuperuser()) {
×
UNCOV
3583
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3584
            }
3585
        } catch (WrappedResponse wr) {
×
3586
            return wr.getResponse();
×
UNCOV
3587
        }
×
3588

3589
        Dataset dataset;
3590

3591
        try {
UNCOV
3592
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3593
        } catch (WrappedResponse ex) {
×
UNCOV
3594
            return ex.getResponse();
×
3595
        }
×
3596

3597
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3598
    }
3599

3600
    @PUT
3601
    @AuthRequired
3602
    @Path("{identifier}/curationLabelSet")
3603
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3604
                                        @PathParam("identifier") String dvIdtf,
3605
                                        @QueryParam("name") String curationLabelSet,
3606
                                        @Context UriInfo uriInfo,
3607
                                        @Context HttpHeaders headers) throws WrappedResponse {
3608

3609
        // Superuser-only:
3610
        AuthenticatedUser user;
3611
        try {
UNCOV
3612
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3613
        } catch (WrappedResponse ex) {
×
UNCOV
3614
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3615
        }
×
3616
        if (!user.isSuperuser()) {
×
3617
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3618
        }
3619

3620
        Dataset dataset;
3621

3622
        try {
UNCOV
3623
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3624
        } catch (WrappedResponse ex) {
×
UNCOV
3625
            return ex.getResponse();
×
3626
        }
×
3627
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3628
            dataset.setCurationLabelSetName(curationLabelSet);
×
3629
            datasetService.merge(dataset);
×
3630
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3631
        } else {
3632
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3633
                if (setName.equals(curationLabelSet)) {
×
UNCOV
3634
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3635
                    datasetService.merge(dataset);
×
3636
                    return ok("Curation Label Set Name set to: " + setName);
×
3637
                }
3638
            }
×
3639
        }
UNCOV
3640
        return error(Response.Status.BAD_REQUEST,
×
3641
            "No Such Curation Label Set");
3642
    }
3643

3644
    @DELETE
3645
    @AuthRequired
3646
    @Path("{identifier}/curationLabelSet")
3647
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3648
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3649

3650
        // Superuser-only:
3651
        AuthenticatedUser user;
3652
        try {
UNCOV
3653
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3654
        } catch (WrappedResponse ex) {
×
UNCOV
3655
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3656
        }
×
3657
        if (!user.isSuperuser()) {
×
3658
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3659
        }
3660

3661
        Dataset dataset;
3662

3663
        try {
UNCOV
3664
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3665
        } catch (WrappedResponse ex) {
×
UNCOV
3666
            return ex.getResponse();
×
3667
        }
×
3668

3669
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3670
        datasetService.merge(dataset);
×
UNCOV
3671
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3672
    }
3673

3674
    @GET
3675
    @AuthRequired
3676
    @Path("{identifier}/allowedCurationLabels")
3677
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3678
                                             @PathParam("identifier") String dvIdtf,
3679
                                             @Context UriInfo uriInfo,
3680
                                             @Context HttpHeaders headers) throws WrappedResponse {
UNCOV
3681
        AuthenticatedUser user = null;
×
3682
        try {
UNCOV
3683
            user = getRequestAuthenticatedUserOrDie(crc);
×
3684
        } catch (WrappedResponse wr) {
×
UNCOV
3685
            return wr.getResponse();
×
3686
        }
×
3687

3688
        Dataset dataset;
3689

3690
        try {
UNCOV
3691
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
3692
        } catch (WrappedResponse ex) {
×
UNCOV
3693
            return ex.getResponse();
×
3694
        }
×
3695
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3696
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
3697
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3698
        } else {
3699
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3700
        }
3701
    }
3702

3703
    @GET
3704
    @AuthRequired
3705
    @Path("{identifier}/timestamps")
3706
    @Produces(MediaType.APPLICATION_JSON)
3707
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3708

UNCOV
3709
        Dataset dataset = null;
×
UNCOV
3710
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3711
        try {
3712
            dataset = findDatasetOrDie(id);
×
3713
            User u = getRequestUser(crc);
×
UNCOV
3714
            Set<Permission> perms = new HashSet<Permission>();
×
3715
            perms.add(Permission.ViewUnpublishedDataset);
×
3716
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3717
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3718
            logger.fine("CSD: " + canSeeDraft);
×
3719
            logger.fine("IT: " + dataset.getIndexTime());
×
3720
            logger.fine("MT: " + dataset.getModificationTime());
×
3721
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
3722
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3723
            // Basic info if it's released
3724
            if (dataset.isReleased() || canSeeDraft) {
×
3725
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
UNCOV
3726
                if (dataset.getPublicationDate() != null) {
×
3727
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3728
                }
3729

3730
                if (dataset.getLastExportTime() != null) {
×
UNCOV
3731
                    timestamps.add("lastMetadataExportTime",
×
UNCOV
3732
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3733

3734
                }
3735

UNCOV
3736
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
UNCOV
3737
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
UNCOV
3738
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3739
                }
3740
                // If the modification/permissionmodification time is
3741
                // set and the index time is null or is before the mod time, the relevant index is stale
UNCOV
3742
                timestamps.add("hasStaleIndex",
×
UNCOV
3743
                        (dataset.getModificationTime() != null && (dataset.getIndexTime() == null
×
UNCOV
3744
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3745
                                : false);
×
3746
                timestamps.add("hasStalePermissionIndex",
×
3747
                        (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null
×
3748
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3749
                                : false);
×
3750
            }
3751
            // More detail if you can see a draft
3752
            if (canSeeDraft) {
×
UNCOV
3753
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
UNCOV
3754
                if (dataset.getIndexTime() != null) {
×
3755
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3756
                }
3757
                if (dataset.getPermissionModificationTime() != null) {
×
3758
                    timestamps.add("lastPermissionUpdateTime",
×
UNCOV
3759
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3760
                }
3761
                if (dataset.getPermissionIndexTime() != null) {
×
3762
                    timestamps.add("lastPermissionIndexTime",
×
UNCOV
3763
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3764
                }
3765
                if (dataset.getGlobalIdCreateTime() != null) {
×
3766
                    timestamps.add("globalIdCreateTime", formatter
×
UNCOV
3767
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3768
                }
3769

3770
            }
UNCOV
3771
            return ok(timestamps);
×
UNCOV
3772
        } catch (WrappedResponse wr) {
×
UNCOV
3773
            return wr.getResponse();
×
3774
        }
3775
    }
3776

3777

3778
/****************************
3779
 * Globus Support Section:
3780
 * 
3781
 * Globus transfer in (upload) and out (download) involve three basic steps: The
3782
 * app is launched and makes a callback to the
3783
 * globusUploadParameters/globusDownloadParameters method to get all of the info
3784
 * needed to set up its display.
3785
 * 
3786
 * At some point after that, the user will make a selection as to which files to
3787
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
3788
 * to indicate a transfer is about to start. In addition to providing the
3789
 * details of where to transfer the files to/from, Dataverse also grants the
3790
 * Globus principal involved the relevant rw or r permission for the dataset.
3791
 * 
3792
 * Once the transfer is started, the app records the task id and sends it to
3793
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
3794
 * monitors the transfer task and when it ultimately succeeds or fails it
3795
 * revokes the principal's permission and, for the transfer in case, adds the
3796
 * files to the dataset. (The dataset is locked until the transfer completes.)
3797
 * 
3798
 * (If no transfer is started within a specified timeout, permissions will
3799
 * automatically be revoked - see the GlobusServiceBean for details.)
3800
 *
3801
 * The option to reference a file at a remote endpoint (rather than transfer it)
3802
 * follows the first two steps of the process above but completes with a call to
3803
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
3804
 * files can be added to the dataset immediately.)
3805
 */
3806

3807
    /**
3808
     * Retrieve the parameters and signed URLs required to perform a globus
3809
     * transfer. This api endpoint is expected to be called as a signed callback
3810
     * after the globus-dataverse app/other app is launched, but it will accept
3811
     * other forms of authentication.
3812
     * 
3813
     * @param crc
3814
     * @param datasetId
3815
     */
3816
    @GET
    @AuthRequired
    @Path("{id}/globusUploadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale) {
        // Builds the JSON "signed callback" payload the Globus app needs to set up an
        // upload: substituted dataset parameters, store capabilities (managed endpoint
        // vs. reference endpoints), size/quota limits, and the set of signed follow-up
        // API calls the app is allowed to make.
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            return e.getResponse();
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
        }

        // Token util performs the {datasetId}/{siteUrl}/... template substitution below.
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        // Managed stores expose a single transfer endpoint; unmanaged ("reference")
        // stores instead expose a list of endpoint/path pairs the app may reference.
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;
        JsonArray referenceEndpointsWithPaths = null;
        if (managed) {
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
        } else {
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
        }

        // Template parameters to substitute; values in braces are placeholders that
        // tokenUtil.getParams() resolves against this dataset.
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            // Per-store upload size limit and (if quotas are enabled) remaining quota.
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
            if (maxSize != null) {
                params.add("fileSizeLimit", maxSize);
            }
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
            if (limit != null) {
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
            }
        }
        if (transferEndpoint != null) {
            params.add("endpoint", transferEndpoint);
        } else {
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
        }
        // Signed-URL lifetime comes from the Globus cache max-age JVM setting.
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        // Managed transfers request upload paths; reference stores request reference paths.
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        // Completion call differs: managed transfers are monitored via addGlobusFiles,
        // while references are added directly through addFiles.
        if(managed) {
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        } else {
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        }
        // Read-only helper calls the app may use while displaying the dataset.
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }
3916

3917
    /**
3918
 * Provides specific storageIdentifiers to use for each file and requests permissions for a given globus user to upload to the dataset
3919
     * 
3920
     * @param crc
3921
     * @param datasetId
3922
     * @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
3923
     * @return
3924
     * @throws IOException
3925
     * @throws ExecutionException
3926
     * @throws InterruptedException
3927
     */
3928
    @POST
3929
    @AuthRequired
3930
    @Path("{id}/requestGlobusUploadPaths")
3931
    @Consumes(MediaType.APPLICATION_JSON)
3932
    @Produces(MediaType.APPLICATION_JSON)
3933
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3934
            String jsonBody) throws IOException, ExecutionException, InterruptedException {
3935

UNCOV
3936
        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
×
3937

UNCOV
3938
        if (!systemConfig.isGlobusUpload()) {
×
3939
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
UNCOV
3940
                    BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
3941
        }
3942

3943
        // -------------------------------------
3944
        // (1) Get the user from the ContainerRequestContext
3945
        // -------------------------------------
3946
        AuthenticatedUser authUser;
3947
        try {
UNCOV
3948
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
3949
        } catch (WrappedResponse e) {
×
UNCOV
3950
            return e.getResponse();
×
3951
        }
×
3952

3953
        // -------------------------------------
3954
        // (2) Get the Dataset Id
3955
        // -------------------------------------
3956
        Dataset dataset;
3957

3958
        try {
UNCOV
3959
            dataset = findDatasetOrDie(datasetId);
×
UNCOV
3960
        } catch (WrappedResponse wr) {
×
UNCOV
3961
            return wr.getResponse();
×
3962
        }
×
3963
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
×
3964
                .canIssue(UpdateDatasetVersionCommand.class)) {
×
3965

3966
            JsonObject params = JsonUtil.getJsonObject(jsonBody);
×
3967
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3968
                try {
3969
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
×
3970
                    if (referencedFiles == null || referencedFiles.size() == 0) {
×
UNCOV
3971
                        return badRequest("No referencedFiles specified");
×
3972
                    }
3973
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
×
3974
                    return (ok(fileMap));
×
UNCOV
3975
                } catch (Exception e) {
×
3976
                    return badRequest(e.getLocalizedMessage());
×
3977
                }
3978
            } else {
3979
                try {
UNCOV
3980
                    String principal = params.getString("principal");
×
UNCOV
3981
                    int numberOfPaths = params.getInt("numberOfFiles");
×
UNCOV
3982
                    if (numberOfPaths <= 0) {
×
3983
                        return badRequest("numberOfFiles must be positive");
×
3984
                    }
3985

3986
                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
×
UNCOV
3987
                    switch (response.getInt("status")) {
×
3988
                    case 201:
3989
                        return ok(response.getJsonObject("paths"));
×
3990
                    case 400:
UNCOV
3991
                        return badRequest("Unable to grant permission");
×
3992
                    case 409:
UNCOV
3993
                        return conflict("Permission already exists");
×
3994
                    default:
UNCOV
3995
                        return error(null, "Unexpected error when granting permission");
×
3996
                    }
3997

3998
                } catch (NullPointerException | ClassCastException e) {
×
UNCOV
3999
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
×
4000

4001
                }
4002
            }
4003
        } else {
UNCOV
4004
            return forbidden("User doesn't have permission to upload to this dataset");
×
4005
        }
4006

4007
    }
4008

4009
    /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor.
     *
     * Locks the dataset for the duration of the Globus upload and kicks off an
     * asynchronous registration of the transferred files.
     *
     * @param crc       the request context (used to resolve the calling user)
     * @param datasetId the dataset id or PID
     * @param jsonData - see /addFiles documentation, additional "taskIdentifier" key in the main object is required.
     * @param uriInfo   request URI info (logged for diagnostics only)
     * @return OK once the async Globus upload monitoring has been started
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/addGlobusFiles")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a Globus file for a dataset", 
               description = "Uploads a Globus file for a dataset")
    @APIResponse(responseCode = "200",
               description = "Globus file uploaded successfully to dataset")
    @Tag(name = "addGlobusFilesToDataset", 
         description = "Uploads a Globus file for a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
                                            @PathParam("id") String datasetId,
                                            @FormDataParam("jsonData") String jsonData,
                                            @Context UriInfo uriInfo
    ) throws IOException, ExecutionException, InterruptedException {

        // NOTE(review): logs the raw form payload at INFO — consider FINE
        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);

        // -------------------------------------
        // (1) Get the user from the API key
        // -------------------------------------
        AuthenticatedUser authUser;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
            );
        }

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        
        // Is Globus upload service available? 
        
        // ... on this Dataverse instance?
        if (!systemConfig.isGlobusUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
        }

        // ... and on this specific Dataset? 
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
        }
        
        // Check if the dataset is already locked
        // We are reusing the code and logic used by various command to determine 
        // if there are any locks on the dataset that would prevent the current 
        // users from modifying it:
        try {
            DataverseRequest dataverseRequest = createDataverseRequest(authUser);
            permissionService.checkEditDatasetLock(dataset, dataverseRequest, null); 
        } catch (IllegalCommandException icex) {
            return error(Response.Status.FORBIDDEN, "Dataset " + datasetId + " is locked: " + icex.getLocalizedMessage());
        }
        
        // Parse the multipart "jsonData" part; reject the request on malformed JSON
        JsonObject jsonObject = null;
        try {
            jsonObject = JsonUtil.getJsonObject(jsonData);
        } catch (Exception ex) {
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
            return badRequest("Error parsing json body");

        }

        //------------------------------------
        // (2b) Make sure dataset does not have package file
        // --------------------------------------

        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }


        // Lock the dataset while the Globus transfer/registration is in flight
        String lockInfoMessage = "Globus Upload API started ";
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
                (authUser).getId(), lockInfoMessage);
        if (lock != null) {
            dataset.addLock(lock);
        } else {
            // Proceeds anyway; the async upload is started even without the lock
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
        }

        if(uriInfo != null) {
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
        }

        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
        
        // Async Call
        // NOTE(review): if globusUpload throws IllegalArgumentException here, the
        // GlobusUpload lock acquired above is not released — verify whether the
        // service removes it, or the dataset stays locked until manual cleanup.
        try {
            globusService.globusUpload(jsonObject, dataset, requestUrl, authUser);
        } catch (IllegalArgumentException ex) {
            return badRequest("Invalid parameters: "+ex.getMessage());
        }

        return ok("Async call to Globus Upload started ");

    }
4134
    
4135
/**
 * Retrieve the parameters and signed URLs required to perform a globus
 * transfer/download. This api endpoint is expected to be called as a signed
 * callback after the globus-dataverse app/other app is launched, but it will
 * accept other forms of authentication.
 * 
 * @param crc        the request context; an unauthenticated (guest) caller is allowed
 * @param datasetId  the dataset id or PID
 * @param locale     optional locale code, substituted into the returned parameters
 * @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL.
 * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools. 
 */
    @GET
    @AuthRequired
    @Path("{id}/globusDownloadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser = null;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            // Deliberately tolerated: guests may download; authUser stays null
            logger.fine("guest user globus download");
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        // Look up the cached file listing created when the download was initiated
        JsonObject files = globusService.getFilesForDownload(downloadId);
        if (files == null) {
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
        }

        // NOTE(review): findApiTokenByUser is called with a possibly-null authUser
        // (guest case) — presumably it returns null and URLTokenUtil accepts that; confirm
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;

        // Build the substitutable query parameters (same scheme as external tools)
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            // Managed stores also expose the Globus endpoint id to transfer from
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
            params.add("endpoint", transferEndpoint);
        }
        params.add("files", files);
        // Signed-URL lifetime for the callback APIs below
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE,
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }
4229

4230
    /**
4231
     * Requests permissions for a given globus user to download the specified files
4232
     * the dataset and returns information about the paths to transfer from.
4233
     * 
4234
     * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
4235
     * 
4236
     * @param crc
4237
     * @param datasetId
4238
     * @param jsonData - a JSON object that must include the id of the  Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
4239
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
4240
     * @throws IOException
4241
     * @throws ExecutionException
4242
     * @throws InterruptedException
4243
     */
4244
    @POST
4245
    @AuthRequired
4246
    @Path("{id}/requestGlobusDownload")
4247
    @Consumes(MediaType.APPLICATION_JSON)
4248
    @Produces(MediaType.APPLICATION_JSON)
4249
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4250
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
4251
            throws IOException, ExecutionException, InterruptedException {
4252

UNCOV
4253
        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);
×
4254

UNCOV
4255
        if (!systemConfig.isGlobusDownload()) {
×
4256
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
UNCOV
4257
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4258
        }
4259

4260
        // -------------------------------------
4261
        // (1) Get the user from the ContainerRequestContext
4262
        // -------------------------------------
UNCOV
4263
        User user = getRequestUser(crc);
×
4264

4265
        // -------------------------------------
4266
        // (2) Get the Dataset Id
4267
        // -------------------------------------
4268
        Dataset dataset;
4269

4270
        try {
UNCOV
4271
            dataset = findDatasetOrDie(datasetId);
×
UNCOV
4272
        } catch (WrappedResponse wr) {
×
UNCOV
4273
            return wr.getResponse();
×
4274
        }
×
4275
        JsonObject body = null;
×
4276
        if (jsonBody != null) {
×
4277
            body = JsonUtil.getJsonObject(jsonBody);
×
4278
        }
4279
        Set<String> fileIds = null;
×
4280
        if (downloadId != null) {
×
UNCOV
4281
            JsonObject files = globusService.getFilesForDownload(downloadId);
×
4282
            if (files != null) {
×
4283
                fileIds = files.keySet();
×
4284
            }
4285
        } else {
×
4286
            if ((body!=null) && body.containsKey("fileIds")) {
×
UNCOV
4287
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
×
4288
                fileIds = new HashSet<String>(fileVals.size());
×
4289
                for (JsonValue fileVal : fileVals) {
×
4290
                    String id = null;
×
4291
                    switch (fileVal.getValueType()) {
×
4292
                    case STRING:
4293
                        id = ((JsonString) fileVal).getString();
×
4294
                        break;
×
4295
                    case NUMBER:
4296
                        id = ((JsonNumber) fileVal).toString();
×
4297
                        break;
×
4298
                    default:
4299
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
×
4300
                    }
4301
                    ;
4302
                    fileIds.add(id);
×
UNCOV
4303
                }
×
UNCOV
4304
            } else {
×
4305
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
×
4306
            }
4307
        }
4308

UNCOV
4309
        if (fileIds.isEmpty()) {
×
UNCOV
4310
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
4311
        }
4312
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
×
4313
        for (String id : fileIds) {
×
UNCOV
4314
            boolean published = false;
×
4315
            logger.info("File id: " + id);
×
4316

4317
            DataFile df = null;
×
4318
            try {
UNCOV
4319
                df = findDataFileOrDie(id);
×
4320
            } catch (WrappedResponse wr) {
×
UNCOV
4321
                return wr.getResponse();
×
4322
            }
×
4323
            if (!df.getOwner().equals(dataset)) {
×
4324
                return badRequest("All files must be in the dataset");
×
4325
            }
4326
            dataFiles.add(df);
×
4327

UNCOV
4328
            for (FileMetadata fm : df.getFileMetadatas()) {
×
4329
                if (fm.getDatasetVersion().isPublished()) {
×
UNCOV
4330
                    published = true;
×
4331
                    break;
×
4332
                }
4333
            }
×
4334

UNCOV
4335
            if (!published) {
×
4336
                // If the file is not published, they can still download the file, if the user
4337
                // has the permission to view unpublished versions:
4338

UNCOV
4339
                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
×
UNCOV
4340
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
×
UNCOV
4341
                    return forbidden("User doesn't have permission to download file: " + id);
×
4342
                }
4343
            } else { // published and restricted and/or embargoed
4344
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
×
4345
                    // This line also handles all three authenticated session user, token user, and
4346
                    // guest cases.
4347
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
×
UNCOV
4348
                        return forbidden("User doesn't have permission to download file: " + id);
×
4349
                    }
4350

4351
            }
UNCOV
4352
        }
×
4353
        // Allowed to download all requested files
UNCOV
4354
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
×
4355
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
4356
            // If managed, give the principal read permissions
4357
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
×
4358
            switch (status) {
×
4359
            case 201:
4360
                return ok(files);
×
4361
            case 400:
UNCOV
4362
                return badRequest("Unable to grant permission");
×
4363
            case 409:
UNCOV
4364
                return conflict("Permission already exists");
×
4365
            default:
UNCOV
4366
                return error(null, "Unexpected error when granting permission");
×
4367
            }
4368

4369
        }
4370

UNCOV
4371
        return ok(files);
×
4372
    }
4373

4374
    /**
4375
     * Monitors a globus download and removes permissions on the dir/dataset when
4376
     * the specified transfer task is completed.
4377
     * 
4378
     * @param crc
4379
     * @param datasetId
4380
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
4381
     *                  Globus task to monitor.
4382
     * @return
4383
     * @throws IOException
4384
     * @throws ExecutionException
4385
     * @throws InterruptedException
4386
     */
4387
    @POST
4388
    @AuthRequired
4389
    @Path("{id}/monitorGlobusDownload")
4390
    @Consumes(MediaType.APPLICATION_JSON)
4391
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4392
            String jsonData) throws IOException, ExecutionException, InterruptedException {
4393

UNCOV
4394
        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
×
4395

UNCOV
4396
        if (!systemConfig.isGlobusDownload()) {
×
4397
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
UNCOV
4398
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4399
        }
4400

4401
        // -------------------------------------
4402
        // (1) Get the user from the ContainerRequestContext
4403
        // -------------------------------------
4404
        User authUser;
UNCOV
4405
        authUser = getRequestUser(crc);
×
4406

4407
        // -------------------------------------
4408
        // (2) Get the Dataset Id
4409
        // -------------------------------------
4410
        Dataset dataset;
4411

4412
        try {
UNCOV
4413
            dataset = findDatasetOrDie(datasetId);
×
UNCOV
4414
        } catch (WrappedResponse wr) {
×
UNCOV
4415
            return wr.getResponse();
×
4416
        }
×
4417

4418
        // Async Call
4419
        globusService.globusDownload(jsonData, dataset, authUser);
×
4420

UNCOV
4421
        return ok("Async call to Globus Download started");
×
4422

4423
    }
4424

4425
    /**
4426
     * Add multiple Files to an existing Dataset
4427
     *
4428
     * @param idSupplied
4429
     * @param jsonData
4430
     * @return
4431
     */
4432
    @POST
4433
    @AuthRequired
4434
    @Path("{id}/addFiles")
4435
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4436
    @Produces("application/json")
4437
    @Operation(summary = "Uploads a set of files to a dataset", 
4438
               description = "Uploads a set of files to a dataset")
4439
    @APIResponse(responseCode = "200",
4440
               description = "Files uploaded successfully to dataset")
4441
    @Tag(name = "addFilesToDataset", 
4442
         description = "Uploads a set of files to a dataset")
4443
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4444
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
4445
            @FormDataParam("jsonData") String jsonData) {
4446

UNCOV
4447
        if (!systemConfig.isHTTPUpload()) {
×
UNCOV
4448
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4449
        }
4450

4451
        // -------------------------------------
4452
        // (1) Get the user from the ContainerRequestContext
4453
        // -------------------------------------
4454
        User authUser;
UNCOV
4455
        authUser = getRequestUser(crc);
×
4456

4457
        // -------------------------------------
4458
        // (2) Get the Dataset Id
4459
        // -------------------------------------
4460
        Dataset dataset;
4461

4462
        try {
UNCOV
4463
            dataset = findDatasetOrDie(idSupplied);
×
UNCOV
4464
        } catch (WrappedResponse wr) {
×
UNCOV
4465
            return wr.getResponse();
×
4466
        }
×
4467

4468
        dataset.getLocks().forEach(dl -> {
×
4469
            logger.info(dl.toString());
×
UNCOV
4470
        });
×
4471

4472
        //------------------------------------
4473
        // (2a) Make sure dataset does not have package file
4474
        // --------------------------------------
4475

UNCOV
4476
        for (DatasetVersion dv : dataset.getVersions()) {
×
UNCOV
4477
            if (dv.isHasPackageFile()) {
×
UNCOV
4478
                return error(Response.Status.FORBIDDEN,
×
4479
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4480
                );
4481
            }
4482
        }
×
4483

UNCOV
4484
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4485

UNCOV
4486
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4487
                dvRequest,
4488
                this.ingestService,
4489
                this.datasetService,
4490
                this.fileService,
4491
                this.permissionSvc,
4492
                this.commandEngine,
4493
                this.systemConfig
4494
        );
4495

UNCOV
4496
        return addFileHelper.addFiles(jsonData, dataset, authUser);
×
4497

4498
    }
4499

4500
    /**
4501
     * Replace multiple Files to an existing Dataset
4502
     *
4503
     * @param idSupplied
4504
     * @param jsonData
4505
     * @return
4506
     */
4507
    @POST
4508
    @AuthRequired
4509
    @Path("{id}/replaceFiles")
4510
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4511
    @Produces("application/json")
4512
    @Operation(summary = "Replace a set of files to a dataset", 
4513
               description = "Replace a set of files to a dataset")
4514
    @APIResponse(responseCode = "200",
4515
               description = "Files replaced successfully to dataset")
4516
    @Tag(name = "replaceFilesInDataset", 
4517
         description = "Replace a set of files to a dataset")
4518
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) 
4519
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
4520
                                          @PathParam("id") String idSupplied,
4521
                                          @FormDataParam("jsonData") String jsonData) {
4522

UNCOV
4523
        if (!systemConfig.isHTTPUpload()) {
×
UNCOV
4524
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4525
        }
4526

4527
        // -------------------------------------
4528
        // (1) Get the user from the ContainerRequestContext
4529
        // -------------------------------------
4530
        User authUser;
UNCOV
4531
        authUser = getRequestUser(crc);
×
4532

4533
        // -------------------------------------
4534
        // (2) Get the Dataset Id
4535
        // -------------------------------------
4536
        Dataset dataset;
4537

4538
        try {
UNCOV
4539
            dataset = findDatasetOrDie(idSupplied);
×
UNCOV
4540
        } catch (WrappedResponse wr) {
×
UNCOV
4541
            return wr.getResponse();
×
4542
        }
×
4543

4544
        dataset.getLocks().forEach(dl -> {
×
4545
            logger.info(dl.toString());
×
UNCOV
4546
        });
×
4547

4548
        //------------------------------------
4549
        // (2a) Make sure dataset does not have package file
4550
        // --------------------------------------
4551

UNCOV
4552
        for (DatasetVersion dv : dataset.getVersions()) {
×
UNCOV
4553
            if (dv.isHasPackageFile()) {
×
UNCOV
4554
                return error(Response.Status.FORBIDDEN,
×
4555
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4556
                );
4557
            }
4558
        }
×
4559

UNCOV
4560
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4561

UNCOV
4562
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4563
                dvRequest,
4564
                this.ingestService,
4565
                this.datasetService,
4566
                this.fileService,
4567
                this.permissionSvc,
4568
                this.commandEngine,
4569
                this.systemConfig
4570
        );
4571

UNCOV
4572
        return addFileHelper.replaceFiles(jsonData, dataset, authUser);
×
4573

4574
    }
4575

4576
    /**
4577
     * API to find curation assignments and statuses
4578
     *
4579
     * @return
4580
     * @throws WrappedResponse
4581
     */
4582
    @GET
4583
    @AuthRequired
4584
    @Path("/listCurationStates")
4585
    @Produces("text/csv")
4586
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {
4587

4588
        try {
UNCOV
4589
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
4590
            if (!user.isSuperuser()) {
×
UNCOV
4591
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4592
            }
4593
        } catch (WrappedResponse wr) {
×
4594
            return wr.getResponse();
×
UNCOV
4595
        }
×
4596

4597
        List<DataverseRole> allRoles = dataverseRoleService.findAll();
×
4598
        List<DataverseRole> curationRoles = new ArrayList<DataverseRole>();
×
UNCOV
4599
        allRoles.forEach(r -> {
×
4600
            if (r.permissions().contains(Permission.PublishDataset))
×
4601
                curationRoles.add(r);
×
4602
        });
×
4603
        HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>();
×
4604
        curationRoles.forEach(r -> {
×
4605
            assignees.put(r.getAlias(), null);
×
4606
        });
×
4607

4608
        StringBuilder csvSB = new StringBuilder(String.join(",",
×
4609
                BundleUtil.getStringFromBundle("dataset"),
×
UNCOV
4610
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
×
4611
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
×
4612
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
×
4613
                String.join(",", assignees.keySet())));
×
4614
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
×
4615
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
×
4616
            curationRoles.forEach(r -> {
×
4617
                assignees.put(r.getAlias(), new HashSet<String>());
×
4618
            });
×
4619
            for (RoleAssignment ra : ras) {
×
4620
                if (curationRoles.contains(ra.getRole())) {
×
4621
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
×
4622
                }
4623
            }
×
4624
            DatasetVersion dsv = dataset.getLatestVersion();
×
UNCOV
4625
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
×
4626
            String status = dsv.getExternalStatusLabel();
×
4627
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
×
4628
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
×
4629
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
×
4630
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
×
4631
            List<String> sList = new ArrayList<String>();
×
4632
            assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
×
4633
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
×
4634
        }
×
4635
        csvSB.append("\n");
×
4636
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
×
4637
    }
4638

4639
    // APIs to manage archival status
4640

4641
    @GET
4642
    @AuthRequired
4643
    @Produces(MediaType.APPLICATION_JSON)
4644
    @Path("/{id}/{version}/archivalStatus")
4645
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4646
                                                    @PathParam("id") String datasetId,
4647
                                                    @PathParam("version") String versionNumber,
4648
                                                    @Context UriInfo uriInfo,
4649
                                                    @Context HttpHeaders headers) {
4650

4651
        try {
UNCOV
4652
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
4653
            if (!au.isSuperuser()) {
×
UNCOV
4654
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4655
            }
4656
            DataverseRequest req = createDataverseRequest(au);
×
4657
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4658
                    headers);
4659

4660
            if (dsv.getArchivalCopyLocation() == null) {
×
UNCOV
4661
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
×
4662
            } else {
4663
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
×
4664
                return ok(status);
×
4665
            }
4666
        } catch (WrappedResponse wr) {
×
4667
            return wr.getResponse();
×
4668
        }
4669
    }
4670

4671
    @PUT
4672
    @AuthRequired
4673
    @Consumes(MediaType.APPLICATION_JSON)
4674
    @Path("/{id}/{version}/archivalStatus")
4675
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4676
                                                    @PathParam("id") String datasetId,
4677
                                                    @PathParam("version") String versionNumber,
4678
                                                    String newStatus,
4679
                                                    @Context UriInfo uriInfo,
4680
                                                    @Context HttpHeaders headers) {
4681

UNCOV
4682
        logger.fine(newStatus);
×
4683
        try {
UNCOV
4684
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4685

UNCOV
4686
            if (!au.isSuperuser()) {
×
4687
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4688
            }
4689
            
4690
            //Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
UNCOV
4691
            JsonObject update= JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));
×
4692
            
UNCOV
4693
            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
×
4694
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
×
UNCOV
4695
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
×
4696
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {
×
4697

4698
                    DataverseRequest req = createDataverseRequest(au);
×
4699
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
×
4700
                            uriInfo, headers);
4701

4702
                    if (dsv == null) {
×
UNCOV
4703
                        return error(Status.NOT_FOUND, "Dataset version not found");
×
4704
                    }
4705
                    if (isSingleVersionArchiving()) {
×
4706
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
×
UNCOV
4707
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
×
4708
                                return error(Status.CONFLICT, "Dataset already archived.");
×
4709
                            }
4710
                        }
×
4711
                    }
4712

4713
                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
×
UNCOV
4714
                    dsv = datasetversionService.merge(dsv);
×
UNCOV
4715
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
×
4716
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());
×
4717

4718
                    return ok("Status updated");
×
4719
                }
4720
            }
4721
        } catch (WrappedResponse wr) {
×
UNCOV
4722
            return wr.getResponse();
×
UNCOV
4723
        } catch (JsonException| IllegalStateException ex) {
×
4724
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
×
4725
        }
×
4726
        return error(Status.BAD_REQUEST, "Unacceptable status format");
×
4727
    }
4728
    
4729
    @DELETE
4730
    @AuthRequired
4731
    @Produces(MediaType.APPLICATION_JSON)
4732
    @Path("/{id}/{version}/archivalStatus")
4733
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4734
                                                       @PathParam("id") String datasetId,
4735
                                                       @PathParam("version") String versionNumber,
4736
                                                       @Context UriInfo uriInfo,
4737
                                                       @Context HttpHeaders headers) {
4738

4739
        try {
UNCOV
4740
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
4741
            if (!au.isSuperuser()) {
×
UNCOV
4742
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4743
            }
4744

4745
            DataverseRequest req = createDataverseRequest(au);
×
UNCOV
4746
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4747
                    headers);
4748
            if (dsv == null) {
×
4749
                return error(Status.NOT_FOUND, "Dataset version not found");
×
4750
            }
4751
            dsv.setArchivalCopyLocation(null);
×
4752
            dsv = datasetversionService.merge(dsv);
×
4753

4754
            return ok("Status deleted");
×
4755

UNCOV
4756
        } catch (WrappedResponse wr) {
×
4757
            return wr.getResponse();
×
4758
        }
4759
    }
4760
    
4761
    private boolean isSingleVersionArchiving() {
UNCOV
4762
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
×
UNCOV
4763
        if (className != null) {
×
4764
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
4765
            try {
4766
                clazz =  Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
×
UNCOV
4767
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
×
UNCOV
4768
            } catch (ClassNotFoundException e) {
×
4769
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
×
4770
            } catch (ClassCastException cce) {
×
4771
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
×
4772
            }
×
4773
        }
4774
        return false;
×
4775
    }
4776
    
4777
    // This method provides a callback for an external tool to retrieve it's
4778
    // parameters/api URLs. If the request is authenticated, e.g. by it being
4779
    // signed, the api URLs will be signed. If a guest request is made, the URLs
4780
    // will be plain/unsigned.
4781
    // This supports the cases where a tool is accessing a restricted resource (e.g.
4782
    // for a draft dataset), or public case.
4783
    @GET
4784
    @AuthRequired
4785
    @Path("{id}/versions/{version}/toolparams/{tid}")
4786
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
4787
                                            @PathParam("tid") long externalToolId,
4788
                                            @PathParam("id") String datasetId,
4789
                                            @PathParam("version") String version,
4790
                                            @QueryParam(value = "locale") String locale) {
4791
        try {
UNCOV
4792
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
UNCOV
4793
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
×
UNCOV
4794
            if (target == null) {
×
4795
                return error(BAD_REQUEST, "DatasetVersion not found.");
×
4796
            }
4797
            
4798
            ExternalTool externalTool = externalToolService.findById(externalToolId);
×
UNCOV
4799
            if(externalTool==null) {
×
UNCOV
4800
                return error(BAD_REQUEST, "External tool not found.");
×
4801
            }
4802
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
×
4803
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
×
4804
            }
4805
            ApiToken apiToken = null;
×
4806
            User u = getRequestUser(crc);
×
UNCOV
4807
            apiToken = authSvc.getValidApiTokenForUser(u);
×
4808

4809
            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
×
4810
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
×
UNCOV
4811
        } catch (WrappedResponse wr) {
×
4812
            return wr.getResponse();
×
4813
        }
4814
    }
4815

4816
    @GET
4817
    @Path("summaryFieldNames")
4818
    public Response getDatasetSummaryFieldNames() {
UNCOV
4819
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
×
UNCOV
4820
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
×
UNCOV
4821
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
×
4822
        for (String fieldName : fieldNames) {
×
4823
            fieldNamesArrayBuilder.add(fieldName);
×
4824
        }
4825
        return ok(fieldNamesArrayBuilder);
×
4826
    }
4827

4828
    @GET
4829
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
4830
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
UNCOV
4831
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
UNCOV
4832
        if (privateUrlUser == null) {
×
UNCOV
4833
            return notFound("Private URL user not found");
×
4834
        }
4835
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
×
4836
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
×
UNCOV
4837
        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
×
4838
            throw new NotAcceptableException("Anonymized Access not enabled");
×
4839
        }
4840
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4841
        if (dsv == null || dsv.getId() == null) {
×
UNCOV
4842
            return notFound("Dataset version not found");
×
4843
        }
4844
        JsonObjectBuilder responseJson;
4845
        if (isAnonymizedAccess) {
×
UNCOV
4846
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
×
UNCOV
4847
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
×
4848
        } else {
×
4849
            responseJson = json(dsv, null, true, returnOwners);
×
4850
        }
4851
        return ok(responseJson);
×
4852
    }
4853

4854
    @GET
4855
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
4856
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
UNCOV
4857
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
UNCOV
4858
        if (privateUrlUser == null) {
×
UNCOV
4859
            return notFound("Private URL user not found");
×
4860
        }
4861
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4862
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
×
UNCOV
4863
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
×
4864
    }
4865

4866
    @GET
4867
    @AuthRequired
4868
    @Path("{id}/versions/{versionId}/citation")
4869
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
4870
                                              @PathParam("id") String datasetId,
4871
                                              @PathParam("versionId") String versionId,
4872
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
4873
                                              @Context UriInfo uriInfo,
4874
                                              @Context HttpHeaders headers) {
UNCOV
4875
        boolean checkFilePerms = false;
×
UNCOV
4876
        return response(req -> ok(
×
UNCOV
4877
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers,
×
4878
                        includeDeaccessioned, checkFilePerms).getCitation(true, false)),
×
4879
                getRequestUser(crc));
×
4880
    }
4881

4882
    @POST
4883
    @AuthRequired
4884
    @Path("{id}/versions/{versionId}/deaccession")
4885
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
UNCOV
4886
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
×
UNCOV
4887
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
×
4888
        }
4889
        return response(req -> {
×
4890
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
4891
            try {
4892
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
×
4893
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
×
UNCOV
4894
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
×
4895
                if (deaccessionForwardURL != null) {
×
4896
                    try {
4897
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
×
4898
                    } catch (IllegalArgumentException iae) {
×
UNCOV
4899
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
×
4900
                    }
×
4901
                }
4902
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
×
4903
                
UNCOV
4904
                return ok("Dataset " + 
×
4905
                        (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) + 
×
4906
                        " deaccessioned for version " + versionId);
4907
            } catch (JsonParsingException jpe) {
×
4908
                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
4909
            }
4910
        }, getRequestUser(crc));
×
4911
    }
4912

4913
    @GET
4914
    @AuthRequired
4915
    @Path("{identifier}/guestbookEntryAtRequest")
4916
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4917
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4918

4919
        Dataset dataset;
4920

4921
        try {
UNCOV
4922
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
4923
        } catch (WrappedResponse ex) {
×
UNCOV
4924
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4925
        }
×
4926
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
×
4927
        if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
×
4928
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4929
        }
4930
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
×
4931
    }
4932

4933
    @PUT
4934
    @AuthRequired
4935
    @Path("{identifier}/guestbookEntryAtRequest")
4936
    public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4937
                                               boolean gbAtRequest,
4938
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4939

4940
        // Superuser-only:
4941
        AuthenticatedUser user;
4942
        try {
UNCOV
4943
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
4944
        } catch (WrappedResponse ex) {
×
UNCOV
4945
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4946
        }
×
4947
        if (!user.isSuperuser()) {
×
4948
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4949
        }
4950

4951
        Dataset dataset;
4952

4953
        try {
UNCOV
4954
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
4955
        } catch (WrappedResponse ex) {
×
UNCOV
4956
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4957
        }
×
4958
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
×
4959
        if (!gbAtRequestOpt.isPresent()) {
×
4960
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
×
4961
        }
4962
        String choice = Boolean.valueOf(gbAtRequest).toString();
×
4963
        dataset.setGuestbookEntryAtRequest(choice);
×
UNCOV
4964
        datasetService.merge(dataset);
×
4965
        return ok("Guestbook Entry At Request set to: " + choice);
×
4966
    }
4967

4968
    @DELETE
4969
    @AuthRequired
4970
    @Path("{identifier}/guestbookEntryAtRequest")
4971
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4972
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4973

4974
        // Superuser-only:
4975
        AuthenticatedUser user;
4976
        try {
UNCOV
4977
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
4978
        } catch (WrappedResponse ex) {
×
UNCOV
4979
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4980
        }
×
4981
        if (!user.isSuperuser()) {
×
4982
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4983
        }
4984

4985
        Dataset dataset;
4986

4987
        try {
UNCOV
4988
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
4989
        } catch (WrappedResponse ex) {
×
UNCOV
4990
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4991
        }
×
4992

4993
        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
×
4994
        datasetService.merge(dataset);
×
UNCOV
4995
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4996
    }
4997

4998
    @GET
4999
    @AuthRequired
5000
    @Path("{id}/userPermissions")
5001
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
5002
        Dataset dataset;
5003
        try {
UNCOV
5004
            dataset = findDatasetOrDie(datasetId);
×
UNCOV
5005
        } catch (WrappedResponse wr) {
×
UNCOV
5006
            return wr.getResponse();
×
5007
        }
×
5008
        User requestUser = getRequestUser(crc);
×
5009
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
5010
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
×
5011
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
×
5012
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
×
5013
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
×
5014
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
×
5015
        return ok(jsonObjectBuilder);
×
5016
    }
5017

5018
    @GET
5019
    @AuthRequired
5020
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
5021
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
5022
                                                 @PathParam("id") String datasetId,
5023
                                                 @PathParam("versionId") String versionId,
5024
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
5025
                                                 @Context UriInfo uriInfo,
5026
                                                 @Context HttpHeaders headers) {
UNCOV
5027
        return response(req -> {
×
UNCOV
5028
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
UNCOV
5029
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
×
5030
        }, getRequestUser(crc));
×
5031
    }
5032
    
5033
    /**
5034
     * Get the PidProvider that will be used for generating new DOIs in this dataset
5035
     *
5036
     * @return - the id of the effective PID generator for the given dataset
5037
     * @throws WrappedResponse
5038
     */
5039
    @GET
5040
    @AuthRequired
5041
    @Path("{identifier}/pidGenerator")
5042
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5043
            @Context HttpHeaders headers) throws WrappedResponse {
5044

5045
        Dataset dataset;
5046

5047
        try {
UNCOV
5048
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
5049
        } catch (WrappedResponse ex) {
×
UNCOV
5050
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5051
        }
×
5052
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
×
5053
        if(pidProvider == null) {
×
5054
            //This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it
5055
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
×
5056
        }
UNCOV
5057
        String pidGeneratorId = pidProvider.getId();
×
5058
        return ok(pidGeneratorId);
×
5059
    }
5060

5061
    @PUT
5062
    @AuthRequired
5063
    @Path("{identifier}/pidGenerator")
5064
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
5065
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {
5066

5067
        // Superuser-only:
5068
        AuthenticatedUser user;
5069
        try {
UNCOV
5070
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
5071
        } catch (WrappedResponse ex) {
×
UNCOV
5072
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
5073
        }
×
5074
        if (!user.isSuperuser()) {
×
5075
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5076
        }
5077

5078
        Dataset dataset;
5079

5080
        try {
UNCOV
5081
            dataset = findDatasetOrDie(datasetId);
×
UNCOV
5082
        } catch (WrappedResponse ex) {
×
UNCOV
5083
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5084
        }
×
5085
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
×
5086
            dataset.setPidGeneratorId(generatorId);
×
5087
            datasetService.merge(dataset);
×
5088
            return ok("PID Generator set to: " + generatorId);
×
5089
        } else {
5090
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the give id");
×
5091
        }
5092

5093
    }
5094

5095
    @DELETE
5096
    @AuthRequired
5097
    @Path("{identifier}/pidGenerator")
5098
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5099
            @Context HttpHeaders headers) throws WrappedResponse {
5100

5101
        // Superuser-only:
5102
        AuthenticatedUser user;
5103
        try {
UNCOV
5104
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
5105
        } catch (WrappedResponse ex) {
×
UNCOV
5106
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5107
        }
×
5108
        if (!user.isSuperuser()) {
×
5109
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5110
        }
5111

5112
        Dataset dataset;
5113

5114
        try {
UNCOV
5115
            dataset = findDatasetOrDie(dvIdtf);
×
UNCOV
5116
        } catch (WrappedResponse ex) {
×
UNCOV
5117
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5118
        }
×
5119

5120
        dataset.setPidGenerator(null);
×
5121
        datasetService.merge(dataset);
×
UNCOV
5122
        return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
×
5123
    }
5124

5125
    @GET
5126
    @Path("datasetTypes")
5127
    public Response getDatasetTypes() {
UNCOV
5128
        JsonArrayBuilder jab = Json.createArrayBuilder();
×
UNCOV
5129
        List<DatasetType> datasetTypes = datasetTypeSvc.listAll();
×
UNCOV
5130
        for (DatasetType datasetType : datasetTypes) {
×
5131
            JsonObjectBuilder job = Json.createObjectBuilder();
×
5132
            job.add("id", datasetType.getId());
×
5133
            job.add("name", datasetType.getName());
×
5134
            jab.add(job);
×
5135
        }
×
5136
        return ok(jab.build());
×
5137
    }
5138

5139
    @GET
5140
    @Path("datasetTypes/{idOrName}")
5141
    public Response getDatasetTypes(@PathParam("idOrName") String idOrName) {
UNCOV
5142
        DatasetType datasetType = null;
×
UNCOV
5143
        if (StringUtils.isNumeric(idOrName)) {
×
5144
            try {
5145
                long id = Long.parseLong(idOrName);
×
5146
                datasetType = datasetTypeSvc.getById(id);
×
UNCOV
5147
            } catch (NumberFormatException ex) {
×
5148
                return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
×
5149
            }
×
5150
        } else {
5151
            datasetType = datasetTypeSvc.getByName(idOrName);
×
5152
        }
UNCOV
5153
        if (datasetType != null) {
×
5154
            return ok(datasetType.toJson());
×
5155
        } else {
5156
            return error(NOT_FOUND, "Could not find a dataset type with name " + idOrName);
×
5157
        }
5158
    }
5159

5160
    @POST
5161
    @AuthRequired
5162
    @Path("datasetTypes")
5163
    public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) {
5164
        AuthenticatedUser user;
5165
        try {
UNCOV
5166
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
5167
        } catch (WrappedResponse ex) {
×
UNCOV
5168
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5169
        }
×
5170
        if (!user.isSuperuser()) {
×
5171
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5172
        }
5173

5174
        if (jsonIn == null || jsonIn.isEmpty()) {
×
UNCOV
5175
            return error(BAD_REQUEST, "JSON input was null or empty!");
×
5176
        }
5177

5178
        String nameIn = null;
×
5179
        try {
UNCOV
5180
            JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn);
×
5181
            nameIn = jsonObject.getString("name", null);
×
UNCOV
5182
        } catch (JsonParsingException ex) {
×
5183
            return error(BAD_REQUEST, "Problem parsing supplied JSON: " + ex.getLocalizedMessage());
×
5184
        }
×
5185
        if (nameIn == null) {
×
5186
            return error(BAD_REQUEST, "A name for the dataset type is required");
×
5187
        }
5188
        if (StringUtils.isNumeric(nameIn)) {
×
5189
            // getDatasetTypes supports id or name so we don't want a names that looks like an id
UNCOV
5190
            return error(BAD_REQUEST, "The name of the type cannot be only digits.");
×
5191
        }
5192

5193
        try {
UNCOV
5194
            DatasetType datasetType = new DatasetType();
×
UNCOV
5195
            datasetType.setName(nameIn);
×
UNCOV
5196
            DatasetType saved = datasetTypeSvc.save(datasetType);
×
5197
            Long typeId = saved.getId();
×
5198
            String name = saved.getName();
×
5199
            return ok(saved.toJson());
×
5200
        } catch (WrappedResponse ex) {
×
5201
            return error(BAD_REQUEST, ex.getMessage());
×
5202
        }
5203
    }
5204

5205
    @DELETE
5206
    @AuthRequired
5207
    @Path("datasetTypes/{id}")
5208
    public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) {
5209
        AuthenticatedUser user;
5210
        try {
UNCOV
5211
            user = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
5212
        } catch (WrappedResponse ex) {
×
UNCOV
5213
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5214
        }
×
5215
        if (!user.isSuperuser()) {
×
5216
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5217
        }
5218

5219
        if (doomed == null || doomed.isEmpty()) {
×
UNCOV
5220
            throw new IllegalArgumentException("ID is required!");
×
5221
        }
5222

5223
        long idToDelete;
5224
        try {
UNCOV
5225
            idToDelete = Long.parseLong(doomed);
×
UNCOV
5226
        } catch (NumberFormatException e) {
×
UNCOV
5227
            throw new IllegalArgumentException("ID must be a number");
×
5228
        }
×
5229

5230
        DatasetType datasetTypeToDelete = datasetTypeSvc.getById(idToDelete);
×
5231
        if (datasetTypeToDelete == null) {
×
UNCOV
5232
            return error(BAD_REQUEST, "Could not find dataset type with id " + idToDelete);
×
5233
        }
5234

5235
        if (DatasetType.DEFAULT_DATASET_TYPE.equals(datasetTypeToDelete.getName())) {
×
UNCOV
5236
            return error(Status.FORBIDDEN, "You cannot delete the default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE);
×
5237
        }
5238

5239
        try {
UNCOV
5240
            int numDeleted = datasetTypeSvc.deleteById(idToDelete);
×
UNCOV
5241
            if (numDeleted == 1) {
×
UNCOV
5242
                return ok("deleted");
×
5243
            } else {
5244
                return error(BAD_REQUEST, "Something went wrong. Number of dataset types deleted: " + numDeleted);
×
5245
            }
UNCOV
5246
        } catch (WrappedResponse ex) {
×
5247
            return error(BAD_REQUEST, ex.getMessage());
×
5248
        }
5249
    }
5250

5251
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc