IQSS / dataverse / #24523

11 Feb 2025 10:34PM UTC coverage: 22.733% (-0.02%) from 22.749%

Pull Request #11230: Delete Files From Dataset
Merge 919836007 into f4380eafe

0 of 42 new or added lines in 1 file covered. (0.0%)

2 existing lines in 2 files now uncovered.

19944 of 87733 relevant lines covered (22.73%)

0.23 hits per line

Source File

/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
package edu.harvard.iq.dataverse.api;

import com.amazonaws.services.s3.model.PartETag;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.DatasetLock.Reason;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
import edu.harvard.iq.dataverse.dataaccess.*;
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
import edu.harvard.iq.dataverse.engine.command.impl.*;
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
import edu.harvard.iq.dataverse.globus.GlobusUtil;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.makedatacount.*;
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.settings.FeatureFlags;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
import edu.harvard.iq.dataverse.util.*;
import edu.harvard.iq.dataverse.util.bagit.OREMap;
import edu.harvard.iq.dataverse.util.json.*;
import edu.harvard.iq.dataverse.workflow.Workflow;
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
import jakarta.ejb.EJB;
import jakarta.ejb.EJBException;
import jakarta.inject.Inject;
import jakarta.json.*;
import jakarta.json.stream.JsonParsingException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.ws.rs.*;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.core.*;
import jakarta.ws.rs.core.Response.Status;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.microprofile.openapi.annotations.Operation;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static edu.harvard.iq.dataverse.api.ApiConstants.*;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
import edu.harvard.iq.dataverse.dataset.DatasetType;
import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;

@Path("datasets")
public class Datasets extends AbstractApiBean {

    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");

    @Inject DataverseSession session;

    @EJB
    DatasetServiceBean datasetService;

    @EJB
    DataverseServiceBean dataverseService;

    @EJB
    GlobusServiceBean globusService;

    @EJB
    UserNotificationServiceBean userNotificationService;

    @EJB
    PermissionServiceBean permissionService;

    @EJB
    AuthenticationServiceBean authenticationServiceBean;

    @EJB
    DDIExportServiceBean ddiExportService;

    @EJB
    MetadataBlockServiceBean metadataBlockService;

    @EJB
    DataFileServiceBean fileService;

    @EJB
    IngestServiceBean ingestService;

    @EJB
    EjbDataverseEngine commandEngine;

    @EJB
    IndexServiceBean indexService;

    @EJB
    S3PackageImporter s3PackageImporter;

    @EJB
    SettingsServiceBean settingsService;

    // TODO: Move to AbstractApiBean
    @EJB
    DatasetMetricsServiceBean datasetMetricsSvc;

    @EJB
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;

    @EJB
    EmbargoServiceBean embargoService;

    @EJB
    RetentionServiceBean retentionService;

    @Inject
    MakeDataCountLoggingServiceBean mdcLogService;

    @Inject
    DataverseRequestServiceBean dvRequestService;

    @Inject
    WorkflowServiceBean wfService;

    @Inject
    DataverseRoleServiceBean dataverseRoleService;

    @EJB
    DatasetVersionServiceBean datasetversionService;

    @Inject
    PrivateUrlServiceBean privateUrlService;

    @Inject
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;

    @Inject
    DatasetTypeServiceBean datasetTypeSvc;

    /**
     * Used to consolidate the way we parse and handle dataset versions.
     * @param <T>
     */
    public interface DsVersionHandler<T> {
        T handleLatest();
        T handleDraft();
        T handleSpecific( long major, long minor );
        T handleLatestPublished();
    }

    @GET
    @AuthRequired
    @Path("{id}")
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") boolean returnOwners) {
        return response( req -> {
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true)));
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
            // Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
            if ((latest != null) && latest.isReleased()) {
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
                mdcLogService.logEntry(entry);
            }
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
        }, getRequestUser(crc));
    }

    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
    // to obtain the dataset that we are trying to export - which would handle
    // Auth in the process... For now, Auth isn't necessary - since export ONLY
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
    @GET
    @Path("/export")
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" })
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {

        try {
            Dataset dataset = datasetService.findByGlobalId(persistentId);
            if (dataset == null) {
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
            }

            ExportService instance = ExportService.getInstance();

            InputStream is = instance.getExport(dataset, exporter);

            String mediaType = instance.getMediaType(exporter);
            // Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
            mdcLogService.logEntry(entry);

            return Response.ok()
                    .entity(is)
                    .type(mediaType)
                    .build();
        } catch (Exception wr) {
            logger.warning(wr.getMessage());
            return error(Response.Status.FORBIDDEN, "Export Failed");
        }
    }

    @DELETE
    @AuthRequired
    @Path("{id}")
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
        // (and there's a comment that says "TODO: remove this command")
        // do we need an exposed API call for it?
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand,
        // if the dataset only has 1 version... In other words, the functionality
        // currently provided by this API is covered between the "deleteDraftVersion" and
        // "destroyDataset" API calls.
        // (The logic below follows the current implementation of the underlying
        // commands!)

        User u = getRequestUser(crc);
        return response( req -> {
            Dataset doomed = findDatasetOrDie(id);
            DatasetVersion doomedVersion = doomed.getLatestVersion();
            boolean destroy = false;

            if (doomed.getVersions().size() == 1) {
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
                }
                destroy = true;
            } else {
                if (!doomedVersion.isDraft()) {
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
                }
            }

            // Gather the locations of the physical files that will need to be
            // deleted once the destroy command execution has been finalized:
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);

            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));

            // If we have gotten this far, the destroy command has succeeded,
            // so we can finalize it by permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no
            // longer exist in the database, before attempting to delete
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Dataset " + id + " deleted");
        }, u);
    }

    @DELETE
    @AuthRequired
    @Path("{id}/destroy")
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {

        User u = getRequestUser(crc);
        return response(req -> {
            // first check if dataset is released, and if so, if user is a superuser
            Dataset doomed = findDatasetOrDie(id);

            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
            }

            // Gather the locations of the physical files that will need to be
            // deleted once the destroy command execution has been finalized:
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

            execCommand(new DestroyDatasetCommand(doomed, req));

            // If we have gotten this far, the destroy command has succeeded,
            // so we can finalize permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no
            // longer exist in the database, before attempting to delete
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Dataset " + id + " destroyed");
        }, u);
    }

    @DELETE
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId ){
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
        }

        return response( req -> {
            Dataset dataset = findDatasetOrDie(id);
            DatasetVersion doomed = dataset.getLatestVersion();

            if (!doomed.isDraft()) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
            }

            // Gather the locations of the physical files that will need to be
            // deleted once the destroy command execution has been finalized:

            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

            execCommand( new DeleteDatasetVersionCommand(req, dataset));

            // If we have gotten this far, the delete command has succeeded -
            // by either deleting the Draft version of a published dataset,
            // or destroying an unpublished one.
            // This means we can finalize permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no
            // longer exist in the database, before attempting to delete
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Draft version of dataset " + id + " deleted");
        }, getRequestUser(crc));
    }

    @DELETE
    @AuthRequired
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
        boolean index = true;
        return response(req -> {
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
        }, getRequestUser(crc));
    }

    @PUT
    @AuthRequired
    @Path("{id}/citationdate")
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
        return response( req -> {
            if ( dsfTypeName.trim().isEmpty() ){
                return badRequest("Please provide a dataset field type in the requst body.");
            }
            DatasetFieldType dsfType = null;
            if (!":publicationDate".equals(dsfTypeName)) {
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
                if (dsfType == null) {
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
                }
            }

            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
        }, getRequestUser(crc));
    }

    @DELETE
    @AuthRequired
    @Path("{id}/citationdate")
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
        return response( req -> {
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
            return ok("Citation Date for dataset " + id + " set to default");
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions")
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("excludeMetadataBlocks") Boolean excludeMetadataBlocks, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {

        return response( req -> {
            Dataset dataset = findDatasetOrDie(id);
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;
            Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks;

            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
                                .stream()
                                .map( d -> json(d, deepLookup, includeMetadataBlocks) )
                                .collect(toJsonArray()));
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    public Response getVersion(@Context ContainerRequestContext crc,
                               @PathParam("id") String datasetId,
                               @PathParam("versionId") String versionId,
                               @QueryParam("excludeFiles") Boolean excludeFiles,
                               @QueryParam("excludeMetadataBlocks") Boolean excludeMetadataBlocks,
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                               @QueryParam("returnOwners") boolean returnOwners,
                               @Context UriInfo uriInfo,
                               @Context HttpHeaders headers) {
        return response( req -> {

            // If excludeFiles is null the default is to provide the files and because of this we need to check permissions.
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;

            Dataset dataset = findDatasetOrDie(datasetId);
            DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req,
                                                                            versionId,
                                                                            dataset,
                                                                            uriInfo,
                                                                            headers,
                                                                            includeDeaccessioned,
                                                                            checkPerms);

            if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) {
                return notFound("Dataset version not found");
            }

            if (excludeFiles == null ? true : !excludeFiles) {
                requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId());
            }
            Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks;

            JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
                                                 null,
                                                 excludeFiles == null ? true : !excludeFiles,
                                                 returnOwners, includeMetadataBlocks);
            return ok(jsonBuilder);

        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/files")
    public Response getVersionFiles(@Context ContainerRequestContext crc,
                                    @PathParam("id") String datasetId,
                                    @PathParam("versionId") String versionId,
                                    @QueryParam("limit") Integer limit,
                                    @QueryParam("offset") Integer offset,
                                    @QueryParam("contentType") String contentType,
                                    @QueryParam("accessStatus") String accessStatus,
                                    @QueryParam("categoryName") String categoryName,
                                    @QueryParam("tabularTagName") String tabularTagName,
                                    @QueryParam("searchText") String searchText,
                                    @QueryParam("orderCriteria") String orderCriteria,
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                    @Context UriInfo uriInfo,
                                    @Context HttpHeaders headers) {
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned);
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
            try {
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
            }
            FileSearchCriteria fileSearchCriteria;
            try {
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/files/counts")
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
                                         @PathParam("id") String datasetId,
                                         @PathParam("versionId") String versionId,
                                         @QueryParam("contentType") String contentType,
                                         @QueryParam("accessStatus") String accessStatus,
                                         @QueryParam("categoryName") String categoryName,
                                         @QueryParam("tabularTagName") String tabularTagName,
                                         @QueryParam("searchText") String searchText,
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                         @Context UriInfo uriInfo,
                                         @Context HttpHeaders headers) {
        return response(req -> {
            FileSearchCriteria fileSearchCriteria;
            try {
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
            return ok(jsonObjectBuilder);
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/dirindex")
    @Produces("text/html")
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {

        folderName = folderName == null ? "" : folderName;
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;

        DatasetVersion version;
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);

        // return "NOT FOUND" if there is no such folder in the dataset version:

        if ("".equals(output)) {
            return notFound("Folder " + folderName + " does not exist");
        }

        String indexFileName = folderName.equals("") ? ".index.html"
                : ".index-" + folderName.replace('/', '_') + ".html";
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");

        return Response.ok()
                .entity(output)
                //.type("application/html").
                .build();
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/metadata")
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return response( req -> ok(
                    jsonByBlocks(
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
                                .getDatasetFields())), getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
                                            @PathParam("id") String datasetId,
                                            @PathParam("versionNumber") String versionNumber,
                                            @PathParam("block") String blockName,
                                            @Context UriInfo uriInfo,
                                            @Context HttpHeaders headers) {

        return response( req -> {
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );

            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
                if ( p.getKey().getName().equals(blockName) ) {
                    return ok(json(p.getKey(), p.getValue()));
                }
            }
            return notFound("metadata block named " + blockName + " not found");
        }, getRequestUser(crc));
    }

    /**
     * Add Signposting
     * @param datasetId
     * @param versionId
     * @param uriInfo
     * @param headers
     * @return
     */
    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/linkset")
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId,
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        if (DS_VERSION_DRAFT.equals(versionId)) {
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
        }
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
        try {
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            return Response
                    .ok(Json.createObjectBuilder()
                            .add("linkset",
                                    new SignpostingResources(systemConfig, dsv,
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
                                                    .getJsonLinkset())
                            .build())
                    .type(MediaType.APPLICATION_JSON).build();
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @POST
    @AuthRequired
    @Path("{id}/modifyRegistration")
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
        return response( req -> {
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
            return ok("Dataset " + id + " target url updated");
        }, getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("/modifyRegistrationAll")
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
        return response( req -> {
            datasetService.findAll().forEach( ds -> {
                try {
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
                } catch (WrappedResponse ex) {
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
                }
            });
            return ok("Update All Dataset target url completed");
        }, getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("{id}/modifyRegistrationMetadata")
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {

        try {
            Dataset dataset = findDatasetOrDie(id);
            if (!dataset.isReleased()) {
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
            }
        } catch (WrappedResponse ex) {
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
        }

        return response(req -> {
            Dataset dataset = findDatasetOrDie(id);
            execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req));
            List<String> args = Arrays.asList(dataset.getIdentifier());
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
        }, getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("/modifyRegistrationPIDMetadataAll")
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
        return response( req -> {
            datasetService.findAll().forEach( ds -> {
                try {
                    logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier());
                    if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) {
                        if (ds.isReleased()) {
                            logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered");
                        }
                    } else {
                        execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
                    }
                } catch (WrappedResponse ex) {
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
                }
            });
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
        }, getRequestUser(crc));
    }

    @PUT
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
        }

        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);

            // clear possibly stale fields from the incoming dataset version.
            // creation and modification dates are updated by the commands.
            incomingVersion.setId(null);
            incomingVersion.setVersionNumber(null);
            incomingVersion.setMinorVersionNumber(null);
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
            incomingVersion.setDataset(ds);
            incomingVersion.setCreateTime(null);
            incomingVersion.setLastUpdateTime(null);

            if (!incomingVersion.getFileMetadatas().isEmpty()){
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
            }

            boolean updateDraft = ds.getLatestVersion().isDraft();

            DatasetVersion managedVersion;
            if (updateDraft) {
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
                managedVersion = managedDataset.getOrCreateEditVersion();
            } else {
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
            }
            return ok( json(managedVersion, true) );

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );

        } catch (WrappedResponse ex) {
            return ex.getResponse();

        }
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
            OREMap ore = new OREMap(dsv,
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
            return ok(ore.getOREMapBuilder(true));

        } catch (WrappedResponse ex) {
            ex.printStackTrace();
            return ex.getResponse();
        } catch (Exception jpe) {
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
            jpe.printStackTrace();
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
        }
    }

    @GET
    @AuthRequired
    @Path("{id}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
    }

    @PUT
    @AuthRequired
    @Path("{id}/metadata")
    @Consumes("application/ld+json, application/json-ld")
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {

        try {
            Dataset ds = findDatasetOrDie(id);
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            // Get draft state as of now

            boolean updateDraft = ds.getLatestVersion().isDraft();
            // Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
            if (!hasValidTerms) {
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
            }
            DatasetVersion managedVersion;
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getLatestVersion();
            String info = updateDraft ? "Version Updated" : "Version Created";
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        } catch (JsonParsingException jpe) {
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
        }
    }

    @PUT
    @AuthRequired
    @Path("{id}/metadata/delete")
    @Consumes("application/ld+json, application/json-ld")
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
        try {
            Dataset ds = findDatasetOrDie(id);
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            // Get draft state as of now

            boolean updateDraft = ds.getLatestVersion().isDraft();
            // Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            DatasetVersion managedVersion;
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getLatestVersion();
            String info = updateDraft ? "Version Updated" : "Version Created";
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));

        } catch (WrappedResponse ex) {
            ex.printStackTrace();
            return ex.getResponse();
        } catch (JsonParsingException jpe) {
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
            jpe.printStackTrace();
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
        }
    }
894

895
    @PUT
896
    @AuthRequired
897
    @Path("{id}/deleteMetadata")
898
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
899

900
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
901

902
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
903
    }
904

905
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
906
        try {
907

908
            Dataset ds = findDatasetOrDie(id);
×
909
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
910
            //Get the current draft or create a new version to update
911
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
912
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
913
            List<DatasetField> fields = new LinkedList<>();
×
914
            DatasetField singleField = null;
×
915

916
            JsonArray fieldsJson = json.getJsonArray("fields");
×
917
            if (fieldsJson == null) {
×
918
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
919
                fields.add(singleField);
×
920
            } else {
921
                fields = jsonParser().parseMultipleFields(json);
×
922
            }
923

924
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
925

926
            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
×
927
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
×
928
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();
×
929

930
            for (DatasetField updateField : fields) {
×
931
                boolean found = false;
×
932
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
933
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
934
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
935
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
936
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
937
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
938
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
×
939
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
×
940
                                                found = true;
×
941
                                                controlledVocabularyItemsToRemove.add(existing);
×
942
                                            }
943
                                        }
×
944
                                        if (!found) {
×
945
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
×
946
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
×
947
                                        }
948
                                    }
×
949
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
×
950
                                        dsf.getControlledVocabularyValues().remove(remove);
×
951
                                    }
×
952

953
                                } else {
954
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
×
955
                                        found = true;
×
956
                                        dsf.setSingleControlledVocabularyValue(null);
×
957
                                    }
958

959
                                }
960
                            } else {
961
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
962
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
963
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
964
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
×
965
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
×
966
                                                    found = true;
×
967
                                                    datasetFieldValueItemsToRemove.add(dfv);
×
968
                                                }
969
                                            }
×
970
                                            if (!found) {
×
971
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
×
972
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
×
973
                                            }
974
                                        }
×
975
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
×
976
                                            dsf.getDatasetFieldValues().remove(remove);
×
977
                                        });
×
978

979
                                    } else {
980
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
×
981
                                            found = true;
×
982
                                            dsf.setSingleValue(null);
×
983
                                        }
984

985
                                    }
986
                                } else {
987
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
988
                                        String deleteVal = getCompoundDisplayValue(dfcv);
×
989
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
×
990
                                            String existingString = getCompoundDisplayValue(existing);
×
991
                                            if (existingString.equals(deleteVal)) {
×
992
                                                found = true;
×
993
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
×
994
                                            }
995
                                        }
×
996
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
×
997
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
×
998
                                        });
×
999
                                        if (!found) {
×
1000
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
×
1001
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
×
1002
                                        }
1003
                                    }
×
1004
                                }
1005
                            }
1006
                        } else {
1007
                            found = true;
×
1008
                            dsf.setSingleValue(null);
×
1009
                            dsf.setSingleControlledVocabularyValue(null);
×
1010
                        }
1011
                        break;
×
1012
                    }
1013
                }
×
1014
                if (!found){
×
1015
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
×
1016
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
×
1017
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
×
1018
                }
1019
            }
×
1020

1021

1022
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1023
            return ok(json(managedVersion, true));
×
1024

1025
        } catch (JsonParseException ex) {
×
1026
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1027
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());
×
1028

1029
        } catch (WrappedResponse ex) {
×
1030
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
×
1031
            return ex.getResponse();
×
1032

1033
        }
1034
    
1035
    }
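    // Summary of the delete-metadata handling above: the requested values are removed from
    // the draft version's fields. Multi-value and compound fields drop only the matching
    // entries, single-value fields are cleared, and a 400 Bad Request is returned if any
    // requested value cannot be found in the current version.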
1036
    
1037
    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
1038
        String returnString = "";
×
1039
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
×
1040
            for (String value : dsf.getValues()) {
×
1041
                if (value != null) {
×
1042
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
×
1043
                }
1044
            }
×
1045
        }
×
1046
        return returnString;
×
1047
    }
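    // Note: getCompoundDisplayValue() joins the child field values with "; ", so an author
    // compound value might render as, for example, "Smith, Jane; Example University"
    // (illustrative values only, not taken from this codebase).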
1048
    
1049
    @PUT
1050
    @AuthRequired
1051
    @Path("{id}/editMetadata")
1052
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
1053

1054
        Boolean replaceData = replace != null;
×
1055
        DataverseRequest req = null;
×
1056
        req = createDataverseRequest(getRequestUser(crc));
×
1057

1058
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
1059
    }
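    // Illustrative call to the editMetadata endpoint above (placeholder host, token, and id;
    // assumes the usual /api/datasets base path and the X-Dataverse-key header):
    //   curl -H "X-Dataverse-key: $API_TOKEN" -X PUT \
    //        "$SERVER_URL/api/datasets/$ID/editMetadata?replace=true" \
    //        --upload-file dataset-update-metadata.json
    // The body is either a single field object or an object with a "fields" array, as parsed
    // in processDatasetUpdate() below.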
1060
    
1061
    
1062
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
1063
        try {
1064
           
1065
            Dataset ds = findDatasetOrDie(id);
×
1066
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1067
            //Get the current draft or create a new version to update
1068
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
1069
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
1070
            List<DatasetField> fields = new LinkedList<>();
×
1071
            DatasetField singleField = null;
×
1072
            
1073
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1074
            if (fieldsJson == null) {
×
1075
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
1076
                fields.add(singleField);
×
1077
            } else {
1078
                fields = jsonParser().parseMultipleFields(json);
×
1079
            }
1080
            
1081

1082
            String validationErrors = validateDatasetFieldValues(fields);
×
1083

1084
            if (!validationErrors.isEmpty()) {
×
1085
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
×
1086
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
×
1087
            }
1088

1089
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1090

1091
            // Loop through the update fields
1092
            // and compare them to the existing version fields:
1093
            // if a field of the same type already exists, add/replace its values;
1094
            // if not, add the entire DatasetField (dsf) to the version.
1095
            for (DatasetField updateField : fields) {
×
1096
                boolean found = false;
×
1097
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
1098
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
1099
                        found = true;
×
1100
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1101
                            List priorCVV = new ArrayList<>();
×
1102
                            String cvvDisplay = "";
×
1103

1104
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1105
                                cvvDisplay = dsf.getDisplayValue();
×
1106
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
×
1107
                                    priorCVV.add(cvvOld);
×
1108
                                }
×
1109
                            }
1110

1111
                            if (replaceData) {
×
1112
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1113
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
×
1114
                                    dsf.setDatasetFieldValues(new ArrayList<>());
×
1115
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
×
1116
                                    priorCVV.clear();
×
1117
                                    dsf.getControlledVocabularyValues().clear();
×
1118
                                } else {
1119
                                    dsf.setSingleValue("");
×
1120
                                    dsf.setSingleControlledVocabularyValue(null);
×
1121
                                }
1122
                              cvvDisplay="";
×
1123
                            }
1124
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1125
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1126
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1127
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
×
1128
                                            priorCVV.add(cvv);
×
1129
                                        }
1130
                                    }
×
1131
                                    dsf.setControlledVocabularyValues(priorCVV);
×
1132
                                } else {
1133
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1134
                                }
1135
                            } else {
1136
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
1137
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1138
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
1139
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
×
1140
                                                dfv.setDatasetField(dsf);
×
1141
                                                dsf.getDatasetFieldValues().add(dfv);
×
1142
                                            }
1143
                                        }
×
1144
                                    } else {
1145
                                        dsf.setSingleValue(updateField.getValue());
×
1146
                                    }
1147
                                } else {
1148
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
1149
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
×
1150
                                            dfcv.setParentDatasetField(dsf);
×
1151
                                            dsf.setDatasetVersion(dsv);
×
1152
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
×
1153
                                        }
1154
                                    }
×
1155
                                }
1156
                            }
1157
                        } else {
×
1158
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
×
1159
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
×
1160
                            }
1161
                        }
1162
                        break;
1163
                    }
1164
                }
×
1165
                if (!found) {
×
1166
                    updateField.setDatasetVersion(dsv);
×
1167
                    dsv.getDatasetFields().add(updateField);
×
1168
                }
1169
            }
×
1170
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1171

1172
            return ok(json(managedVersion, true));
×
1173

1174
        } catch (JsonParseException ex) {
×
1175
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1176
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1177

1178
        } catch (WrappedResponse ex) {
×
1179
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
1180
            return ex.getResponse();
×
1181

1182
        }
1183
    }
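    // Note on processDatasetUpdate() above: without replace=true, values can only be added to
    // fields that are empty or that allow multiple values; attempting to overwrite a populated
    // single-value field returns 400 Bad Request.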
1184
    
1185
    private String validateDatasetFieldValues(List<DatasetField> fields) {
1186
        StringBuilder error = new StringBuilder();
×
1187

1188
        for (DatasetField dsf : fields) {
×
1189
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1190
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
×
1191
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1192
            } else if (!dsf.getDatasetFieldType().isAllowMultiples()) {
×
1193
                if (dsf.getDatasetFieldType().isControlledVocabulary() && dsf.getSingleControlledVocabularyValue().getStrValue().isEmpty()) {
×
1194
                    error.append("Empty cvoc value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1195
                } else if (dsf.getDatasetFieldType().isCompound() && dsf.getDatasetFieldCompoundValues().isEmpty()) {
×
1196
                    error.append("Empty compound value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1197
                } else if (!dsf.getDatasetFieldType().isControlledVocabulary() && !dsf.getDatasetFieldType().isCompound() && dsf.getSingleValue().getValue().isEmpty()) {
×
1198
                    error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1199
                }
1200
            }
1201
        }
×
1202

1203
        if (!error.toString().isEmpty()) {
×
1204
            return (error.toString());
×
1205
        }
1206
        return "";
×
1207
    }
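    // For reference, a minimal update payload that passes the validation above could look like
    // the following (field name and value are illustrative; the standard dataset field JSON
    // with "typeName"/"value" keys is assumed):
    //   { "fields": [ { "typeName": "subject", "value": ["Chemistry"] } ] }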
1208
    
1209
    /**
1210
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
1211
     */
1212
    @GET
1213
    @AuthRequired
1214
    @Path("{id}/actions/:publish")
1215
    @Deprecated
1216
    public Response publishDatasetUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type) {
1217
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
×
1218
        return publishDataset(crc, id, type, false);
×
1219
    }
1220

1221
    @POST
1222
    @AuthRequired
1223
    @Path("{id}/actions/:publish")
1224
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
1225
        try {
1226
            if (type == null) {
×
1227
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
×
1228
            }
1229
            boolean updateCurrent=false;
×
1230
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1231
            type = type.toLowerCase();
×
1232
            boolean isMinor=false;
×
1233
            switch (type) {
×
1234
                case "minor":
1235
                    isMinor = true;
×
1236
                    break;
×
1237
                case "major":
1238
                    isMinor = false;
×
1239
                    break;
×
1240
                case "updatecurrent":
1241
                    if (user.isSuperuser()) {
×
1242
                        updateCurrent = true;
×
1243
                    } else {
1244
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
×
1245
                    }
1246
                    break;
1247
                default:
1248
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
×
1249
            }
1250

1251
            Dataset ds = findDatasetOrDie(id);
×
1252
            
1253
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1254
            if (!hasValidTerms) {
×
1255
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1256
            }
1257
            
1258
            if (mustBeIndexed) {
×
1259
                logger.fine("IT: " + ds.getIndexTime());
×
1260
                logger.fine("MT: " + ds.getModificationTime());
×
1261
                logger.fine("PIT: " + ds.getPermissionIndexTime());
×
1262
                logger.fine("PMT: " + ds.getPermissionModificationTime());
×
1263
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
×
1264
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
×
1265
                }
1266
                /*
1267
                 * Some calls, such as the /datasets/actions/:import* commands do not set the
1268
                 * modification or permission modification times. The checks here are trying to
1269
                 * see if indexing or permission indexing could be pending, so they check to see
1270
                 * if the relevant modification time is set and if so, whether the index is also
1271
                 * set and if so, if it is after the modification time. If the modification time is
1272
                 * set and the index time is null or is before the mod time, the 409/conflict
1273
                 * error is returned.
1274
                 *
1275
                 */
1276
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
×
1277
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
×
1278
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
×
1279
                }
1280
            }
1281
            if (updateCurrent) {
×
1282
                /*
1283
                 * Note: The code here mirrors that in the
1284
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
1285
                 * to the core logic (i.e. beyond updating the messaging about results) should
1286
                 * be applied to the code there as well.
1287
                 */
1288
                String errorMsg = null;
×
1289
                String successMsg = null;
×
1290
                try {
1291
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
×
1292
                    ds = commandEngine.submit(cmd);
×
1293
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");
×
1294

1295
                    // If configured, update archive copy as well
1296
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
×
1297
                    DatasetVersion updateVersion = ds.getLatestVersion();
×
1298
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
×
1299
                    if (archiveCommand != null) {
×
1300
                        // Delete the record of any existing copy since it is now out of date/incorrect
1301
                        updateVersion.setArchivalCopyLocation(null);
×
1302
                        /*
1303
                         * Then try to generate and submit an archival copy. Note that running this
1304
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
1305
                         * "The attribute [id] of class
1306
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
1307
                         * key column in the database. Updates are not allowed." To avoid that, and to
1308
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
1309
                         * pulled this out as a separate submit().
1310
                         */
1311
                        try {
1312
                            updateVersion = commandEngine.submit(archiveCommand);
×
1313
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
×
1314
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
×
1315
                            } else {
1316
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
×
1317
                            }
1318
                        } catch (CommandException ex) {
×
1319
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
×
1320
                            logger.severe(ex.getMessage());
×
1321
                        }
×
1322
                    }
1323
                } catch (CommandException ex) {
×
1324
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
1325
                    logger.severe(ex.getMessage());
×
1326
                }
×
1327
                if (errorMsg != null) {
×
1328
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
×
1329
                } else {
1330
                    return Response.ok(Json.createObjectBuilder()
×
1331
                            .add("status", ApiConstants.STATUS_OK)
×
1332
                            .add("status_details", successMsg)
×
1333
                            .add("data", json(ds)).build())
×
1334
                            .type(MediaType.APPLICATION_JSON)
×
1335
                            .build();
×
1336
                }
1337
            } else {
1338
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
×
1339
                        createDataverseRequest(user),
×
1340
                        isMinor));
1341
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
×
1342
            }
1343
        } catch (WrappedResponse ex) {
×
1344
            return ex.getResponse();
×
1345
        }
1346
    }
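    // Illustrative publish call (placeholder values; assumes the usual /api/datasets base path):
    //   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
    //        "$SERVER_URL/api/datasets/$ID/actions/:publish?type=major"
    // 'type' may be 'major', 'minor', or (superusers only) 'updatecurrent'; a 202 Accepted is
    // returned when a publication workflow takes over.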
1347

1348
    @POST
1349
    @AuthRequired
1350
    @Path("{id}/actions/:releasemigrated")
1351
    @Consumes("application/ld+json, application/json-ld")
1352
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
1353
        try {
1354
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1355
            if (!user.isSuperuser()) {
×
1356
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
×
1357
            }
1358

1359
            Dataset ds = findDatasetOrDie(id);
×
1360
            try {
1361
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
×
1362
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
×
1363
                logger.fine("Submitted date: " + pubDate);
×
1364
                LocalDateTime dateTime = null;
×
1365
                if(!StringUtils.isEmpty(pubDate)) {
×
1366
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
×
1367
                    final Timestamp time = Timestamp.valueOf(dateTime);
×
1368
                    //Set version release date
1369
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
×
1370
                }
1371
                // dataset.getPublicationDateFormattedYYYYMMDD())
1372
                // Assign a version number if not set
1373
                if (ds.getLatestVersion().getVersionNumber() == null) {
×
1374

1375
                    if (ds.getVersions().size() == 1) {
×
1376
                        // First Release
1377
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
×
1378
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
×
1379
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
×
1380
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
×
1381
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
×
1382
                    } else {
1383
                        // major, non-first release
1384
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
×
1385
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
×
1386
                    }
1387
                }
1388
                if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) {
×
1389
                    //Also set publication date if this is the first
1390
                    if(dateTime != null) {
×
1391
                      ds.setPublicationDate(Timestamp.valueOf(dateTime));
×
1392
                    }
1393
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
1394
                    // is null, so set it here.
1395
                    ds.setReleaseUser((AuthenticatedUser) user);
×
1396
                }
1397
            } catch (Exception e) {
×
1398
                logger.fine(e.getMessage());
×
1399
                throw new BadRequestException("Unable to set publication date ("
×
1400
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
×
1401
            }
×
1402
            /*
1403
             * Note: The code here mirrors that in the
1404
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
1405
             * to the core logic (i.e. beyond updating the messaging about results) should
1406
             * be applied to the code there as well.
1407
             */
1408
            String errorMsg = null;
×
1409
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);
×
1410

1411
            try {
1412
                // ToDo - should this be in onSuccess()? May relate to todo above
1413
                if (prePubWf.isPresent()) {
×
1414
                    // Start the workflow, the workflow will call FinalizeDatasetPublication later
1415
                    wfService.start(prePubWf.get(),
×
1416
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
×
1417
                            false);
1418
                } else {
1419
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
×
1420
                            createDataverseRequest(user), !contactPIDProvider);
×
1421
                    ds = commandEngine.submit(cmd);
×
1422
                }
1423
            } catch (CommandException ex) {
×
1424
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
1425
                logger.severe(ex.getMessage());
×
1426
            }
×
1427

1428
            if (errorMsg != null) {
×
1429
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
×
1430
            } else {
1431
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
×
1432
            }
1433

1434
        } catch (WrappedResponse ex) {
×
1435
            return ex.getResponse();
×
1436
        }
1437
    }
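    // One plausible shape for the JSON-LD body consumed above (placeholder date; the assumption
    // is that the decontextualized term resolves to http://schema.org/datePublished):
    //   { "@context": { "schema": "http://schema.org/" }, "schema:datePublished": "2020-01-01" }
    // POSTed to {id}/actions/:releasemigrated with Content-Type: application/ld+json.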
1438

1439
    @POST
1440
    @AuthRequired
1441
    @Path("{id}/move/{targetDataverseAlias}")
1442
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
1443
        try {
1444
            User u = getRequestUser(crc);
×
1445
            Dataset ds = findDatasetOrDie(id);
×
1446
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
×
1447
            if (target == null) {
×
1448
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
×
1449
            }
1450
            //Command requires Super user - it will be tested by the command
1451
            execCommand(new MoveDatasetCommand(
×
1452
                    createDataverseRequest(u), ds, target, force
×
1453
            ));
1454
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
×
1455
        } catch (WrappedResponse ex) {
×
1456
            if (ex.getCause() instanceof UnforcedCommandException) {
×
1457
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
×
1458
            } else {
1459
                return ex.getResponse();
×
1460
            }
1461
        }
1462
    }
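    // Illustrative move call (placeholder values; superuser privilege is enforced by
    // MoveDatasetCommand itself):
    //   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
    //        "$SERVER_URL/api/datasets/$ID/move/$TARGET_ALIAS?forceMove=true"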
1463

1464
    @POST
1465
    @AuthRequired
1466
    @Path("{id}/files/actions/:set-embargo")
1467
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1468

1469
        // user is authenticated
1470
        AuthenticatedUser authenticatedUser = null;
×
1471
        try {
1472
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1473
        } catch (WrappedResponse ex) {
×
1474
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1475
        }
×
1476

1477
        Dataset dataset;
1478
        try {
1479
            dataset = findDatasetOrDie(id);
×
1480
        } catch (WrappedResponse ex) {
×
1481
            return ex.getResponse();
×
1482
        }
×
1483
        
1484
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1485
        
1486
        if (!hasValidTerms){
×
1487
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1488
        }
1489

1490
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1491
        /*
1492
         * This is only a pre-test - if there's no draft version, there are clearly no
1493
         * files that a normal user can change. The converse is not true. A draft
1494
         * version could contain only files that have already been released. Further, we
1495
         * haven't checked the file list yet so the user could still be trying to change
1496
         * released files even if there are some unreleased/draft-only files. Doing this
1497
         * check here does avoid having to do further parsing for some error cases. It
1498
         * also checks the user can edit this dataset, so we don't have to make that
1499
         * check later.
1500
         */
1501

1502
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1503
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1504
        }
1505

1506
        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
1507
        long maxEmbargoDurationInMonths = 0;
×
1508
        try {
1509
            maxEmbargoDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1510
        } catch (NumberFormatException nfe){
×
1511
            if (nfe.getMessage().contains("null")) {
×
1512
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1513
            }
1514
        }
×
1515
        if (maxEmbargoDurationInMonths == 0){
×
1516
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1517
        }
1518

1519
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1520

1521
        Embargo embargo = new Embargo();
×
1522

1523

1524
        LocalDate currentDateTime = LocalDate.now();
×
1525
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));
×
1526

1527
        // check :MaxEmbargoDurationInMonths if -1
1528
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
×
1529
        // dateAvailable is not in the past
1530
        if (dateAvailable.isAfter(currentDateTime)){
×
1531
            embargo.setDateAvailable(dateAvailable);
×
1532
        } else {
1533
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
×
1534
        }
1535

1536
        // dateAvailable is within limits
1537
        if (maxEmbargoDateTime != null){
×
1538
            if (dateAvailable.isAfter(maxEmbargoDateTime)){
×
1539
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
×
1540
            }
1541
        }
1542

1543
        embargo.setReason(json.getString("reason"));
×
1544

1545
        List<DataFile> datasetFiles = dataset.getFiles();
×
1546
        List<DataFile> filesToEmbargo = new LinkedList<>();
×
1547

1548
        // extract fileIds from json, find datafiles and add to list
1549
        if (json.containsKey("fileIds")){
×
1550
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1551
            for (JsonValue jsv : fileIds) {
×
1552
                try {
1553
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1554
                    filesToEmbargo.add(dataFile);
×
1555
                } catch (WrappedResponse ex) {
×
1556
                    return ex.getResponse();
×
1557
                }
×
1558
            }
×
1559
        }
1560

1561
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1562
        // check if files belong to dataset
1563
        if (datasetFiles.containsAll(filesToEmbargo)) {
×
1564
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1565
            boolean badFiles = false;
×
1566
            for (DataFile datafile : filesToEmbargo) {
×
1567
                // superuser can overrule an existing embargo, even on released files
1568
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
1569
                    restrictedFiles.add(datafile.getId());
×
1570
                    badFiles = true;
×
1571
                }
1572
            }
×
1573
            if (badFiles) {
×
1574
                return Response.status(Status.FORBIDDEN)
×
1575
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1576
                                .add("message", "You do not have permission to embargo the following files")
×
1577
                                .add("files", restrictedFiles).build())
×
1578
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1579
            }
1580
            embargo=embargoService.merge(embargo);
×
1581
            // Good request, so add the embargo. Track any existing embargoes so we can
1582
            // delete them if there are no files left that reference them.
1583
            for (DataFile datafile : filesToEmbargo) {
×
1584
                Embargo emb = datafile.getEmbargo();
×
1585
                if (emb != null) {
×
1586
                    emb.getDataFiles().remove(datafile);
×
1587
                    if (emb.getDataFiles().isEmpty()) {
×
1588
                        orphanedEmbargoes.add(emb);
×
1589
                    }
1590
                }
1591
                // Save merges the datafile with an embargo into the context
1592
                datafile.setEmbargo(embargo);
×
1593
                fileService.save(datafile);
×
1594
            }
×
1595
            //Call service to get action logged
1596
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
×
1597
            if (orphanedEmbargoes.size() > 0) {
×
1598
                for (Embargo emb : orphanedEmbargoes) {
×
1599
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1600
                }
×
1601
            }
1602
            //If superuser, report changes to any released files
1603
            if (authenticatedUser.isSuperuser()) {
×
1604
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
×
1605
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
1606
                if (!releasedFiles.isBlank()) {
×
1607
                    actionLogSvc
×
1608
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
×
1609
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
×
1610
                                            + releasedFiles + ".")
1611
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1612
                }
1613
            }
1614
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
×
1615
        } else {
1616
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1617
        }
1618
    }
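    // Illustrative request body for {id}/files/actions/:set-embargo (placeholder values;
    // dateAvailable is an ISO local date parsed with LocalDate.parse):
    //   { "dateAvailable": "2026-06-01", "reason": "Pending publication", "fileIds": [101, 102] }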
1619

1620
    @POST
1621
    @AuthRequired
1622
    @Path("{id}/files/actions/:unset-embargo")
1623
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1624

1625
        // user is authenticated
1626
        AuthenticatedUser authenticatedUser = null;
×
1627
        try {
1628
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1629
        } catch (WrappedResponse ex) {
×
1630
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1631
        }
×
1632

1633
        Dataset dataset;
1634
        try {
1635
            dataset = findDatasetOrDie(id);
×
1636
        } catch (WrappedResponse ex) {
×
1637
            return ex.getResponse();
×
1638
        }
×
1639

1640
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1641
        // check if files are unreleased(DRAFT?)
1642
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1643
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1644
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1645
        }
1646

1647
        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
1648
        //Todo - is 400 the right status when embargoes are not enabled?
1649
        //Todo - handle getting the Long duration in one place (settings getLong method? or is that only in the view-scoped wrapper?)
1650
        int maxEmbargoDurationInMonths = 0;
×
1651
        try {
1652
            maxEmbargoDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1653
        } catch (NumberFormatException nfe){
×
1654
            if (nfe.getMessage().contains("null")) {
×
1655
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1656
            }
1657
        }
×
1658
        if (maxEmbargoDurationInMonths == 0){
×
1659
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1660
        }
1661

1662
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1663

1664
        List<DataFile> datasetFiles = dataset.getFiles();
×
1665
        List<DataFile> embargoFilesToUnset = new LinkedList<>();
×
1666

1667
        // extract fileIds from json, find datafiles and add to list
1668
        if (json.containsKey("fileIds")){
×
1669
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1670
            for (JsonValue jsv : fileIds) {
×
1671
                try {
1672
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1673
                    embargoFilesToUnset.add(dataFile);
×
1674
                } catch (WrappedResponse ex) {
×
1675
                    return ex.getResponse();
×
1676
                }
×
1677
            }
×
1678
        }
1679

1680
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1681
        // check if files belong to dataset
1682
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
×
1683
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1684
            boolean badFiles = false;
×
1685
            for (DataFile datafile : embargoFilesToUnset) {
×
1686
                // superuser can overrule an existing embargo, even on released files
1687
                if (datafile.getEmbargo() == null || (datafile.isReleased() && !authenticatedUser.isSuperuser())) {
×
1688
                    restrictedFiles.add(datafile.getId());
×
1689
                    badFiles = true;
×
1690
                }
1691
            }
×
1692
            if (badFiles) {
×
1693
                return Response.status(Status.FORBIDDEN)
×
1694
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1695
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
×
1696
                                .add("files", restrictedFiles).build())
×
1697
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1698
            }
1699
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
1700
            // delete them if there are no files left that reference them.
1701
            for (DataFile datafile : embargoFilesToUnset) {
×
1702
                Embargo emb = datafile.getEmbargo();
×
1703
                if (emb != null) {
×
1704
                    emb.getDataFiles().remove(datafile);
×
1705
                    if (emb.getDataFiles().isEmpty()) {
×
1706
                        orphanedEmbargoes.add(emb);
×
1707
                    }
1708
                }
1709
                // Save merges the datafile with an embargo into the context
1710
                datafile.setEmbargo(null);
×
1711
                fileService.save(datafile);
×
1712
            }
×
1713
            if (orphanedEmbargoes.size() > 0) {
×
1714
                for (Embargo emb : orphanedEmbargoes) {
×
1715
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1716
                }
×
1717
            }
1718
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
1719
            if(!releasedFiles.isBlank()) {
×
1720
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
×
1721
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
1722
                actionLogSvc.log(removeRecord);
×
1723
            }
1724
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
×
1725
        } else {
1726
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1727
        }
1728
    }
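    // Illustrative request body for {id}/files/actions/:unset-embargo (placeholder ids):
    //   { "fileIds": [101, 102] }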
1729

1730
    @POST
1731
    @AuthRequired
1732
    @Path("{id}/files/actions/:set-retention")
1733
    public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1734

1735
        // user is authenticated
1736
        AuthenticatedUser authenticatedUser = null;
×
1737
        try {
1738
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1739
        } catch (WrappedResponse ex) {
×
1740
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1741
        }
×
1742

1743
        Dataset dataset;
1744
        try {
1745
            dataset = findDatasetOrDie(id);
×
1746
        } catch (WrappedResponse ex) {
×
1747
            return ex.getResponse();
×
1748
        }
×
1749

1750
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1751

1752
        if (!hasValidTerms){
×
1753
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1754
        }
1755

1756
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1757
        // check if files are unreleased(DRAFT?)
1758
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1759
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1760
        }
1761

1762
        // Check whether retention periods are allowed: read the :MinRetentionDurationInMonths setting; if it is 0 or not set (null), return 400.
1763
        long minRetentionDurationInMonths = 0;
×
1764
        try {
1765
            minRetentionDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
×
1766
        } catch (NumberFormatException nfe){
×
1767
            if (nfe.getMessage().contains("null")) {
×
1768
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1769
            }
1770
        }
×
1771
        if (minRetentionDurationInMonths == 0){
×
1772
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1773
        }
1774

1775
        JsonObject json;
1776
        try {
1777
            json = JsonUtil.getJsonObject(jsonBody);
×
1778
        } catch (JsonException ex) {
×
1779
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
×
1780
        }
×
1781

1782
        Retention retention = new Retention();
×
1783

1784

1785
        LocalDate currentDateTime = LocalDate.now();
×
1786

1787
        // Extract the dateUnavailable - check if specified and valid
1788
        String dateUnavailableStr = "";
×
1789
        LocalDate dateUnavailable;
1790
        try {
1791
            dateUnavailableStr = json.getString("dateUnavailable");
×
1792
            dateUnavailable = LocalDate.parse(dateUnavailableStr);
×
1793
        } catch (NullPointerException npex) {
×
1794
            return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified");
×
1795
        } catch (ClassCastException ccex) {
×
1796
            return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string");
×
1797
        } catch (DateTimeParseException dtpex) {
×
1798
            return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr);
×
1799
        }
×
1800

1801
        // check :MinRetentionDurationInMonths if -1
1802
        LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? LocalDate.now().plusMonths(minRetentionDurationInMonths) : null;
×
1803
        // dateUnavailable is not in the past
1804
        if (dateUnavailable.isAfter(currentDateTime)){
×
1805
            retention.setDateUnavailable(dateUnavailable);
×
1806
        } else {
1807
            return error(Status.BAD_REQUEST, "Date unavailable can not be in the past");
×
1808
        }
1809

1810
        // dateUnavailable is within limits
1811
        if (minRetentionDateTime != null){
×
1812
            if (dateUnavailable.isBefore(minRetentionDateTime)){
×
1813
                return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
×
1814
            }
1815
        }
1816
        
1817
        try {
1818
            String reason = json.getString("reason");
×
1819
            retention.setReason(reason);
×
1820
        } catch (NullPointerException npex) {
×
1821
            // ignoring; no reason specified is OK, it is optional
1822
        } catch (ClassCastException ccex) {
×
1823
            return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string");
×
1824
        }
×
1825

1826

1827
        List<DataFile> datasetFiles = dataset.getFiles();
×
1828
        List<DataFile> filesToRetention = new LinkedList<>();
×
1829

1830
        // extract fileIds from json, find datafiles and add to list
1831
        if (json.containsKey("fileIds")){
×
1832
            try {
1833
                JsonArray fileIds = json.getJsonArray("fileIds");
×
1834
                for (JsonValue jsv : fileIds) {
×
1835
                    try {
1836
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1837
                        filesToRetention.add(dataFile);
×
1838
                    } catch (WrappedResponse ex) {
×
1839
                        return ex.getResponse();
×
1840
                    }
×
1841
                }
×
1842
            } catch (ClassCastException ccex) {
×
1843
                return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings");
×
1844
            } catch (NullPointerException npex) {
×
1845
                return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified");
×
1846
            }
×
1847
        } else {
1848
            return error(Status.BAD_REQUEST, "No fileIds specified");
×
1849
        }
1850

1851
        List<Retention> orphanedRetentions = new ArrayList<Retention>();
×
1852
        // check if files belong to dataset
1853
        if (datasetFiles.containsAll(filesToRetention)) {
×
1854
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1855
            boolean badFiles = false;
×
1856
            for (DataFile datafile : filesToRetention) {
×
1857
                // superuser can overrule an existing retention, even on released files
1858
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
1859
                    restrictedFiles.add(datafile.getId());
×
1860
                    badFiles = true;
×
1861
                }
1862
            }
×
1863
            if (badFiles) {
×
1864
                return Response.status(Status.FORBIDDEN)
×
1865
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1866
                                .add("message", "You do not have permission to set a retention period for the following files")
×
1867
                                .add("files", restrictedFiles).build())
×
1868
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1869
            }
1870
            retention=retentionService.merge(retention);
×
1871
            // Good request, so add the retention. Track any existing retentions so we can
1872
            // delete them if there are no files left that reference them.
1873
            for (DataFile datafile : filesToRetention) {
×
1874
                Retention ret = datafile.getRetention();
×
1875
                if (ret != null) {
×
1876
                    ret.getDataFiles().remove(datafile);
×
1877
                    if (ret.getDataFiles().isEmpty()) {
×
1878
                        orphanedRetentions.add(ret);
×
1879
                    }
1880
                }
1881
                // Save merges the datafile with a retention into the context
1882
                datafile.setRetention(retention);
×
1883
                fileService.save(datafile);
×
1884
            }
×
1885
            //Call service to get action logged
1886
            long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier());
×
1887
            if (orphanedRetentions.size() > 0) {
×
1888
                for (Retention ret : orphanedRetentions) {
×
1889
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
×
1890
                }
×
1891
            }
1892
            //If superuser, report changes to any released files
1893
            if (authenticatedUser.isSuperuser()) {
×
1894
                String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased())
×
1895
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
1896
                if (!releasedFiles.isBlank()) {
×
1897
                    actionLogSvc
×
1898
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo")
×
1899
                                    .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) "
×
1900
                                            + releasedFiles + ".")
1901
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1902
                }
1903
            }
1904
            return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated"));
×
1905
        } else {
1906
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1907
        }
1908
    }
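    // Illustrative request body for {id}/files/actions/:set-retention (placeholder values;
    // "reason" is optional, and dateUnavailable must be at least :MinRetentionDurationInMonths away):
    //   { "dateUnavailable": "2030-01-01", "reason": "Data protection policy", "fileIds": [101] }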
1909

1910
    @POST
1911
    @AuthRequired
1912
    @Path("{id}/files/actions/:unset-retention")
1913
    public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1914

1915
        // user is authenticated
1916
        AuthenticatedUser authenticatedUser = null;
×
1917
        try {
1918
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1919
        } catch (WrappedResponse ex) {
×
1920
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1921
        }
×
1922

1923
        Dataset dataset;
1924
        try {
1925
            dataset = findDatasetOrDie(id);
×
1926
        } catch (WrappedResponse ex) {
×
1927
            return ex.getResponse();
×
1928
        }
×
1929

1930
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1931
        // check if files are unreleased(DRAFT?)
1932
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1933
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1934
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1935
        }
1936

1937
        // Check whether retention periods are allowed: read the :MinRetentionDurationInMonths setting; if it is 0 or not set (null), return 400.
1938
        int minRetentionDurationInMonths = 0;
×
1939
        try {
1940
            minRetentionDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
×
1941
        } catch (NumberFormatException nfe){
×
1942
            if (nfe.getMessage().contains("null")) {
×
1943
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1944
            }
1945
        }
×
1946
        if (minRetentionDurationInMonths == 0){
×
1947
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1948
        }
1949

1950
        JsonObject json;
1951
        try {
1952
            json = JsonUtil.getJsonObject(jsonBody);
×
1953
        } catch (JsonException ex) {
×
1954
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
×
1955
        }
×
1956

1957
        List<DataFile> datasetFiles = dataset.getFiles();
×
1958
        List<DataFile> retentionFilesToUnset = new LinkedList<>();
×
1959

1960
        // extract fileIds from json, find datafiles and add to list
1961
        if (json.containsKey("fileIds")){
×
1962
            try {
1963
                JsonArray fileIds = json.getJsonArray("fileIds");
×
1964
                for (JsonValue jsv : fileIds) {
×
1965
                    try {
1966
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1967
                        retentionFilesToUnset.add(dataFile);
×
1968
                    } catch (WrappedResponse ex) {
×
1969
                        return ex.getResponse();
×
1970
                    }
×
1971
                }
×
1972
            } catch (ClassCastException ccex) {
×
1973
                return error(Status.BAD_REQUEST, "fileIds must be an array of id strings");
×
1974
            } catch (NullPointerException npex) {
×
1975
                return error(Status.BAD_REQUEST, "No fileIds specified");
×
1976
            }
×
1977
        } else {
1978
            return error(Status.BAD_REQUEST, "No fileIds specified");
×
1979
        }
1980

1981
        List<Retention> orphanedRetentions = new ArrayList<Retention>();
×
1982
        // check if files belong to dataset
1983
        if (datasetFiles.containsAll(retentionFilesToUnset)) {
×
1984
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1985
            boolean badFiles = false;
×
1986
            for (DataFile datafile : retentionFilesToUnset) {
×
1987
                // superuser can overrule an existing retention, even on released files
1988
                if (datafile.getRetention()==null || ((datafile.isReleased() && datafile.getRetention() != null) && !authenticatedUser.isSuperuser())) {
×
1989
                    restrictedFiles.add(datafile.getId());
×
1990
                    badFiles = true;
×
1991
                }
1992
            }
×
1993
            if (badFiles) {
×
1994
                return Response.status(Status.FORBIDDEN)
×
1995
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1996
                                .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods")
×
1997
                                .add("files", restrictedFiles).build())
×
1998
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1999
            }
2000
            // Good request, so remove the retention from the files. Track any existing retentions so we can
2001
            // delete them if there are no files left that reference them.
2002
            for (DataFile datafile : retentionFilesToUnset) {
×
2003
                Retention ret = datafile.getRetention();
×
2004
                if (ret != null) {
×
2005
                    ret.getDataFiles().remove(datafile);
×
2006
                    if (ret.getDataFiles().isEmpty()) {
×
2007
                        orphanedRetentions.add(ret);
×
2008
                    }
2009
                }
2010
                // Save merges the datafile with a retention into the context
2011
                datafile.setRetention(null);
×
2012
                fileService.save(datafile);
×
2013
            }
×
2014
            if (orphanedRetentions.size() > 0) {
×
2015
                for (Retention ret : orphanedRetentions) {
×
2016
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
×
2017
                }
×
2018
            }
2019
            String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
2020
            if(!releasedFiles.isBlank()) {
×
2021
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + ".");
×
2022
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
2023
                actionLogSvc.log(removeRecord);
×
2024
            }
2025
            return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)"));
×
2026
        } else {
2027
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
2028
        }
2029
    }
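    // Illustrative usage (not part of the original source): the JSON body is expected to carry a
    // "fileIds" array, as parsed above. Assuming the standard /api/datasets base path, a server URL
    // in $SERVER_URL, and an API token in $API_TOKEN (example IDs are hypothetical), a call could
    // look like:
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
    //        -H "Content-Type: application/json" \
    //        -d '{"fileIds":[300,301]}' \
    //        "$SERVER_URL/api/datasets/24/files/actions/:unset-retention"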
2030

2031
    @PUT
2032
    @AuthRequired
2033
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
2034
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
2035
        try {
2036
            User u = getRequestUser(crc);
×
2037
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
2038
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
2039
            if (linked == null){
×
2040
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
2041
            }
2042
            if (linking == null) {
×
2043
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
2044
            }
2045
            execCommand(new LinkDatasetCommand(
×
2046
                    createDataverseRequest(u), linking, linked
×
2047
            ));
2048
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
2049
        } catch (WrappedResponse ex) {
×
2050
            return ex.getResponse();
×
2051
        }
2052
    }
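    // Illustrative usage (not part of the original source): linking dataset 24 into the dataverse
    // with alias "root", assuming the same base path and token placeholders as above (the ID and
    // alias are hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
    //        "$SERVER_URL/api/datasets/24/link/root"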
2053

2054
    @GET
2055
    @Path("{id}/versions/{versionId}/customlicense")
2056
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
2057
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
2058
        User user = session.getUser();
×
2059
        String persistentId;
2060
        try {
2061
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
×
2062
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
×
2063
            }
2064
            persistentId = getRequestParameter(":persistentId".substring(1));
×
2065
            if (versionId.equals(DS_VERSION_DRAFT)) {
×
2066
                versionId = "DRAFT";
×
2067
            }
2068
        } catch (WrappedResponse wrappedResponse) {
×
2069
            return wrappedResponse.getResponse();
×
2070
        }
×
2071
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
×
2072
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
×
2073
    }
2074

2075

2076
    @GET
2077
    @AuthRequired
2078
    @Path("{id}/links")
2079
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
2080
        try {
2081
            User u = getRequestUser(crc);
×
2082
            if (!u.isSuperuser()) {
×
2083
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
2084
            }
2085
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2086

2087
            long datasetId = dataset.getId();
×
2088
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
2089
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
2090
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
2091
                JsonObjectBuilder datasetBuilder = Json.createObjectBuilder();
×
2092
                datasetBuilder.add("id", dataverse.getId());
×
2093
                datasetBuilder.add("alias", dataverse.getAlias());
×
2094
                datasetBuilder.add("displayName", dataverse.getDisplayName());
×
2095
                dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build());
×
2096
            }
×
2097
            JsonObjectBuilder response = Json.createObjectBuilder();
×
2098
            response.add("id", datasetId);
×
2099
            response.add("identifier", dataset.getIdentifier());
×
2100
            response.add("linked-dataverses", dataversesThatLinkToThisDatasetIdBuilder);
×
2101
            return ok(response);
×
2102
        } catch (WrappedResponse wr) {
×
2103
            return wr.getResponse();
×
2104
        }
2105
    }
2106

2107
    /**
2108
     * Assign a given role to a given user or group on a dataset
2109
     * @param ra     role assignment DTO
2110
     * @param id     dataset id
2111
     * @param apiKey
2112
     */
2113
    @POST
2114
    @AuthRequired
2115
    @Path("{identifier}/assignments")
2116
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
2117
        try {
2118
            Dataset dataset = findDatasetOrDie(id);
×
2119
            
2120
            RoleAssignee assignee = findAssignee(ra.getAssignee());
×
2121
            if (assignee == null) {
×
2122
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
×
2123
            }
2124
            
2125
            DataverseRole theRole;
2126
            Dataverse dv = dataset.getOwner();
×
2127
            theRole = null;
×
2128
            while ((theRole == null) && (dv != null)) {
×
2129
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
×
2130
                    if (aRole.getAlias().equals(ra.getRole())) {
×
2131
                        theRole = aRole;
×
2132
                        break;
×
2133
                    }
2134
                }
×
2135
                dv = dv.getOwner();
×
2136
            }
2137
            if (theRole == null) {
×
2138
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
×
2139
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
×
2140
            }
2141

2142
            String privateUrlToken = null;
×
2143
            return ok(
×
2144
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
×
2145
        } catch (WrappedResponse ex) {
×
2146
            List<String> args = Arrays.asList(ex.getMessage());
×
2147
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
×
2148
            return ex.getResponse();
×
2149
        }
2150

2151
    }
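    // Illustrative usage (not part of the original source): the RoleAssignmentDTO is read from the
    // request body; based on the getAssignee()/getRole() calls above, a JSON body with "assignee"
    // and "role" keys is assumed here (identifiers are hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
    //        -H "Content-Type: application/json" \
    //        -d '{"assignee":"@jsmith","role":"curator"}' \
    //        "$SERVER_URL/api/datasets/24/assignments"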
2152
    
2153
    @DELETE
2154
    @AuthRequired
2155
    @Path("{identifier}/assignments/{id}")
2156
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
2157
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
×
2158
        if (ra != null) {
×
2159
            try {
2160
                findDatasetOrDie(dsId);
×
2161
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
×
2162
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
×
2163
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
×
2164
            } catch (WrappedResponse ex) {
×
2165
                return ex.getResponse();
×
2166
            }
2167
        } else {
2168
            List<String> args = Arrays.asList(Long.toString(assignmentId));
×
2169
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
×
2170
        }
2171
    }
2172

2173
    @GET
2174
    @AuthRequired
2175
    @Path("{identifier}/assignments")
2176
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2177
        return response(req ->
×
2178
                ok(execCommand(
×
2179
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
×
2180
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
×
2181
    }
2182

2183
    @GET
2184
    @AuthRequired
2185
    @Deprecated(forRemoval = true, since = "2024-10-17")
2186
    @Path("{id}/privateUrl")
2187
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2188
        return getPreviewUrlData(crc, idSupplied);
×
2189
    }
2190

2191
    @POST
2192
    @AuthRequired
2193
    @Deprecated(forRemoval = true, since = "2024-10-17")
2194
    @Path("{id}/privateUrl")
2195
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam("anonymizedAccess") boolean anonymizedAccess) {
2196
        return createPreviewUrl(crc, idSupplied, anonymizedAccess);
×
2197
    }
2198

2199
    @DELETE
2200
    @AuthRequired
2201
    @Deprecated(forRemoval = true, since = "2024-10-17")
2202
    @Path("{id}/privateUrl")
2203
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2204
        return deletePreviewUrl(crc, idSupplied);
×
2205
    }
2206
    
2207
    @GET
2208
    @AuthRequired
2209
    @Path("{id}/previewUrl")
2210
    public Response getPreviewUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2211
        return response( req -> {
×
2212
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
2213
            return (privateUrl != null) ? ok(json(privateUrl))
×
2214
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
2215
        }, getRequestUser(crc));
×
2216
    }
2217

2218
    @POST
2219
    @AuthRequired
2220
    @Path("{id}/previewUrl")
2221
    public Response createPreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
2222
        if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
×
2223
            throw new NotAcceptableException("Anonymized Access not enabled");
×
2224
        }
2225
        return response(req ->
×
2226
                ok(json(execCommand(
×
2227
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
×
2228
    }
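    // Illustrative usage (not part of the original source): creating a preview URL, optionally with
    // anonymized access, which requires the :AnonymizedFieldTypeNames setting as checked above
    // (the dataset ID is hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
    //        "$SERVER_URL/api/datasets/24/previewUrl?anonymizedAccess=true"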
2229

2230
    @DELETE
2231
    @AuthRequired
2232
    @Path("{id}/previewUrl")
2233
    public Response deletePreviewUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2234
        return response( req -> {
×
2235
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2236
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
2237
            if (privateUrl != null) {
×
2238
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
2239
                return ok("Private URL deleted.");
×
2240
            } else {
2241
                return notFound("No Private URL to delete.");
×
2242
            }
2243
        }, getRequestUser(crc));
×
2244
    }
2245

2246

2247
    @GET
2248
    @AuthRequired
2249
    @Path("{id}/thumbnail/candidates")
2250
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2251
        try {
2252
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2253
            boolean canUpdateThumbnail = false;
×
2254
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
×
2255
            if (!canUpdateThumbnail) {
×
2256
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
×
2257
            }
2258
            JsonArrayBuilder data = Json.createArrayBuilder();
×
2259
            boolean considerDatasetLogoAsCandidate = true;
×
2260
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
×
2261
                JsonObjectBuilder candidate = Json.createObjectBuilder();
×
2262
                String base64image = datasetThumbnail.getBase64image();
×
2263
                if (base64image != null) {
×
2264
                    logger.fine("found a candidate!");
×
2265
                    candidate.add("base64image", base64image);
×
2266
                }
2267
                DataFile dataFile = datasetThumbnail.getDataFile();
×
2268
                if (dataFile != null) {
×
2269
                    candidate.add("dataFileId", dataFile.getId());
×
2270
                }
2271
                data.add(candidate);
×
2272
            }
×
2273
            return ok(data);
×
2274
        } catch (WrappedResponse ex) {
×
2275
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
×
2276
        }
2277
    }
2278

2279
    @GET
2280
    @Produces({"image/png"})
2281
    @Path("{id}/thumbnail")
2282
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
2283
        try {
2284
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2285
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
×
2286
            if(is == null) {
×
2287
                return notFound("Thumbnail not available");
×
2288
            }
2289
            return Response.ok(is).build();
×
2290
        } catch (WrappedResponse wr) {
×
2291
            return notFound("Thumbnail not available");
×
2292
        }
2293
    }
2294

2295
    @GET
2296
    @Produces({ "image/png" })
2297
    @Path("{id}/logo")
2298
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
2299
        try {
2300
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2301
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
×
2302
            if (is == null) {
×
2303
                return notFound("Logo not available");
×
2304
            }
2305
            return Response.ok(is).build();
×
2306
        } catch (WrappedResponse wr) {
×
2307
            return notFound("Logo not available");
×
2308
        }
2309
    }
2310

2311
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
2312
    @POST
2313
    @AuthRequired
2314
    @Path("{id}/thumbnail/{dataFileId}")
2315
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
2316
        try {
2317
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
2318
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
2319
        } catch (WrappedResponse wr) {
×
2320
            return wr.getResponse();
×
2321
        }
2322
    }
2323

2324
    @POST
2325
    @AuthRequired
2326
    @Path("{id}/thumbnail")
2327
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2328
    @Produces("application/json")
2329
    @Operation(summary = "Uploads a logo for a dataset", 
2330
               description = "Uploads a logo for a dataset")
2331
    @APIResponse(responseCode = "200",
2332
               description = "Dataset logo uploaded successfully")
2333
    @Tag(name = "uploadDatasetLogo", 
2334
         description = "Uploads a logo for a dataset")
2335
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))          
2336
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
2337
        try {
2338
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
×
2339
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
2340
        } catch (WrappedResponse wr) {
×
2341
            return wr.getResponse();
×
2342
        }
2343
    }
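    // Illustrative usage (not part of the original source): uploading a non-datafile image as the
    // dataset thumbnail via the multipart "file" part declared above (file name and dataset ID are
    // hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
    //        -F "file=@logo.png" \
    //        "$SERVER_URL/api/datasets/24/thumbnail"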
2344

2345
    @DELETE
2346
    @AuthRequired
2347
    @Path("{id}/thumbnail")
2348
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2349
        try {
2350
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
2351
            return ok("Dataset thumbnail removed.");
×
2352
        } catch (WrappedResponse wr) {
×
2353
            return wr.getResponse();
×
2354
        }
2355
    }
2356

2357
    @Deprecated(forRemoval = true, since = "2024-07-07")
2358
    @GET
2359
    @AuthRequired
2360
    @Path("{identifier}/dataCaptureModule/rsync")
2361
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2362
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
2363
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
×
2364
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
×
2365
        }
2366
        Dataset dataset = null;
×
2367
        try {
2368
            dataset = findDatasetOrDie(id);
×
2369
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
2370
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
×
2371
            
2372
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
×
2373
            if (lock == null) {
×
2374
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
2375
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
×
2376
            }
2377
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
×
2378
        } catch (WrappedResponse wr) {
×
2379
            return wr.getResponse();
×
2380
        } catch (EJBException ex) {
×
2381
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
×
2382
        }
2383
    }
2384
    
2385
    /**
2386
     * This api endpoint triggers the creation of a "package" file in a dataset
2387
     * after that package has been moved onto the same filesystem via the Data Capture Module.
2388
     * The package is really just the way Dataverse interprets a folder created by the DCM, treating it as a single file.
2389
     * The "package" can be downloaded over RSAL.
2390
     *
2391
     * This endpoint currently supports both posix file storage and AWS S3 storage in Dataverse, and acts according to whichever one is active.
2392
     *
2393
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
2394
     * But due to the possibly immense number of files (millions), the package approach was taken.
2395
     * This is relevant because the posix ("file") code contains many remnants of that development work.
2396
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
2397
     * -MAD 4.9.1
2398
     */
2399
    @POST
2400
    @AuthRequired
2401
    @Path("{identifier}/dataCaptureModule/checksumValidation")
2402
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
2403
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
2404
        AuthenticatedUser authenticatedUser = null;
×
2405
        try {
2406
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2407
        } catch (WrappedResponse ex) {
×
2408
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
2409
        }
×
2410
        if (!authenticatedUser.isSuperuser()) {
×
2411
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2412
        }
2413
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
2414
        try {
2415
            Dataset dataset = findDatasetOrDie(id);
×
2416
            if ("validation passed".equals(statusMessageFromDcm)) {
×
2417
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
2418

2419
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
×
2420
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
2421
                int totalSize = jsonFromDcm.getInt("totalSize");
×
2422
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
×
2423
                
2424
                if (storageDriverType.equals("file")) {
×
2425
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
2426

2427
                    ImportMode importMode = ImportMode.MERGE;
×
2428
                    try {
2429
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, new Long(totalSize), importMode));
×
2430
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
2431
                        String message = jsonFromImportJobKickoff.getString("message");
×
2432
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2433
                        job.add("jobId", jobId);
×
2434
                        job.add("message", message);
×
2435
                        return ok(job);
×
2436
                    } catch (WrappedResponse wr) {
×
2437
                        String message = wr.getMessage();
×
2438
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
×
2439
                    }
2440
                } else if(storageDriverType.equals(DataAccess.S3)) {
×
2441
                    
2442
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
2443
                    try {
2444
                        
2445
                        // Where the heavy lifting is actually done: moving the S3 files over and making Dataverse aware of the existence of the package
2446
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
2447
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize));
×
2448
                        
2449
                        if (packageFile == null) {
×
2450
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
2451
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
2452
                        }
2453
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
2454
                        if (dcmLock == null) {
×
2455
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
2456
                        } else {
2457
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
2458
                            dataset.removeLock(dcmLock);
×
2459
                        }
2460
                        
2461
                        // update version using the command engine to enforce user permissions and constraints
2462
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
2463
                            try {
2464
                                Command<Dataset> cmd;
2465
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
×
2466
                                commandEngine.submit(cmd);
×
2467
                            } catch (CommandException ex) {
×
2468
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
2469
                            }
×
2470
                        } else {
2471
                            String constraintError = "ConstraintException updating DatasetVersion from batch job: dataset must be a "
×
2472
                                    + "single version in draft mode.";
2473
                            logger.log(Level.SEVERE, constraintError);
×
2474
                        }
2475

2476
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2477
                        return ok(job);
×
2478
                        
2479
                    } catch (IOException e) {
×
2480
                        String message = e.getMessage();
×
2481
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
×
2482
                    }
2483
                } else {
2484
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
×
2485
                }
2486
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
2487
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
×
2488
                distinctAuthors.values().forEach((value) -> {
×
2489
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2490
                });
×
2491
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
2492
                if (superUsers != null && !superUsers.isEmpty()) {
×
2493
                    superUsers.forEach((au) -> {
×
2494
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2495
                    });
×
2496
                }
2497
                return ok("User notified about checksum validation failure.");
×
2498
            } else {
2499
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
×
2500
            }
2501
        } catch (WrappedResponse ex) {
×
2502
            return ex.getResponse();
×
2503
        }
2504
    }
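    // Illustrative payload (not part of the original source): based on the fields read above, the
    // DCM callback is assumed to POST a superuser-authenticated JSON body such as the following
    // (values are hypothetical):
    //
    //   {
    //     "status": "validation passed",
    //     "uploadFolder": "DNXV2H",
    //     "totalSize": 1234567
    //   }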
2505
    
2506

2507
    @POST
2508
    @AuthRequired
2509
    @Path("{id}/submitForReview")
2510
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2511
        try {
2512
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
2513
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2514
            
2515
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2516
            
2517
            result.add("inReview", inReview);
×
2518
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2519
            return ok(result);
×
2520
        } catch (WrappedResponse wr) {
×
2521
            return wr.getResponse();
×
2522
        }
2523
    }
2524

2525
    @POST
2526
    @AuthRequired
2527
    @Path("{id}/returnToAuthor")
2528
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2529

2530
        if (jsonBody == null || jsonBody.isEmpty()) {
×
2531
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2532
        }
2533
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2534
        try {
2535
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2536
            String reasonForReturn = null;
×
2537
            reasonForReturn = json.getString("reasonForReturn");
×
2538
            if ((reasonForReturn == null || reasonForReturn.isEmpty())
×
2539
                    && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) {
×
2540
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2541
            }
2542
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2543
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2544

2545
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2546
            result.add("inReview", false);
×
2547
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2548
            return ok(result);
×
2549
        } catch (WrappedResponse wr) {
×
2550
            return wr.getResponse();
×
2551
        }
2552
    }
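    // Illustrative usage (not part of the original source): returning a dataset that is in review,
    // with the "reasonForReturn" field read above (the ID and text are hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
    //        -H "Content-Type: application/json" \
    //        -d '{"reasonForReturn":"Please add a README file."}' \
    //        "$SERVER_URL/api/datasets/24/returnToAuthor"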
2553

2554
    @GET
2555
    @AuthRequired
2556
    @Path("{id}/curationStatus")
2557
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2558
        try {
2559
            Dataset ds = findDatasetOrDie(idSupplied);
×
2560
            DatasetVersion dsv = ds.getLatestVersion();
×
2561
            User user = getRequestUser(crc);
×
2562
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
2563
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2564
            } else {
2565
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2566
            }
2567
        } catch (WrappedResponse wr) {
×
2568
            return wr.getResponse();
×
2569
        }
2570
    }
2571

2572
    @PUT
2573
    @AuthRequired
2574
    @Path("{id}/curationStatus")
2575
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2576
        Dataset ds = null;
×
2577
        User u = null;
×
2578
        try {
2579
            ds = findDatasetOrDie(idSupplied);
×
2580
            u = getRequestUser(crc);
×
2581
        } catch (WrappedResponse wr) {
×
2582
            return wr.getResponse();
×
2583
        }
×
2584
        try {
2585
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2586
            return ok("Curation Status updated");
×
2587
        } catch (WrappedResponse wr) {
×
2588
            // Just change to Bad Request and send
2589
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2590
        }
2591
    }
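    // Illustrative usage (not part of the original source): setting an external curation status on
    // the draft version via the "label" query parameter passed to SetCurationStatusCommand above
    // (the label text is hypothetical and assumed to match a label set configured on the instance):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
    //        "$SERVER_URL/api/datasets/24/curationStatus?label=Author%20contacted"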
2592

2593
    @DELETE
2594
    @AuthRequired
2595
    @Path("{id}/curationStatus")
2596
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2597
        Dataset ds = null;
×
2598
        User u = null;
×
2599
        try {
2600
            ds = findDatasetOrDie(idSupplied);
×
2601
            u = getRequestUser(crc);
×
2602
        } catch (WrappedResponse wr) {
×
2603
            return wr.getResponse();
×
2604
        }
×
2605
        try {
2606
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2607
            return ok("Curation Status deleted");
×
2608
        } catch (WrappedResponse wr) {
×
2609
            //Just change to Bad Request and send
2610
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2611
        }
2612
    }
2613

2614
    @GET
2615
    @AuthRequired
2616
    @Path("{id}/uploadurls")
2617
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2618
        try {
2619
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2620

2621
            boolean canUpdateDataset = false;
×
2622
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2623
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2624
            if (!canUpdateDataset) {
×
2625
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2626
            }
2627
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2628
            if (s3io == null) {
×
2629
                return error(Response.Status.NOT_FOUND,
×
2630
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2631
            }
2632
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
2633
            if (maxSize != null) {
×
2634
                if(fileSize > maxSize) {
×
2635
                    return error(Response.Status.BAD_REQUEST,
×
2636
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2637
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2638
                }
2639
            }
2640
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
2641
            if (limit != null) {
×
2642
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
2643
                    return error(Response.Status.BAD_REQUEST,
×
2644
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2645
                                    "The remaing file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2646
                }
2647
            }
2648
            JsonObjectBuilder response = null;
×
2649
            String storageIdentifier = null;
×
2650
            try {
2651
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
2652
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2653

2654
            } catch (IOException io) {
×
2655
                logger.warning(io.getMessage());
×
2656
                throw new WrappedResponse(io,
×
2657
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request"));
×
2658
            }
×
2659

2660
            response.add("storageIdentifier", storageIdentifier);
×
2661
            return ok(response);
×
2662
        } catch (WrappedResponse wr) {
×
2663
            return wr.getResponse();
×
2664
        }
2665
    }
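    // Illustrative usage (not part of the original source): requesting direct-upload URLs for a
    // 1 GB file on an S3-backed store; the response is expected to include presigned upload URLs
    // plus the "storageIdentifier" added above (the size and ID are hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" \
    //        "$SERVER_URL/api/datasets/24/uploadurls?size=1073741824"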
2666

2667
    @DELETE
2668
    @AuthRequired
2669
    @Path("mpupload")
2670
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2671
        try {
2672
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2673
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2674
            User user = session.getUser();
×
2675
            if (!user.isAuthenticated()) {
×
2676
                try {
2677
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2678
                } catch (WrappedResponse ex) {
×
2679
                    logger.info(
×
2680
                            "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
2681
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2682
                    throw ex;
×
2683
                }
×
2684
            }
2685
            boolean allowed = false;
×
2686
            if (dataset != null) {
×
2687
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2688
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2689
            } else {
2690
                /*
2691
                 * The only legitimate case where a global id won't correspond to a dataset is
2692
                 * for uploads during creation. Given that this call will still fail unless all
2693
                 * three parameters correspond to an active multipart upload, it should be safe
2694
                 * to allow the attempt for an authenticated user. If there are concerns about
2695
                 * permissions, one could check with the current design that the user is allowed
2696
                 * to create datasets in some dataverse that is configured to use the storage
2697
                 * provider specified in the storageidentifier, but testing for the ability to
2698
                 * create a dataset in a specific dataverse would require changing the design
2699
                 * somehow (e.g. adding the ownerId to this call).
2700
                 */
2701
                allowed = true;
×
2702
            }
2703
            if (!allowed) {
×
2704
                return error(Response.Status.FORBIDDEN,
×
2705
                        "You are not permitted to abort file uploads with the supplied parameters.");
2706
            }
2707
            try {
2708
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2709
            } catch (IOException io) {
×
2710
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2711
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2712
                logger.warning(io.getMessage());
×
2713
                throw new WrappedResponse(io,
×
2714
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2715
            }
×
2716
            return Response.noContent().build();
×
2717
        } catch (WrappedResponse wr) {
×
2718
            return wr.getResponse();
×
2719
        }
2720
    }
2721

2722
    @PUT
2723
    @AuthRequired
2724
    @Path("mpupload")
2725
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2726
        try {
2727
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2728
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2729
            User user = session.getUser();
×
2730
            if (!user.isAuthenticated()) {
×
2731
                try {
2732
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2733
                } catch (WrappedResponse ex) {
×
2734
                    logger.info(
×
2735
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2736
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2737
                    throw ex;
×
2738
                }
×
2739
            }
2740
            boolean allowed = false;
×
2741
            if (dataset != null) {
×
2742
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2743
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2744
            } else {
2745
                /*
2746
                 * The only legitimate case where a global id won't correspond to a dataset is
2747
                 * for uploads during creation. Given that this call will still fail unless all
2748
                 * three parameters correspond to an active multipart upload, it should be safe
2749
                 * to allow the attempt for an authenticated user. If there are concerns about
2750
                 * permissions, one could check with the current design that the user is allowed
2751
                 * to create datasets in some dataverse that is configured to use the storage
2752
                 * provider specified in the storageidentifier, but testing for the ability to
2753
                 * create a dataset in a specific dataverse would require changing the design
2754
                 * somehow (e.g. adding the ownerId to this call).
2755
                 */
2756
                allowed = true;
×
2757
            }
2758
            if (!allowed) {
×
2759
                return error(Response.Status.FORBIDDEN,
×
2760
                        "You are not permitted to complete file uploads with the supplied parameters.");
2761
            }
2762
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
2763
            logger.info("Etags: " + partETagBody);
×
2764
            try {
2765
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2766
                for (String partNo : object.keySet()) {
×
2767
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2768
                }
×
2769
                for (PartETag et : eTagList) {
×
2770
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2771
                }
×
2772
            } catch (JsonException je) {
×
2773
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2774
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2775
            }
×
2776
            try {
2777
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2778
            } catch (IOException io) {
×
2779
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2780
                logger.warning(io.getMessage());
×
2781
                try {
2782
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2783
                } catch (IOException e) {
×
2784
                    logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2785
                    logger.severe(e.getMessage());
×
2786
                }
×
2787

2788
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2789
            }
×
2790
            return ok("Multipart Upload completed");
×
2791
        } catch (WrappedResponse wr) {
×
2792
            return wr.getResponse();
×
2793
        }
2794
    }
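    // Illustrative usage (not part of the original source): completing a multipart upload. As
    // parsed above, the body is a JSON object mapping part numbers to eTags (all parameter values
    // are hypothetical):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
    //        -H "Content-Type: application/json" \
    //        -d '{"1":"d41d8cd98f00b204e9800998ecf8427e","2":"0cc175b9c0f1b6a831c399e269772661"}' \
    //        "$SERVER_URL/api/datasets/mpupload?globalid=doi:10.5072/FK2/EXAMPLE&storageidentifier=s3://bucket:18ab123&uploadid=ABC123"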
2795

2796
    /**
2797
     * Add a File to an existing Dataset
2798
     *
2799
     * @param idSupplied
2800
     * @param jsonData
2801
     * @param fileInputStream
2802
     * @param contentDispositionHeader
2803
     * @param formDataBodyPart
2804
     * @return
2805
     */
2806
    @POST
2807
    @AuthRequired
2808
    @Path("{id}/add")
2809
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2810
    @Produces("application/json")
2811
    @Operation(summary = "Uploads a file for a dataset", 
2812
               description = "Uploads a file for a dataset")
2813
    @APIResponse(responseCode = "200",
2814
               description = "File uploaded successfully to dataset")
2815
    @Tag(name = "addFileToDataset", 
2816
         description = "Uploads a file for a dataset")
2817
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
2818
    public Response addFileToDataset(@Context ContainerRequestContext crc,
2819
                    @PathParam("id") String idSupplied,
2820
                    @FormDataParam("jsonData") String jsonData,
2821
                    @FormDataParam("file") InputStream fileInputStream,
2822
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
2823
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
2824
                    ){
2825

2826
        if (!systemConfig.isHTTPUpload()) {
×
2827
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
2828
        }
2829

2830
        // -------------------------------------
2831
        // (1) Get the user from the ContainerRequestContext
2832
        // -------------------------------------
2833
        User authUser;
2834
        authUser = getRequestUser(crc);
×
2835

2836
        // -------------------------------------
2837
        // (2) Get the Dataset Id
2838
        //  
2839
        // -------------------------------------
2840
        Dataset dataset;
2841
        
2842
        try {
2843
            dataset = findDatasetOrDie(idSupplied);
×
2844
        } catch (WrappedResponse wr) {
×
2845
            return wr.getResponse();
×
2846
        }
×
2847
        
2848
        //------------------------------------
2849
        // (2a) Make sure dataset does not have package file
2850
        //
2851
        // --------------------------------------
2852
        
2853
        for (DatasetVersion dv : dataset.getVersions()) {
×
2854
            if (dv.isHasPackageFile()) {
×
2855
                return error(Response.Status.FORBIDDEN,
×
2856
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
2857
                );
2858
            }
2859
        }
×
2860

2861
        // (2a) Load up optional params via JSON
2862
        //---------------------------------------
2863
        OptionalFileParams optionalFileParams = null;
×
2864
        msgt("(api) jsonData: " + jsonData);
×
2865

2866
        try {
2867
            optionalFileParams = new OptionalFileParams(jsonData);
×
2868
        } catch (DataFileTagException ex) {
×
2869
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
2870
        }
2871
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
×
2872
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
×
2873
        }
×
2874
        
2875
        // -------------------------------------
2876
        // (3) Get the file name and content type
2877
        // -------------------------------------
2878
        String newFilename = null;
×
2879
        String newFileContentType = null;
×
2880
        String newStorageIdentifier = null;
×
2881
        if (null == contentDispositionHeader) {
×
2882
            if (optionalFileParams.hasStorageIdentifier()) {
×
2883
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
×
2884
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
×
2885
                
2886
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
×
2887
                    return error(BAD_REQUEST,
×
2888
                            "Dataset store configuration does not allow provided storageIdentifier.");
2889
                }
2890
                if (optionalFileParams.hasFileName()) {
×
2891
                    newFilename = optionalFileParams.getFileName();
×
2892
                    if (optionalFileParams.hasMimetype()) {
×
2893
                        newFileContentType = optionalFileParams.getMimeType();
×
2894
                    }
2895
                }
2896
            } else {
2897
                return error(BAD_REQUEST,
×
2898
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
2899
            }
2900
        } else {
2901
            newFilename = contentDispositionHeader.getFileName();
×
2902
            // Let's see if the form data part has the mime (content) type specified.
2903
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
2904
            // because that defaults to "text/plain" when no "Content-Type:" header is
2905
            // present. Instead we'll go through the headers, and see if "Content-Type:"
2906
            // is there. If not, we'll default to "application/octet-stream" - the generic
2907
            // unknown type. This will prompt the application to run type detection and
2908
            // potentially find something more accurate.
2909
            // newFileContentType = formDataBodyPart.getMediaType().toString();
2910

2911
            for (String header : formDataBodyPart.getHeaders().keySet()) {
×
2912
                if (header.equalsIgnoreCase("Content-Type")) {
×
2913
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
×
2914
                }
2915
            }
×
2916
            if (newFileContentType == null) {
×
2917
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
×
2918
            }
2919
        }
2920

2921

2922
        //-------------------
2923
        // (3) Create the AddReplaceFileHelper object
2924
        //-------------------
2925
        msg("ADD!");
×
2926

2927
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
2928
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
×
2929
                ingestService,
2930
                datasetService,
2931
                fileService,
2932
                permissionSvc,
2933
                commandEngine,
2934
                systemConfig);
2935

2936

2937
        //-------------------
2938
        // (4) Run "runAddFileByDatasetId"
2939
        //-------------------
2940
        addFileHelper.runAddFileByDataset(dataset,
×
2941
                newFilename,
2942
                newFileContentType,
2943
                newStorageIdentifier,
2944
                fileInputStream,
2945
                optionalFileParams);
2946

2947

2948
        if (addFileHelper.hasError()){
×
2949
            //conflict response status added for 8859
2950
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
×
2951
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
×
2952
            }
2953
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
2954
        } else {
2955
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
2956
            try {
2957
                //msgt("as String: " + addFileHelper.getSuccessResult());
2958
                /**
2959
                 * @todo We need a consistent, sane way to communicate a human
2960
                 * readable message to an API client suitable for human
2961
                 * consumption. Imagine if the UI were built in Angular or React
2962
                 * and we want to return a message from the API as-is to the
2963
                 * user. Human readable.
2964
                 */
2965
                logger.fine("successMsg: " + successMsg);
×
2966
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
×
2967
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
×
2968
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2969
                } else {
2970
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2971
                }
2972

2973
                //"Look at that!  You added a file! (hey hey, it may have worked)");
2974
            } catch (NoFilesException ex) {
×
2975
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
2976
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");
×
2977

2978
            }
2979
        }
2980
        
2981
    } // end: addFileToDataset
2982

2983

2984
    /**
2985
     * Clean storage of a Dataset
2986
     *
2987
     * @param idSupplied
2988
     * @return
2989
     */
2990
    @GET
2991
    @AuthRequired
2992
    @Path("{id}/cleanStorage")
2993
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2994
        // get user and dataset
2995
        User authUser = getRequestUser(crc);
×
2996

2997
        Dataset dataset;
2998
        try {
2999
            dataset = findDatasetOrDie(idSupplied);
×
3000
        } catch (WrappedResponse wr) {
×
3001
            return wr.getResponse();
×
3002
        }
×
3003
        
3004
        // check permissions
3005
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
3006
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
3007
        }
3008

3009
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
3010

3011
        // check if no legacy files are present
3012
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
3013
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
3014
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
3015
        }
3016

3017
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
3018
        List<String> deleted;
3019
        try {
3020
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
3021
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
3022
        } catch (IOException ex) {
×
3023
            logger.log(Level.SEVERE, null, ex);
×
3024
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
3025
        }
×
3026

3027
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
3028
        
3029
    }
3030
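    /*
     * Illustrative client-side call for the cleanStorage endpoint above (a sketch,
     * not part of this class). The host, dataset id, and API token are hypothetical
     * placeholders, assuming the usual X-Dataverse-key token header; the caller needs
     * EditDataset permission on the dataset, and "dryrun=true" only reports what
     * would be deleted.
     *
     *   Client client = ClientBuilder.newClient();
     *   String report = client.target("https://demo.example.edu/api/datasets/123/cleanStorage")
     *           .queryParam("dryrun", true)
     *           .request(MediaType.APPLICATION_JSON)
     *           .header("X-Dataverse-key", apiToken)
     *           .get(String.class);
     */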

3031
    @GET
3032
    @AuthRequired
3033
    @Path("{id}/versions/{versionId1}/compare/{versionId2}")
3034
    public Response getCompareVersions(@Context ContainerRequestContext crc, @PathParam("id") String id,
3035
                                      @PathParam("versionId1") String versionId1,
3036
                                      @PathParam("versionId2") String versionId2,
3037
                                      @Context UriInfo uriInfo, @Context HttpHeaders headers) {
3038
        try {
3039
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
3040
            DatasetVersion dsv1 = getDatasetVersionOrDie(req, versionId1, findDatasetOrDie(id), uriInfo, headers);
×
3041
            DatasetVersion dsv2 = getDatasetVersionOrDie(req, versionId2, findDatasetOrDie(id), uriInfo, headers);
×
3042
            if (dsv1.getCreateTime().getTime() > dsv2.getCreateTime().getTime()) {
×
3043
                return error(BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.version.compare.incorrect.order"));
×
3044
            }
3045
            return ok(DatasetVersion.compareVersions(dsv1, dsv2));
×
3046
        } catch (WrappedResponse wr) {
×
3047
            return wr.getResponse();
×
3048
        }
3049
    }
3050
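    /*
     * Illustrative use of the compare endpoint above (a sketch; host, id, and token
     * are hypothetical placeholders). Note that the earlier version must be supplied
     * first, otherwise the "incorrect order" error above is returned.
     *
     *   String diff = ClientBuilder.newClient()
     *           .target("https://demo.example.edu/api/datasets/123/versions/1.0/compare/2.0")
     *           .request(MediaType.APPLICATION_JSON)
     *           .header("X-Dataverse-key", apiToken)
     *           .get(String.class);
     */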

3051
    private static Set<String> getDatasetFilenames(Dataset dataset) {
3052
        Set<String> files = new HashSet<>();
×
3053
        for (DataFile dataFile: dataset.getFiles()) {
×
3054
            String storageIdentifier = dataFile.getStorageIdentifier();
×
3055
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
3056
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
3057
            files.add(locationParts[locationParts.length-1]);
×
3058
        }
×
3059
        return files;
×
3060
    }
3061

3062
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
3063
        return f -> {
1✔
3064
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
3065
        };
3066
    }
3067
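    /*
     * Illustration of the filter above, with hypothetical storage file names. Given a
     * dataset whose only registered file is stored as "18e7f9a1c2b-aaaaaaaaaaaa":
     *
     *   Predicate<String> toDelete = getToDeleteFilesFilter(Set.of("18e7f9a1c2b-aaaaaaaaaaaa"));
     *   toDelete.test("18e7f9a1c2b-aaaaaaaaaaaa.orig"); // false - derivative of a registered file, kept
     *   toDelete.test("18e7f9a1c2b-bbbbbbbbbbbb");      // true, if it matches dataFilePattern - an orphan, deletable
     *   toDelete.test("legacy_export.csv");             // false - legacy name outside the pattern, left alone
     */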

3068
    private void msg(String m) {
3069
        //System.out.println(m);
3070
        logger.fine(m);
×
3071
    }
×
3072

3073
    private void dashes() {
3074
        msg("----------------");
×
3075
    }
×
3076

3077
    private void msgt(String m) {
3078
        dashes();
×
3079
        msg(m);
×
3080
        dashes();
×
3081
    }
×
3082

3083

3084
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
3085
            throws WrappedResponse {
3086
        switch (versionId) {
×
3087
            case DS_VERSION_LATEST:
3088
                return hdl.handleLatest();
×
3089
            case DS_VERSION_DRAFT:
3090
                return hdl.handleDraft();
×
3091
            case DS_VERSION_LATEST_PUBLISHED:
3092
                return hdl.handleLatestPublished();
×
3093
            default:
3094
                try {
3095
                    String[] versions = versionId.split("\\.");
×
3096
                    switch (versions.length) {
×
3097
                        case 1:
3098
                            return hdl.handleSpecific(Long.parseLong(versions[0]), (long) 0.0);
×
3099
                        case 2:
3100
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
3101
                        default:
3102
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3103
                    }
3104
                } catch (NumberFormatException nfe) {
×
3105
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3106
                }
3107
        }
3108
    }
3109
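    /*
     * Examples of how version identifiers are resolved by handleVersion (illustrative
     * only; "h" stands for some DsVersionHandler implementation):
     *
     *   handleVersion("3", h)   -> h.handleSpecific(3L, 0L)
     *   handleVersion("3.1", h) -> h.handleSpecific(3L, 1L)
     *   handleVersion("3.1.0", h) or handleVersion("abc", h)
     *       -> WrappedResponse with 400 "Illegal version identifier"
     *   The DS_VERSION_LATEST, DS_VERSION_DRAFT and DS_VERSION_LATEST_PUBLISHED
     *   constants map to the corresponding handler methods.
     */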

3110
    /*
3111
     * includeDeaccessioned and checkPermsWhenDeaccessioned default to false. Use this only when you are sure that you don't need to work with
3112
     * a deaccessioned dataset.
3113
     */
3114
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, 
3115
                                                  String versionNumber, 
3116
                                                  final Dataset ds,
3117
                                                  UriInfo uriInfo, 
3118
                                                  HttpHeaders headers) throws WrappedResponse {
3119
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
3120
        boolean checkFilePerms = false;
×
3121
        boolean includeDeaccessioned = false;
×
3122
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
×
3123
    }
3124
    
3125
    /*
3126
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
3127
     */
3128
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3129
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
3130
        boolean checkPermsWhenDeaccessioned = true;
×
3131
        boolean bypassAccessCheck = false;
×
3132
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3133
    }
3134

3135
    /*
3136
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
3137
     */
3138
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3139
                                                  UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
3140
        boolean bypassAccessCheck = false;
×
3141
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3142
    }
3143

3144
    /*
3145
     * Allows defining when the permissions should be checked when a deaccessioned dataset is requested. If the user doesn't have edit permissions, this will result in an error.
3146
     */
3147
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3148
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
3149
            boolean bypassAccessCheck)
3150
            throws WrappedResponse {
3151

3152
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
×
3153

3154
        if (dsv == null || dsv.getId() == null) {
×
3155
            throw new WrappedResponse(
×
3156
                    notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
×
3157
        }
3158
        if (dsv.isReleased()&& uriInfo!=null) {
×
3159
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
×
3160
            mdcLogService.logEntry(entry);
×
3161
        }
3162
        return dsv;
×
3163
    }
3164
 
3165
    @GET
3166
    @Path("{identifier}/locks")
3167
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3168

3169
        Dataset dataset = null;
×
3170
        try {
3171
            dataset = findDatasetOrDie(id);
×
3172
            Set<DatasetLock> locks;
3173
            if (lockType == null) {
×
3174
                locks = dataset.getLocks();
×
3175
            } else {
3176
                // request for a specific lock type:
3177
                DatasetLock lock = dataset.getLockFor(lockType);
×
3178

3179
                locks = new HashSet<>();
×
3180
                if (lock != null) {
×
3181
                    locks.add(lock);
×
3182
                }
3183
            }
3184
            
3185
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3186

3187
        } catch (WrappedResponse wr) {
×
3188
            return wr.getResponse();
×
3189
        }
3190
    }
3191

3192
    @DELETE
3193
    @AuthRequired
3194
    @Path("{identifier}/locks")
3195
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3196

3197
        return response(req -> {
×
3198
            try {
3199
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3200
                if (!user.isSuperuser()) {
×
3201
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3202
                }
3203
                Dataset dataset = findDatasetOrDie(id);
×
3204
                
3205
                if (lockType == null) {
×
3206
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
3207
                    for (DatasetLock lock : dataset.getLocks()) {
×
3208
                        locks.add(lock.getReason());
×
3209
                    }
×
3210
                    if (!locks.isEmpty()) {
×
3211
                        for (DatasetLock.Reason locktype : locks) {
×
3212
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
3213
                            // refresh the dataset:
3214
                            dataset = findDatasetOrDie(id);
×
3215
                        }
×
3216
                        // kick off dataset reindexing, in case the locks removed 
3217
                        // affected the search card:
3218
                        indexService.asyncIndexDataset(dataset, true);
×
3219
                        return ok("locks removed");
×
3220
                    }
3221
                    return ok("dataset not locked");
×
3222
                }
3223
                // request for a specific lock type:
3224
                DatasetLock lock = dataset.getLockFor(lockType);
×
3225
                if (lock != null) {
×
3226
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
3227
                    // refresh the dataset:
3228
                    dataset = findDatasetOrDie(id);
×
3229
                    // ... and kick off dataset reindexing, in case the lock removed 
3230
                    // affected the search card:
3231
                    indexService.asyncIndexDataset(dataset, true);
×
3232
                    return ok("lock type " + lock.getReason() + " removed");
×
3233
                }
3234
                return ok("no lock type " + lockType + " on the dataset");
×
3235
            } catch (WrappedResponse wr) {
×
3236
                return wr.getResponse();
×
3237
            }
3238

3239
        }, getRequestUser(crc));
×
3240

3241
    }
3242
    
3243
    @POST
3244
    @AuthRequired
3245
    @Path("{identifier}/lock/{type}")
3246
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
3247
        return response(req -> {
×
3248
            try {
3249
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3250
                if (!user.isSuperuser()) {
×
3251
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3252
                }
3253
                Dataset dataset = findDatasetOrDie(id);
×
3254
                DatasetLock lock = dataset.getLockFor(lockType);
×
3255
                if (lock != null) {
×
3256
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
3257
                }
3258
                lock = new DatasetLock(lockType, user);
×
3259
                execCommand(new AddLockCommand(req, dataset, lock));
×
3260
                // refresh the dataset:
3261
                dataset = findDatasetOrDie(id);
×
3262
                // ... and kick off dataset reindexing:
3263
                indexService.asyncIndexDataset(dataset, true);
×
3264

3265
                return ok("dataset locked with lock type " + lockType);
×
3266
            } catch (WrappedResponse wr) {
×
3267
                return wr.getResponse();
×
3268
            }
3269

3270
        }, getRequestUser(crc));
×
3271
    }
3272
    
3273
    @GET
3274
    @AuthRequired
3275
    @Path("locks")
3276
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
3277
        // This API is here, under /datasets, and not under /admin, because we
3278
        // likely want it to be accessible to admin users who may not necessarily 
3279
        // have localhost access, which would be required to get to /api/admin in 
3280
        // most installations. It is still reasonable, however, to limit access to
3281
        // this API to admin users only.
3282
        AuthenticatedUser apiUser;
3283
        try {
3284
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
3285
        } catch (WrappedResponse ex) {
×
3286
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3287
        }
×
3288
        if (!apiUser.isSuperuser()) {
×
3289
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3290
        }
3291
        
3292
        // Locks can be optionally filtered by type, user, or both.
3293
        DatasetLock.Reason lockTypeValue = null;
×
3294
        AuthenticatedUser user = null; 
×
3295
        
3296
        // For the lock type, we use a QueryParam of type String, instead of 
3297
        // DatasetLock.Reason; that would be less code to write, but this way 
3298
        // we can check if the value passed matches a valid lock type ("reason") 
3299
        // and provide a helpful error message if it doesn't. If you use a 
3300
        // QueryParam of an Enum type, trying to pass an invalid value to it 
3301
        // results in a potentially confusing "404/NOT FOUND - requested 
3302
        // resource is not available".
3303
        if (lockType != null && !lockType.isEmpty()) {
×
3304
            try {
3305
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
3306
            } catch (IllegalArgumentException iax) {
×
3307
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
3308
                for (Reason r: Reason.values()) {
×
3309
                    reasonJoiner.add(r.name());
×
3310
                };
3311
                String errorMessage = "Invalid lock type value: " + lockType + 
×
3312
                        "; valid lock types: " + reasonJoiner.toString();
×
3313
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
3314
            }
×
3315
        }
3316
        
3317
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
3318
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
3319
            if (user == null) {
×
3320
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
3321
            }
3322
        }
3323
        
3324
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
3325
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
3326
                            
3327
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3328
    }   
3329
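    /*
     * Illustrative superuser call to the lock listing above (sketch only; the host,
     * token, and filter values are hypothetical placeholders - valid "type" values
     * are the DatasetLock.Reason names, and an invalid one returns the helpful 400
     * message built above):
     *
     *   String locks = ClientBuilder.newClient()
     *           .target("https://demo.example.edu/api/datasets/locks")
     *           .queryParam("type", "Ingest")
     *           .queryParam("userIdentifier", "jsmith")
     *           .request(MediaType.APPLICATION_JSON)
     *           .header("X-Dataverse-key", superuserApiToken)
     *           .get(String.class);
     */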
    
3330
    
3331
    @GET
3332
    @Path("{id}/makeDataCount/citations")
3333
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
3334
        
3335
        try {
3336
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3337
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
3338
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
3339
            for (DatasetExternalCitations citation : externalCitations) {
×
3340
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
3341
                /**
3342
                 * In the future we can imagine storing and presenting more
3343
                 * information about the citation such as the title of the paper
3344
                 * and the names of the authors. For now, we'll at least give
3345
                 * the URL of the citation so people can click and find out more
3346
                 * about the citation.
3347
                 */
3348
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
3349
                datasetsCitations.add(candidateObj);
×
3350
            }
×
3351
            return ok(datasetsCitations);
×
3352

3353
        } catch (WrappedResponse wr) {
×
3354
            return wr.getResponse();
×
3355
        }
3356

3357
    }
3358

3359
    @GET
3360
    @Path("{id}/makeDataCount/{metric}")
3361
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
3362
        String nullCurrentMonth = null;
×
3363
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
3364
    }
3365

3366
    @GET
3367
    @AuthRequired
3368
    @Path("{identifier}/storagesize")
3369
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
3370
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
3371
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
3372
    }
3373

3374
    @GET
3375
    @AuthRequired
3376
    @Path("{identifier}/versions/{versionId}/downloadsize")
3377
    public Response getDownloadSize(@Context ContainerRequestContext crc,
3378
                                    @PathParam("identifier") String dvIdtf,
3379
                                    @PathParam("versionId") String version,
3380
                                    @QueryParam("contentType") String contentType,
3381
                                    @QueryParam("accessStatus") String accessStatus,
3382
                                    @QueryParam("categoryName") String categoryName,
3383
                                    @QueryParam("tabularTagName") String tabularTagName,
3384
                                    @QueryParam("searchText") String searchText,
3385
                                    @QueryParam("mode") String mode,
3386
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
3387
                                    @Context UriInfo uriInfo,
3388
                                    @Context HttpHeaders headers) {
3389

3390
        return response(req -> {
×
3391
            FileSearchCriteria fileSearchCriteria;
3392
            try {
3393
                fileSearchCriteria = new FileSearchCriteria(
×
3394
                        contentType,
3395
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
3396
                        categoryName,
3397
                        tabularTagName,
3398
                        searchText
3399
                );
3400
            } catch (IllegalArgumentException e) {
×
3401
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
3402
            }
×
3403
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
3404
            try {
3405
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
×
3406
            } catch (IllegalArgumentException e) {
×
3407
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
×
3408
            }
×
3409
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
×
3410
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
×
3411
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
×
3412
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
3413
            jsonObjectBuilder.add("message", message);
×
3414
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
×
3415
            return ok(jsonObjectBuilder);
×
3416
        }, getRequestUser(crc));
×
3417
    }
3418

3419
    @GET
3420
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
3421
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
3422
        try {
3423
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3424
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
3425
            MakeDataCountUtil.MetricType metricType = null;
×
3426
            try {
3427
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
3428
            } catch (IllegalArgumentException ex) {
×
3429
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
3430
            }
×
3431
            String monthYear = null;
×
3432
            if (yyyymm != null) {
×
3433
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
3434
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
3435
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
3436
            }
3437
            if (country != null) {
×
3438
                country = country.toLowerCase();
×
3439
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
3440
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3441
                }
3442
            }
3443
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
3444
            if (datasetMetrics == null) {
×
3445
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3446
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
3447
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3448
            }
3449
            Long viewsTotalRegular = null;
×
3450
            Long viewsUniqueRegular = null;
×
3451
            Long downloadsTotalRegular = null;
×
3452
            Long downloadsUniqueRegular = null;
×
3453
            Long viewsTotalMachine = null;
×
3454
            Long viewsUniqueMachine = null;
×
3455
            Long downloadsTotalMachine = null;
×
3456
            Long downloadsUniqueMachine = null;
×
3457
            Long viewsTotal = null;
×
3458
            Long viewsUnique = null;
×
3459
            Long downloadsTotal = null;
×
3460
            Long downloadsUnique = null;
×
3461
            switch (metricSupplied) {
×
3462
                case "viewsTotal":
3463
                    viewsTotal = datasetMetrics.getViewsTotal();
×
3464
                    break;
×
3465
                case "viewsTotalRegular":
3466
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
3467
                    break;
×
3468
                case "viewsTotalMachine":
3469
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
3470
                    break;
×
3471
                case "viewsUnique":
3472
                    viewsUnique = datasetMetrics.getViewsUnique();
×
3473
                    break;
×
3474
                case "viewsUniqueRegular":
3475
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
3476
                    break;
×
3477
                case "viewsUniqueMachine":
3478
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
3479
                    break;
×
3480
                case "downloadsTotal":
3481
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
3482
                    break;
×
3483
                case "downloadsTotalRegular":
3484
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
3485
                    break;
×
3486
                case "downloadsTotalMachine":
3487
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
3488
                    break;
×
3489
                case "downloadsUnique":
3490
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
3491
                    break;
×
3492
                case "downloadsUniqueRegular":
3493
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
3494
                    break;
×
3495
                case "downloadsUniqueMachine":
3496
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
3497
                    break;
×
3498
                default:
3499
                    break;
3500
            }
3501
            /**
3502
             * TODO: Think more about the JSON output and the API design.
3503
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3504
             * now, by country. We could return multiple metrics (viewsTotal,
3505
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3506
             */
3507
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
3508
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
3509
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3510
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3511
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3512
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3513
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3514
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3515
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3516
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3517
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3518
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3519
            return ok(jsonObjectBuilder);
×
3520
        } catch (WrappedResponse wr) {
×
3521
            return wr.getResponse();
×
3522
        } catch (Exception e) {
×
3523
            //bad date - caught in sanitize call
3524
            return error(BAD_REQUEST, e.getMessage());
×
3525
        }
3526
    }
3527
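    /*
     * Illustrative call for a single Make Data Count metric (sketch; the host and
     * dataset id are hypothetical placeholders). The metric name must be one of the
     * cases handled in the switch above (e.g. "viewsTotal", "downloadsUnique"),
     * "yyyymm" uses the "2018-05" style month format, and the optional country code
     * is lower-cased before validation:
     *
     *   String metric = ClientBuilder.newClient()
     *           .target("https://demo.example.edu/api/datasets/123/makeDataCount/viewsTotal/2018-05")
     *           .queryParam("country", "us")
     *           .request(MediaType.APPLICATION_JSON)
     *           .get(String.class);
     */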
    
3528
    @GET
3529
    @AuthRequired
3530
    @Path("{identifier}/storageDriver")
3531
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3532
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3533
        
3534
        Dataset dataset; 
3535
        
3536
        try {
3537
            dataset = findDatasetOrDie(dvIdtf);
×
3538
        } catch (WrappedResponse ex) {
×
3539
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3540
        }
×
3541
        
3542
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3543
    }
3544
    
3545
    @PUT
3546
    @AuthRequired
3547
    @Path("{identifier}/storageDriver")
3548
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3549
            String storageDriverLabel,
3550
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3551
        
3552
        // Superuser-only:
3553
        AuthenticatedUser user;
3554
        try {
3555
            user = getRequestAuthenticatedUserOrDie(crc);
×
3556
        } catch (WrappedResponse ex) {
×
3557
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3558
        }
×
3559
        if (!user.isSuperuser()) {
×
3560
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3561
        }
3562

3563
        Dataset dataset;
3564

3565
        try {
3566
            dataset = findDatasetOrDie(dvIdtf);
×
3567
        } catch (WrappedResponse ex) {
×
3568
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3569
        }
×
3570
        
3571
        // We don't want to allow setting this to a store id that does not exist: 
3572
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
3573
            if (store.getKey().equals(storageDriverLabel)) {
×
3574
                dataset.setStorageDriverId(store.getValue());
×
3575
                datasetService.merge(dataset);
×
3576
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3577
            }
3578
        }
×
3579
        return error(Response.Status.BAD_REQUEST,
×
3580
                "No Storage Driver found for : " + storageDriverLabel);
3581
    }
3582
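    /*
     * Illustrative superuser call to set the storage driver above (sketch; the host,
     * dataset id, token, and the "LocalStack" label are hypothetical placeholders -
     * the label must be one of the keys returned by DataAccess.getStorageDriverLabels()).
     * The raw request body is the driver label itself:
     *
     *   String result = ClientBuilder.newClient()
     *           .target("https://demo.example.edu/api/datasets/123/storageDriver")
     *           .request(MediaType.APPLICATION_JSON)
     *           .header("X-Dataverse-key", superuserApiToken)
     *           .put(Entity.text("LocalStack"), String.class);
     */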
    
3583
    @DELETE
3584
    @AuthRequired
3585
    @Path("{identifier}/storageDriver")
3586
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3587
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3588
        
3589
        // Superuser-only:
3590
        AuthenticatedUser user;
3591
        try {
3592
            user = getRequestAuthenticatedUserOrDie(crc);
×
3593
        } catch (WrappedResponse ex) {
×
3594
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3595
        }
×
3596
        if (!user.isSuperuser()) {
×
3597
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3598
        }
3599

3600
        Dataset dataset;
3601

3602
        try {
3603
            dataset = findDatasetOrDie(dvIdtf);
×
3604
        } catch (WrappedResponse ex) {
×
3605
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3606
        }
×
3607
        
3608
        dataset.setStorageDriverId(null);
×
3609
        datasetService.merge(dataset);
×
3610
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3611
    }
3612

3613
    @GET
3614
    @AuthRequired
3615
    @Path("{identifier}/curationLabelSet")
3616
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3617
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3618

3619
        try {
3620
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3621
            if (!user.isSuperuser()) {
×
3622
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3623
            }
3624
        } catch (WrappedResponse wr) {
×
3625
            return wr.getResponse();
×
3626
        }
×
3627

3628
        Dataset dataset;
3629

3630
        try {
3631
            dataset = findDatasetOrDie(dvIdtf);
×
3632
        } catch (WrappedResponse ex) {
×
3633
            return ex.getResponse();
×
3634
        }
×
3635

3636
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3637
    }
3638

3639
    @PUT
3640
    @AuthRequired
3641
    @Path("{identifier}/curationLabelSet")
3642
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3643
                                        @PathParam("identifier") String dvIdtf,
3644
                                        @QueryParam("name") String curationLabelSet,
3645
                                        @Context UriInfo uriInfo,
3646
                                        @Context HttpHeaders headers) throws WrappedResponse {
3647

3648
        // Superuser-only:
3649
        AuthenticatedUser user;
3650
        try {
3651
            user = getRequestAuthenticatedUserOrDie(crc);
×
3652
        } catch (WrappedResponse ex) {
×
3653
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3654
        }
×
3655
        if (!user.isSuperuser()) {
×
3656
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3657
        }
3658

3659
        Dataset dataset;
3660

3661
        try {
3662
            dataset = findDatasetOrDie(dvIdtf);
×
3663
        } catch (WrappedResponse ex) {
×
3664
            return ex.getResponse();
×
3665
        }
×
3666
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3667
            dataset.setCurationLabelSetName(curationLabelSet);
×
3668
            datasetService.merge(dataset);
×
3669
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3670
        } else {
3671
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3672
                if (setName.equals(curationLabelSet)) {
×
3673
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3674
                    datasetService.merge(dataset);
×
3675
                    return ok("Curation Label Set Name set to: " + setName);
×
3676
                }
3677
            }
×
3678
        }
3679
        return error(Response.Status.BAD_REQUEST,
×
3680
            "No Such Curation Label Set");
3681
    }
3682

3683
    @DELETE
3684
    @AuthRequired
3685
    @Path("{identifier}/curationLabelSet")
3686
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3687
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3688

3689
        // Superuser-only:
3690
        AuthenticatedUser user;
3691
        try {
3692
            user = getRequestAuthenticatedUserOrDie(crc);
×
3693
        } catch (WrappedResponse ex) {
×
3694
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3695
        }
×
3696
        if (!user.isSuperuser()) {
×
3697
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3698
        }
3699

3700
        Dataset dataset;
3701

3702
        try {
3703
            dataset = findDatasetOrDie(dvIdtf);
×
3704
        } catch (WrappedResponse ex) {
×
3705
            return ex.getResponse();
×
3706
        }
×
3707

3708
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3709
        datasetService.merge(dataset);
×
3710
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3711
    }
3712

3713
    @GET
3714
    @AuthRequired
3715
    @Path("{identifier}/allowedCurationLabels")
3716
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3717
                                             @PathParam("identifier") String dvIdtf,
3718
                                             @Context UriInfo uriInfo,
3719
                                             @Context HttpHeaders headers) throws WrappedResponse {
3720
        AuthenticatedUser user = null;
×
3721
        try {
3722
            user = getRequestAuthenticatedUserOrDie(crc);
×
3723
        } catch (WrappedResponse wr) {
×
3724
            return wr.getResponse();
×
3725
        }
×
3726

3727
        Dataset dataset;
3728

3729
        try {
3730
            dataset = findDatasetOrDie(dvIdtf);
×
3731
        } catch (WrappedResponse ex) {
×
3732
            return ex.getResponse();
×
3733
        }
×
3734
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3735
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
3736
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3737
        } else {
3738
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3739
        }
3740
    }
3741

3742
    @GET
3743
    @AuthRequired
3744
    @Path("{identifier}/timestamps")
3745
    @Produces(MediaType.APPLICATION_JSON)
3746
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3747

3748
        Dataset dataset = null;
×
3749
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3750
        try {
3751
            dataset = findDatasetOrDie(id);
×
3752
            User u = getRequestUser(crc);
×
3753
            Set<Permission> perms = new HashSet<Permission>();
×
3754
            perms.add(Permission.ViewUnpublishedDataset);
×
3755
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3756
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3757
            logger.fine("CSD: " + canSeeDraft);
×
3758
            logger.fine("IT: " + dataset.getIndexTime());
×
3759
            logger.fine("MT: " + dataset.getModificationTime());
×
3760
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
3761
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3762
            // Basic info if it's released
3763
            if (dataset.isReleased() || canSeeDraft) {
×
3764
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
3765
                if (dataset.getPublicationDate() != null) {
×
3766
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3767
                }
3768

3769
                if (dataset.getLastExportTime() != null) {
×
3770
                    timestamps.add("lastMetadataExportTime",
×
3771
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3772

3773
                }
3774

3775
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
3776
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
3777
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3778
                }
3779
                // If the modification/permission modification time is
3780
                // set and the index time is null or is before the mod time, the relevant index is stale
3781
                timestamps.add("hasStaleIndex",
×
3782
                        (dataset.getModificationTime() != null && (dataset.getIndexTime() == null
×
3783
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3784
                                : false);
×
3785
                timestamps.add("hasStalePermissionIndex",
×
3786
                        (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null
×
3787
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3788
                                : false);
×
3789
            }
3790
            // More detail if you can see a draft
3791
            if (canSeeDraft) {
×
3792
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
3793
                if (dataset.getIndexTime() != null) {
×
3794
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3795
                }
3796
                if (dataset.getPermissionModificationTime() != null) {
×
3797
                    timestamps.add("lastPermissionUpdateTime",
×
3798
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3799
                }
3800
                if (dataset.getPermissionIndexTime() != null) {
×
3801
                    timestamps.add("lastPermissionIndexTime",
×
3802
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3803
                }
3804
                if (dataset.getGlobalIdCreateTime() != null) {
×
3805
                    timestamps.add("globalIdCreateTime", formatter
×
3806
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3807
                }
3808

3809
            }
3810
            return ok(timestamps);
×
3811
        } catch (WrappedResponse wr) {
×
3812
            return wr.getResponse();
×
3813
        }
3814
    }
3815

3816

3817
/****************************
3818
 * Globus Support Section:
3819
 * 
3820
 * Globus transfer in (upload) and out (download) involve three basic steps: The
3821
 * app is launched and makes a callback to the
3822
 * globusUploadParameters/globusDownloadParameters method to get all of the info
3823
 * needed to set up its display.
3824
 * 
3825
 * At some point after that, the user will make a selection as to which files to
3826
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
3827
 * to indicate a transfer is about to start. In addition to providing the
3828
 * details of where to transfer the files to/from, Dataverse also grants the
3829
 * Globus principal involved the relevant rw or r permission for the dataset.
3830
 * 
3831
 * Once the transfer is started, the app records the task id and sends it to
3832
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
3833
 * monitors the transfer task and when it ultimately succeeds or fails, it
3834
 * revokes the principal's permission and, in the transfer-in case, adds the
3835
 * files to the dataset. (The dataset is locked until the transfer completes.)
3836
 * 
3837
 * (If no transfer is started within a specified timeout, permissions will
3838
 * automatically be revoked - see the GlobusServiceBean for details.)
3839
 *
3840
 * The option to reference a file at a remote endpoint (rather than transfer it)
3841
 * follows the first two steps of the process above but completes with a call to
3842
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
3843
 * files can be added to the dataset immediately.)
3844
 */
3845
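    /*
     * Illustrative upload (transfer-in) sequence from a client's perspective, matching
     * the three steps described above. This is a sketch only: the host, dataset id,
     * request body, and token are hypothetical placeholders, and in practice the later
     * calls are made with the signed URLs returned in step 1 rather than a raw API token.
     *
     *   Client c = ClientBuilder.newClient();
     *   String base = "https://demo.example.edu/api/datasets/123";
     *   // (1) app callback: fetch display parameters and the allowed API calls
     *   String setup = c.target(base + "/globusUploadParameters").queryParam("locale", "en")
     *           .request().header("X-Dataverse-key", apiToken).get(String.class);
     *   // (2) request storage paths and grant the Globus principal write access
     *   String paths = c.target(base + "/requestGlobusUploadPaths")
     *           .request().header("X-Dataverse-key", apiToken)
     *           .post(Entity.json(requestBodyJson), String.class);
     *   // (3) once the transfer has been started, report its task id (plus the usual
     *   //     /addFiles-style file metadata) to /addGlobusFiles so Dataverse can
     *   //     monitor the task and add the files when it completes.
     */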

3846
    /**
3847
     * Retrieve the parameters and signed URLs required to perform a globus
3848
     * transfer. This API endpoint is expected to be called as a signed callback
3849
     * after the globus-dataverse app/other app is launched, but it will accept
3850
     * other forms of authentication.
3851
     * 
3852
     * @param crc
3853
     * @param datasetId
3854
     */
3855
    @GET
3856
    @AuthRequired
3857
    @Path("{id}/globusUploadParameters")
3858
    @Produces(MediaType.APPLICATION_JSON)
3859
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3860
            @QueryParam(value = "locale") String locale) {
3861
        // -------------------------------------
3862
        // (1) Get the user from the ContainerRequestContext
3863
        // -------------------------------------
3864
        AuthenticatedUser authUser;
3865
        try {
3866
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3867
        } catch (WrappedResponse e) {
×
3868
            return e.getResponse();
×
3869
        }
×
3870
        // -------------------------------------
3871
        // (2) Get the Dataset Id
3872
        // -------------------------------------
3873
        Dataset dataset;
3874

3875
        try {
3876
            dataset = findDatasetOrDie(datasetId);
×
3877
        } catch (WrappedResponse wr) {
×
3878
            return wr.getResponse();
×
3879
        }
×
3880
        String storeId = dataset.getEffectiveStorageDriverId();
×
3881
        // acceptsGlobusTransfers should only be true for an S3 or globus store
3882
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
3883
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
3884
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
3885
        }
3886

3887
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
3888

3889
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
3890
        String transferEndpoint = null;
×
3891
        JsonArray referenceEndpointsWithPaths = null;
×
3892
        if (managed) {
×
3893
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
3894
        } else {
3895
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
×
3896
        }
3897

3898
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
3899
        queryParams.add("queryParameters",
×
3900
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
3901
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
3902
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
3903
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
3904
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
3905
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
3906
        JsonObjectBuilder params = Json.createObjectBuilder();
×
3907
        substitutedParams.keySet().forEach((key) -> {
×
3908
            params.add(key, substitutedParams.get(key));
×
3909
        });
×
3910
        params.add("managed", Boolean.toString(managed));
×
3911
        if (managed) {
×
3912
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
×
3913
            if (maxSize != null) {
×
3914
                params.add("fileSizeLimit", maxSize);
×
3915
            }
3916
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
3917
            if (limit != null) {
×
3918
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
×
3919
            }
3920
        }
3921
        if (transferEndpoint != null) {
×
3922
            params.add("endpoint", transferEndpoint);
×
3923
        } else {
3924
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
×
3925
        }
3926
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
3927
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
3928
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
×
3929
        allowedApiCalls.add(
×
3930
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
×
3931
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
×
3932
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3933
        if(managed) {
×
3934
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
×
3935
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3936
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
×
3937
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3938
        } else {
3939
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
×
3940
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3941
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
×
3942
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3943
        }
3944
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
3945
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
3946
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
3947
                .add(URLTokenUtil.TIMEOUT, 5));
×
3948
        allowedApiCalls.add(
×
3949
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
3950
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
3951
                        .add(URLTokenUtil.TIMEOUT, 5));
×
3952

3953
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
3954
    }
3955

3956
    /**
3957
     * Provides specific storageIdentifiers to use for each file and requests permissions for a given Globus user to upload to the dataset
3958
     * 
3959
     * @param crc
3960
     * @param datasetId
3961
     * @param jsonBody - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/requestGlobusUploadPaths")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            String jsonBody) throws IOException, ExecutionException, InterruptedException {

        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);

        if (!systemConfig.isGlobusUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE,
                    BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            return e.getResponse();
        }

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
                .canIssue(UpdateDatasetVersionCommand.class)) {

            JsonObject params = JsonUtil.getJsonObject(jsonBody);
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
                try {
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
                    if (referencedFiles == null || referencedFiles.size() == 0) {
                        return badRequest("No referencedFiles specified");
                    }
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
                    return (ok(fileMap));
                } catch (Exception e) {
                    return badRequest(e.getLocalizedMessage());
                }
            } else {
                try {
                    String principal = params.getString("principal");
                    int numberOfPaths = params.getInt("numberOfFiles");
                    if (numberOfPaths <= 0) {
                        return badRequest("numberOfFiles must be positive");
                    }

                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
                    switch (response.getInt("status")) {
                    case 201:
                        return ok(response.getJsonObject("paths"));
                    case 400:
                        return badRequest("Unable to grant permission");
                    case 409:
                        return conflict("Permission already exists");
                    default:
                        return error(null, "Unexpected error when granting permission");
                    }

                } catch (NullPointerException | ClassCastException e) {
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");

                }
            }
        } else {
            return forbidden("User doesn't have permission to upload to this dataset");
        }

    }
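
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * request bodies for {id}/requestGlobusUploadPaths. The keys are the ones read
     * by the method above; the concrete values are placeholders.
     *
     *   // Dataverse-managed (S3/Globus) store:
     *   // {"principal": "d8b2e4f0-0000-0000-0000-000000000000", "numberOfFiles": 2}
     *
     *   // Remote/reference store:
     *   // {"referencedFiles": ["<globus reference to an existing remote file>"]}
     */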

    /** A method analogous to /addFiles; the request must also include the taskIdentifier of the transfer-in-progress to monitor
     * 
     * @param crc
     * @param datasetId
     * @param jsonData - see the /addFiles documentation; an additional "taskIdentifier" key in the main object is required.
     * @param uriInfo
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/addGlobusFiles")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a Globus file for a dataset", 
               description = "Uploads a Globus file for a dataset")
    @APIResponse(responseCode = "200",
               description = "Globus file uploaded successfully to dataset")
    @Tag(name = "addGlobusFilesToDataset", 
         description = "Uploads a Globus file for a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
                                            @PathParam("id") String datasetId,
                                            @FormDataParam("jsonData") String jsonData,
                                            @Context UriInfo uriInfo
    ) throws IOException, ExecutionException, InterruptedException {

        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);

        // -------------------------------------
        // (1) Get the user from the API key
        // -------------------------------------
        AuthenticatedUser authUser;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
            );
        }

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        // Is Globus upload service available?

        // ... on this Dataverse instance?
        if (!systemConfig.isGlobusUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
        }

        // ... and on this specific Dataset?
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
        }

        // Check if the dataset is already locked
        // We are reusing the code and logic used by various commands to determine
        // if there are any locks on the dataset that would prevent the current
        // user from modifying it:
        try {
            DataverseRequest dataverseRequest = createDataverseRequest(authUser);
            permissionService.checkEditDatasetLock(dataset, dataverseRequest, null);
        } catch (IllegalCommandException icex) {
            return error(Response.Status.FORBIDDEN, "Dataset " + datasetId + " is locked: " + icex.getLocalizedMessage());
        }

        JsonObject jsonObject = null;
        try {
            jsonObject = JsonUtil.getJsonObject(jsonData);
        } catch (Exception ex) {
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
            return badRequest("Error parsing json body");

        }

        //------------------------------------
        // (2b) Make sure dataset does not have package file
        // --------------------------------------

        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }


        String lockInfoMessage = "Globus Upload API started ";
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
                (authUser).getId(), lockInfoMessage);
        if (lock != null) {
            dataset.addLock(lock);
        } else {
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
        }

        if(uriInfo != null) {
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
        }

        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();

        // Async Call
        try {
            globusService.globusUpload(jsonObject, dataset, requestUrl, authUser);
        } catch (IllegalArgumentException ex) {
            return badRequest("Invalid parameters: "+ex.getMessage());
        }

        return ok("Async call to Globus Upload started ");

    }
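
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * multipart call to {id}/addGlobusFiles. SERVER_URL, API_TOKEN and the dataset
     * id are placeholders; the inner jsonData follows the /addFiles schema with the
     * extra "taskIdentifier" of the Globus transfer to monitor, as required above.
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/42/addGlobusFiles" \
     *     -F 'jsonData={"taskIdentifier":"<globus-task-id>","files":[ ...per-file entries as in /addFiles... ]}'
     */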

/**
 * Retrieve the parameters and signed URLs required to perform a globus
 * transfer/download. This API endpoint is expected to be called as a signed
 * callback after the globus-dataverse app/other app is launched, but it will
 * accept other forms of authentication.
 * 
 * @param crc
 * @param datasetId
 * @param locale
 * @param downloadId - the id of a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signed URL.
 * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
 */
    @GET
    @AuthRequired
    @Path("{id}/globusDownloadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser = null;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            logger.fine("guest user globus download");
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        JsonObject files = globusService.getFilesForDownload(downloadId);
        if (files == null) {
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
        }

        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;

        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
            params.add("endpoint", transferEndpoint);
        }
        params.add("files", files);
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE,
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }
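
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * the downloadId below is normally generated by Dataverse and handed to the
     * dataverse-globus app in a signed URL; SERVER_URL, API_TOKEN and the id are
     * placeholders.
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" \
     *     "$SERVER_URL/api/datasets/42/globusDownloadParameters?downloadId=<cached-download-id>"
     */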

    /**
     * Requests permissions for a given globus user to download the specified files
     * in the dataset and returns information about the paths to transfer from.
     * 
     * When called directly rather than in response to being given a downloadId, the JSON body can include a "fileIds" key with an array of file ids to transfer.
     * 
     * @param crc
     * @param datasetId
     * @param jsonBody - a JSON object that must include the id of the Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/requestGlobusDownload")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
            throws IOException, ExecutionException, InterruptedException {

        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);

        if (!systemConfig.isGlobusDownload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE,
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User user = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        JsonObject body = null;
        if (jsonBody != null) {
            body = JsonUtil.getJsonObject(jsonBody);
        }
        Set<String> fileIds = null;
        if (downloadId != null) {
            JsonObject files = globusService.getFilesForDownload(downloadId);
            if (files != null) {
                fileIds = files.keySet();
            }
        } else {
            if ((body!=null) && body.containsKey("fileIds")) {
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
                fileIds = new HashSet<String>(fileVals.size());
                for (JsonValue fileVal : fileVals) {
                    String id = null;
                    switch (fileVal.getValueType()) {
                    case STRING:
                        id = ((JsonString) fileVal).getString();
                        break;
                    case NUMBER:
                        id = ((JsonNumber) fileVal).toString();
                        break;
                    default:
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
                    }
                    fileIds.add(id);
                }
            } else {
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
            }
        }

        if (fileIds.isEmpty()) {
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
        }
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
        for (String id : fileIds) {
            boolean published = false;
            logger.info("File id: " + id);

            DataFile df = null;
            try {
                df = findDataFileOrDie(id);
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }
            if (!df.getOwner().equals(dataset)) {
                return badRequest("All files must be in the dataset");
            }
            dataFiles.add(df);

            for (FileMetadata fm : df.getFileMetadatas()) {
                if (fm.getDatasetVersion().isPublished()) {
                    published = true;
                    break;
                }
            }

            if (!published) {
                // If the file is not published, the user can still download it
                // if they have the permission to view unpublished versions:

                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
                    return forbidden("User doesn't have permission to download file: " + id);
                }
            } else { // published and restricted and/or embargoed
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
                    // This check also handles all three cases: authenticated session user,
                    // token user, and guest.
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
                        return forbidden("User doesn't have permission to download file: " + id);
                    }

            }
        }
        // Allowed to download all requested files
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
            // If managed, give the principal read permissions
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
            switch (status) {
            case 201:
                return ok(files);
            case 400:
                return badRequest("Unable to grant permission");
            case 409:
                return conflict("Permission already exists");
            default:
                return error(null, "Unexpected error when granting permission");
            }

        }

        return ok(files);
    }
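
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * direct call to {id}/requestGlobusDownload without a downloadId. "principal"
     * is only needed for Dataverse-managed Globus stores; the ids are placeholders.
     *
     *   // {"principal": "d8b2e4f0-0000-0000-0000-000000000000", "fileIds": [101, 102]}
     */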

    /**
     * Monitors a globus download and removes permissions on the dir/dataset when
     * the specified transfer task is completed.
     * 
     * @param crc
     * @param datasetId
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
     *                  Globus task to monitor.
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/monitorGlobusDownload")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            String jsonData) throws IOException, ExecutionException, InterruptedException {

        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);

        if (!systemConfig.isGlobusDownload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE,
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        // Async Call
        globusService.globusDownload(jsonData, dataset, authUser);

        return ok("Async call to Globus Download started");

    }
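
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * body for {id}/monitorGlobusDownload; the task id is a placeholder.
     *
     *   // {"taskIdentifier": "a1b2c3d4-0000-0000-0000-000000000000"}
     */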

    /**
     * Add multiple Files to an existing Dataset
     *
     * @param idSupplied
     * @param jsonData
     * @return
     */
    @POST
    @AuthRequired
    @Path("{id}/addFiles")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a set of files to a dataset", 
               description = "Uploads a set of files to a dataset")
    @APIResponse(responseCode = "200",
               description = "Files uploaded successfully to dataset")
    @Tag(name = "addFilesToDataset", 
         description = "Uploads a set of files to a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
            @FormDataParam("jsonData") String jsonData) {

        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        dataset.getLocks().forEach(dl -> {
            logger.info(dl.toString());
        });

        //------------------------------------
        // (2a) Make sure dataset does not have package file
        // --------------------------------------

        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN,
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }

        DataverseRequest dvRequest = createDataverseRequest(authUser);

        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
                dvRequest,
                this.ingestService,
                this.datasetService,
                this.fileService,
                this.permissionSvc,
                this.commandEngine,
                this.systemConfig
        );

        return addFileHelper.addFiles(jsonData, dataset, authUser);

    }
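
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * multipart call to {id}/addFiles for files already placed in the store.
     * SERVER_URL, API_TOKEN, the id and the per-file values are placeholders; the
     * exact jsonData schema is defined by AddReplaceFileHelper/OptionalFileParams.
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/42/addFiles" \
     *     -F 'jsonData=[{"storageIdentifier":"s3://bucket:1234abcd","fileName":"data.csv","mimeType":"text/csv"}]'
     */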

    /**
     * Replace multiple files in an existing Dataset
     *
     * @param idSupplied
     * @param jsonData
     * @return
     */
    @POST
    @AuthRequired
    @Path("{id}/replaceFiles")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Replace a set of files in a dataset", 
               description = "Replace a set of files in a dataset")
    @APIResponse(responseCode = "200",
               description = "Files replaced successfully in the dataset")
    @Tag(name = "replaceFilesInDataset", 
         description = "Replace a set of files in a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) 
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
                                          @PathParam("id") String idSupplied,
                                          @FormDataParam("jsonData") String jsonData) {

        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        dataset.getLocks().forEach(dl -> {
            logger.info(dl.toString());
        });

        //------------------------------------
        // (2a) Make sure dataset does not have package file
        // --------------------------------------

        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN,
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }

        DataverseRequest dvRequest = createDataverseRequest(authUser);

        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
                dvRequest,
                this.ingestService,
                this.datasetService,
                this.fileService,
                this.permissionSvc,
                this.commandEngine,
                this.systemConfig
        );

        return addFileHelper.replaceFiles(jsonData, dataset, authUser);

    }
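
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * jsonData for {id}/replaceFiles mirrors /addFiles; each entry additionally
     * identifies the file being replaced, shown here as "fileToReplaceId", which is
     * an assumption based on AddReplaceFileHelper's documented usage. All values
     * are placeholders.
     *
     *   // [{"fileToReplaceId": 55, "storageIdentifier": "s3://bucket:1234abcd",
     *   //   "fileName": "data-v2.csv", "mimeType": "text/csv"}]
     */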

    /**
     * API to find curation assignments and statuses
     *
     * @return
     * @throws WrappedResponse
     */
    @GET
    @AuthRequired
    @Path("/listCurationStates")
    @Produces("text/csv")
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {

        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        List<DataverseRole> allRoles = dataverseRoleService.findAll();
        List<DataverseRole> curationRoles = new ArrayList<DataverseRole>();
        allRoles.forEach(r -> {
            if (r.permissions().contains(Permission.PublishDataset))
                curationRoles.add(r);
        });
        HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>();
        curationRoles.forEach(r -> {
            assignees.put(r.getAlias(), null);
        });

        StringBuilder csvSB = new StringBuilder(String.join(",",
                BundleUtil.getStringFromBundle("dataset"),
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
                String.join(",", assignees.keySet())));
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
            curationRoles.forEach(r -> {
                assignees.put(r.getAlias(), new HashSet<String>());
            });
            for (RoleAssignment ra : ras) {
                if (curationRoles.contains(ra.getRole())) {
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
                }
            }
            DatasetVersion dsv = dataset.getLatestVersion();
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
            String status = dsv.getExternalStatusLabel();
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
            List<String> sList = new ArrayList<String>();
            assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
        }
        csvSB.append("\n");
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
    }
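
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * superuser call that saves the curation-status report produced above as CSV.
     * SERVER_URL and API_TOKEN are placeholders.
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" \
     *     "$SERVER_URL/api/datasets/listCurationStates" -o datasets.status.csv
     */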

    // APIs to manage archival status

    @GET
    @AuthRequired
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
            DataverseRequest req = createDataverseRequest(au);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
                    headers);

            if (dsv.getArchivalCopyLocation() == null) {
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
            } else {
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
                return ok(status);
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @PUT
    @AuthRequired
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    String newStatus,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        logger.fine(newStatus);
        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);

            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }

            //Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
            JsonObject update= JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));

            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {

                    DataverseRequest req = createDataverseRequest(au);
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
                            uriInfo, headers);

                    if (dsv == null) {
                        return error(Status.NOT_FOUND, "Dataset version not found");
                    }
                    if (isSingleVersionArchiving()) {
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
                                return error(Status.CONFLICT, "Dataset already archived.");
                            }
                        }
                    }

                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
                    dsv = datasetversionService.merge(dsv);
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());

                    return ok("Status updated");
                }
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (JsonException| IllegalStateException ex) {
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
        }
        return error(Status.BAD_REQUEST, "Unacceptable status format");
    }
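
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * body for the archivalStatus PUT above. The JSON keys correspond to
     * DatasetVersion.ARCHIVAL_STATUS and ARCHIVAL_STATUS_MESSAGE; the literal key
     * names and values shown here are assumptions based on the Dataverse archiving
     * documentation.
     *
     *   // {"status": "success", "message": "Archived to remote storage"}
     */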

    @DELETE
    @AuthRequired
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                       @PathParam("id") String datasetId,
                                                       @PathParam("version") String versionNumber,
                                                       @Context UriInfo uriInfo,
                                                       @Context HttpHeaders headers) {

        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }

            DataverseRequest req = createDataverseRequest(au);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
                    headers);
            if (dsv == null) {
                return error(Status.NOT_FOUND, "Dataset version not found");
            }
            dsv.setArchivalCopyLocation(null);
            dsv = datasetversionService.merge(dsv);

            return ok("Status deleted");

        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    private boolean isSingleVersionArchiving() {
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
        if (className != null) {
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
            try {
                clazz =  Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
            } catch (ClassNotFoundException e) {
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
            } catch (ClassCastException cce) {
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
            }
        }
        return false;
    }

    // This method provides a callback for an external tool to retrieve its
    // parameters/API URLs. If the request is authenticated, e.g. by it being
    // signed, the API URLs will be signed. If a guest request is made, the URLs
    // will be plain/unsigned.
    // This supports the cases where a tool is accessing a restricted resource (e.g.
    // for a draft dataset), or the public case.
    @GET
    @AuthRequired
    @Path("{id}/versions/{version}/toolparams/{tid}")
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
                                            @PathParam("tid") long externalToolId,
                                            @PathParam("id") String datasetId,
                                            @PathParam("version") String version,
                                            @QueryParam(value = "locale") String locale) {
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
            if (target == null) {
                return error(BAD_REQUEST, "DatasetVersion not found.");
            }

            ExternalTool externalTool = externalToolService.findById(externalToolId);
            if(externalTool==null) {
                return error(BAD_REQUEST, "External tool not found.");
            }
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
            }
            ApiToken apiToken = null;
            User u = getRequestUser(crc);
            apiToken = authSvc.getValidApiTokenForUser(u);

            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @GET
    @Path("summaryFieldNames")
    public Response getDatasetSummaryFieldNames() {
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
        for (String fieldName : fieldNames) {
            fieldNamesArrayBuilder.add(fieldName);
        }
        return ok(fieldNamesArrayBuilder);
    }

    @GET
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
            throw new NotAcceptableException("Anonymized Access not enabled");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
        if (dsv == null || dsv.getId() == null) {
            return notFound("Dataset version not found");
        }
        JsonObjectBuilder responseJson;
        if (isAnonymizedAccess) {
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
        } else {
            responseJson = json(dsv, null, true, returnOwners);
        }
        return ok(responseJson);
    }

    @GET
    @Path("previewUrlDatasetVersion/{previewUrlToken}")
    public Response getPreviewUrlDatasetVersion(@PathParam("previewUrlToken") String previewUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
            throw new NotAcceptableException("Anonymized Access not enabled");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken);
        if (dsv == null || dsv.getId() == null) {
            return notFound("Dataset version not found");
        }
        JsonObjectBuilder responseJson;
        if (isAnonymizedAccess) {
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
        } else {
            responseJson = json(dsv, null, true, returnOwners);
        }
        return ok(responseJson);
    }


    @GET
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
    }

    @GET
    @Path("previewUrlDatasetVersion/{previewUrlToken}/citation")
    public Response getPreviewUrlDatasetVersionCitation(@PathParam("previewUrlToken") String previewUrlToken) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(previewUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(previewUrlToken);
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/citation")
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
                                              @PathParam("id") String datasetId,
                                              @PathParam("versionId") String versionId,
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                              @Context UriInfo uriInfo,
                                              @Context HttpHeaders headers) {
        boolean checkFilePerms = false;
        return response(req -> ok(
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers,
                        includeDeaccessioned, checkFilePerms).getCitation(true, false)),
                getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("{id}/versions/{versionId}/deaccession")
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
        }
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            try {
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
                if (deaccessionForwardURL != null) {
                    try {
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
                    } catch (IllegalArgumentException iae) {
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
                    }
                }
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));

                return ok("Dataset " + 
                        (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) + 
                        " deaccessioned for version " + versionId);
            } catch (JsonParsingException jpe) {
                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
            }
        }, getRequestUser(crc));
    }
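
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * body for the deaccession endpoint above; "deaccessionForwardURL" is optional
     * and the values are placeholders.
     *
     *   // {"deaccessionReason": "Superseded by a corrected version",
     *   //  "deaccessionForwardURL": "https://example.org/new-location"}
     */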

    @GET
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
        if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
        }
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
    }

    @PUT
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                               boolean gbAtRequest,
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
        if (!gbAtRequestOpt.isPresent()) {
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
        }
        String choice = Boolean.valueOf(gbAtRequest).toString();
        dataset.setGuestbookEntryAtRequest(choice);
        datasetService.merge(dataset);
        return ok("Guestbook Entry At Request set to: " + choice);
    }
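
    /*
     * Illustrative example (editor's addition, not part of the original source):
     * superuser call that sets the option above. SERVER_URL, API_TOKEN and the id
     * are placeholders, the body is a bare boolean, and the exact curl flags shown
     * are an assumption following general Dataverse API conventions.
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X PUT \
     *     "$SERVER_URL/api/datasets/42/guestbookEntryAtRequest" -d true
     */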
5045

5046
    @DELETE
5047
    @AuthRequired
5048
    @Path("{identifier}/guestbookEntryAtRequest")
5049
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5050
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
5051

5052
        // Superuser-only:
5053
        AuthenticatedUser user;
5054
        try {
5055
            user = getRequestAuthenticatedUserOrDie(crc);
×
5056
        } catch (WrappedResponse ex) {
×
5057
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5058
        }
×
5059
        if (!user.isSuperuser()) {
×
5060
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5061
        }
5062

5063
        Dataset dataset;
5064

5065
        try {
5066
            dataset = findDatasetOrDie(dvIdtf);
×
5067
        } catch (WrappedResponse ex) {
×
5068
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5069
        }
×
5070

5071
        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
×
5072
        datasetService.merge(dataset);
×
5073
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
5074
    }
5075

5076
    @GET
5077
    @AuthRequired
5078
    @Path("{id}/userPermissions")
5079
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
5080
        Dataset dataset;
5081
        try {
5082
            dataset = findDatasetOrDie(datasetId);
×
5083
        } catch (WrappedResponse wr) {
×
5084
            return wr.getResponse();
×
5085
        }
×
5086
        User requestUser = getRequestUser(crc);
×
5087
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
5088
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
×
5089
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
×
5090
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
×
5091
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
×
5092
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
×
5093
        return ok(jsonObjectBuilder);
×
5094
    }
5095

5096
    @GET
5097
    @AuthRequired
5098
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
5099
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
5100
                                                 @PathParam("id") String datasetId,
5101
                                                 @PathParam("versionId") String versionId,
5102
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
5103
                                                 @Context UriInfo uriInfo,
5104
                                                 @Context HttpHeaders headers) {
5105
        return response(req -> {
×
5106
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
5107
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
×
5108
        }, getRequestUser(crc));
×
5109
    }
5110
    
5111
    /**
     * Get the PidProvider that will be used for generating new DOIs in this dataset
     *
     * @return - the id of the effective PID generator for the given dataset
     * @throws WrappedResponse
     */
    @GET
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
            @Context HttpHeaders headers) throws WrappedResponse {

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
        if (pidProvider == null) {
            // This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
        }
        String pidGeneratorId = pidProvider.getId();
        return ok(pidGeneratorId);
    }

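    // Usage sketch for GET {identifier}/pidGenerator above (same /api and
    // X-Dataverse-key assumptions; addressing the dataset via :persistentId with a
    // ?persistentId=... query parameter should also work):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/24/pidGenerator"
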
    @PUT
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
            dataset.setPidGeneratorId(generatorId);
            datasetService.merge(dataset);
            return ok("PID Generator set to: " + generatorId);
        } else {
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the given id");
        }

    }

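    // Usage sketch for PUT {identifier}/pidGenerator above (superuser only). The
    // request body is the plain-text id of a configured PID provider; "perma1" is
    // an illustrative id, not something defined in this file.
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
    //     -d "perma1" "$SERVER_URL/api/datasets/24/pidGenerator"
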
    @DELETE
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
            @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }

        dataset.setPidGenerator(null);
        datasetService.merge(dataset);
        return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
    }

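    // Usage sketch for DELETE {identifier}/pidGenerator above (superuser only);
    // clears the dataset-level override so the effective generator is inherited again:
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/24/pidGenerator"
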
    @GET
    @Path("datasetTypes")
    public Response getDatasetTypes() {
        JsonArrayBuilder jab = Json.createArrayBuilder();
        for (DatasetType datasetType : datasetTypeSvc.listAll()) {
            jab.add(datasetType.toJson());
        }
        return ok(jab);
    }

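    // Usage sketch for GET datasetTypes above (no authentication required; the /api
    // mount point is assumed):
    //
    //   curl "$SERVER_URL/api/datasets/datasetTypes"
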
    @GET
    @Path("datasetTypes/{idOrName}")
    public Response getDatasetTypes(@PathParam("idOrName") String idOrName) {
        DatasetType datasetType = null;
        if (StringUtils.isNumeric(idOrName)) {
            try {
                long id = Long.parseLong(idOrName);
                datasetType = datasetTypeSvc.getById(id);
            } catch (NumberFormatException ex) {
                return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
            }
        } else {
            datasetType = datasetTypeSvc.getByName(idOrName);
        }
        if (datasetType != null) {
            return ok(datasetType.toJson());
        } else {
            return error(NOT_FOUND, "Could not find a dataset type with name " + idOrName);
        }
    }

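    // Usage sketch for GET datasetTypes/{idOrName} above; the path segment is treated
    // as a numeric id when it is all digits, otherwise as a type name ("software" is
    // only an illustrative name, not one defined in this file):
    //
    //   curl "$SERVER_URL/api/datasets/datasetTypes/software"
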
    @POST
    @AuthRequired
    @Path("datasetTypes")
    public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) {
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        if (jsonIn == null || jsonIn.isEmpty()) {
            return error(BAD_REQUEST, "JSON input was null or empty!");
        }

        String nameIn = null;
        try {
            JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn);
            nameIn = jsonObject.getString("name", null);
        } catch (JsonParsingException ex) {
            return error(BAD_REQUEST, "Problem parsing supplied JSON: " + ex.getLocalizedMessage());
        }
        if (nameIn == null) {
            return error(BAD_REQUEST, "A name for the dataset type is required");
        }
        if (StringUtils.isNumeric(nameIn)) {
            // getDatasetTypes supports lookup by id or name, so we don't want a name that looks like an id
            return error(BAD_REQUEST, "The name of the type cannot be only digits.");
        }

        try {
            DatasetType datasetType = new DatasetType();
            datasetType.setName(nameIn);
            DatasetType saved = datasetTypeSvc.save(datasetType);
            Long typeId = saved.getId();
            String name = saved.getName();
            return ok(saved.toJson());
        } catch (WrappedResponse ex) {
            return error(BAD_REQUEST, ex.getMessage());
        }
    }

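    // Usage sketch for POST datasetTypes above (superuser only). The body is a JSON
    // object with a "name" key; "workflow" is only an illustrative value:
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" \
    //     -X POST -d '{"name": "workflow"}' "$SERVER_URL/api/datasets/datasetTypes"
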
    @DELETE
    @AuthRequired
    @Path("datasetTypes/{id}")
    public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) {
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        if (doomed == null || doomed.isEmpty()) {
            throw new IllegalArgumentException("ID is required!");
        }

        long idToDelete;
        try {
            idToDelete = Long.parseLong(doomed);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("ID must be a number");
        }

        DatasetType datasetTypeToDelete = datasetTypeSvc.getById(idToDelete);
        if (datasetTypeToDelete == null) {
            return error(BAD_REQUEST, "Could not find dataset type with id " + idToDelete);
        }

        if (DatasetType.DEFAULT_DATASET_TYPE.equals(datasetTypeToDelete.getName())) {
            return error(Status.FORBIDDEN, "You cannot delete the default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE);
        }

        try {
            int numDeleted = datasetTypeSvc.deleteById(idToDelete);
            if (numDeleted == 1) {
                return ok("deleted");
            } else {
                return error(BAD_REQUEST, "Something went wrong. Number of dataset types deleted: " + numDeleted);
            }
        } catch (WrappedResponse ex) {
            return error(BAD_REQUEST, ex.getMessage());
        }
    }

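    // Usage sketch for DELETE datasetTypes/{id} above (superuser only; the default
    // dataset type cannot be deleted). The numeric id 3 is a placeholder:
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/datasetTypes/3"
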
    @AuthRequired
    @PUT
    @Path("datasetTypes/{idOrName}")
    public Response updateDatasetTypeLinksWithMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("idOrName") String idOrName, String jsonBody) {
        DatasetType datasetType = null;
        if (StringUtils.isNumeric(idOrName)) {
            try {
                long id = Long.parseLong(idOrName);
                datasetType = datasetTypeSvc.getById(id);
            } catch (NumberFormatException ex) {
                return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
            }
        } else {
            datasetType = datasetTypeSvc.getByName(idOrName);
        }
        if (datasetType == null) {
            // Guard against an unknown id or name, which would otherwise cause a NullPointerException below
            return error(NOT_FOUND, "Could not find a dataset type with id or name " + idOrName);
        }
        JsonArrayBuilder datasetTypesBefore = Json.createArrayBuilder();
        for (MetadataBlock metadataBlock : datasetType.getMetadataBlocks()) {
            datasetTypesBefore.add(metadataBlock.getName());
        }
        JsonArrayBuilder datasetTypesAfter = Json.createArrayBuilder();
        List<MetadataBlock> metadataBlocksToSave = new ArrayList<>();
        if (jsonBody != null && !jsonBody.isEmpty()) {
            JsonArray json = JsonUtil.getJsonArray(jsonBody);
            for (JsonString jsonValue : json.getValuesAs(JsonString.class)) {
                String name = jsonValue.getString();
                MetadataBlock metadataBlock = metadataBlockSvc.findByName(name);
                if (metadataBlock != null) {
                    metadataBlocksToSave.add(metadataBlock);
                    datasetTypesAfter.add(name);
                } else {
                    String availableBlocks = metadataBlockSvc.listMetadataBlocks().stream().map(MetadataBlock::getName).collect(Collectors.joining(", "));
                    return badRequest("Metadata block not found: " + name + ". Available metadata blocks: " + availableBlocks);
                }
            }
        }
        try {
            execCommand(new UpdateDatasetTypeLinksToMetadataBlocksCommand(createDataverseRequest(getRequestUser(crc)), datasetType, metadataBlocksToSave));
            return ok(Json.createObjectBuilder()
                    .add("linkedMetadataBlocks", Json.createObjectBuilder()
                            .add("before", datasetTypesBefore)
                            .add("after", datasetTypesAfter))
            );

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }

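    // Usage sketch for PUT datasetTypes/{idOrName} above. The body is a JSON array of
    // metadata block names to link to the type (the block and type names shown are
    // illustrative, not defined in this file):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" \
    //     -X PUT -d '["codeMeta20"]' "$SERVER_URL/api/datasets/datasetTypes/software"
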
    @PUT
    @AuthRequired
    @Path("{id}/deleteFiles")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response deleteDatasetFiles(@Context ContainerRequestContext crc, @PathParam("id") String id,
            JsonArray fileIds) {
        try {
            getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
        return response(req -> {
            Dataset dataset = findDatasetOrDie(id);
            // Convert the JsonArray of file ids to a List<Long>
            List<Long> fileIdList = new ArrayList<>();
            for (JsonValue value : fileIds) {
                fileIdList.add(((JsonNumber) value).longValue());
            }
            // Find the files to be deleted in the edit (draft) version
            List<FileMetadata> filesToDelete = dataset.getOrCreateEditVersion().getFileMetadatas().stream()
                    .filter(fileMetadata -> fileIdList.contains(fileMetadata.getDataFile().getId()))
                    .collect(Collectors.toList());

            if (filesToDelete.isEmpty()) {
                return badRequest("No files found with the provided IDs.");
            }

            if (filesToDelete.size() != fileIds.size()) {
                return badRequest(
                        "Some files listed are not present in the latest dataset version and cannot be deleted.");
            }
            try {
                UpdateDatasetVersionCommand updateCmd = new UpdateDatasetVersionCommand(dataset, req, filesToDelete);
                commandEngine.submit(updateCmd);
                for (FileMetadata fm : filesToDelete) {
                    DataFile dataFile = fm.getDataFile();
                    // Physical files are only removed for datafiles that were never released
                    boolean deletePhysicalFile = !dataFile.isReleased();
                    if (deletePhysicalFile) {
                        try {
                            fileService.finalizeFileDelete(dataFile.getId(),
                                    fileService.getPhysicalFileToDelete(dataFile));
                        } catch (IOException ioex) {
                            logger.warning("Failed to delete the physical file associated with the deleted datafile id="
                                    + dataFile.getId() + ", storage location: "
                                    + fileService.getPhysicalFileToDelete(dataFile));
                        }
                    }
                }
            } catch (PermissionException ex) {
                return error(FORBIDDEN, "You do not have permission to delete files on this dataset.");
            } catch (CommandException ex) {
                return error(BAD_REQUEST,
                        "File deletes failed for dataset ID " + id + " (CommandException): " + ex.getMessage());
            } catch (EJBException ex) {
                return error(jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR,
                        "File deletes failed for dataset ID " + id + " (EJBException): " + ex.getMessage());
            }
            return ok(fileIds.size() + " files deleted successfully");
        }, getRequestUser(crc));
    }
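    // Usage sketch for PUT {id}/deleteFiles above (the endpoint added in this pull
    // request). The body is a JSON array of datafile database ids; the ids shown are
    // placeholders:
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" \
    //     -X PUT -d '[101, 102]' "$SERVER_URL/api/datasets/24/deleteFiles"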
}