• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

IQSS / dataverse / #22985

23 Aug 2024 06:32PM UTC coverage: 20.61% (-0.2%) from 20.791%
#22985

Pull #10781

github

landreev
added an upfront locks check to the /addGlobusFiles api #10623
Pull Request #10781: Improved handling of Globus uploads

4 of 417 new or added lines in 15 files covered. (0.96%)

4194 existing lines in 35 files now uncovered.

17388 of 84365 relevant lines covered (20.61%)

0.21 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.17
/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.s3.model.PartETag;
4

5
import edu.harvard.iq.dataverse.*;
6
import edu.harvard.iq.dataverse.DatasetLock.Reason;
7
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
8
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
9
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
10
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
11
import edu.harvard.iq.dataverse.authorization.DataverseRole;
12
import edu.harvard.iq.dataverse.authorization.Permission;
13
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
14
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
15
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
16
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
17
import edu.harvard.iq.dataverse.authorization.users.User;
18
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
19
import edu.harvard.iq.dataverse.dataaccess.*;
20
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
21
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
22
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
23
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
24
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
25
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
26
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
27
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
28
import edu.harvard.iq.dataverse.engine.command.Command;
29
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
30
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
31
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
32
import edu.harvard.iq.dataverse.engine.command.impl.*;
33
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
34
import edu.harvard.iq.dataverse.export.ExportService;
35
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
36
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
37
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
38
import edu.harvard.iq.dataverse.globus.GlobusUtil;
39
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
40
import edu.harvard.iq.dataverse.makedatacount.*;
41
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
42
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
43
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
44
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
45
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
46
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
47
import edu.harvard.iq.dataverse.search.IndexServiceBean;
48
import edu.harvard.iq.dataverse.settings.FeatureFlags;
49
import edu.harvard.iq.dataverse.settings.JvmSettings;
50
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
51
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
52
import edu.harvard.iq.dataverse.util.*;
53
import edu.harvard.iq.dataverse.util.bagit.OREMap;
54
import edu.harvard.iq.dataverse.util.json.*;
55
import edu.harvard.iq.dataverse.workflow.Workflow;
56
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
57
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
58
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
59
import jakarta.ejb.EJB;
60
import jakarta.ejb.EJBException;
61
import jakarta.inject.Inject;
62
import jakarta.json.*;
63
import jakarta.json.stream.JsonParsingException;
64
import jakarta.servlet.http.HttpServletRequest;
65
import jakarta.servlet.http.HttpServletResponse;
66
import jakarta.ws.rs.*;
67
import jakarta.ws.rs.container.ContainerRequestContext;
68
import jakarta.ws.rs.core.*;
69
import jakarta.ws.rs.core.Response.Status;
70
import org.apache.commons.lang3.StringUtils;
71
import org.eclipse.microprofile.openapi.annotations.Operation;
72
import org.eclipse.microprofile.openapi.annotations.media.Content;
73
import org.eclipse.microprofile.openapi.annotations.media.Schema;
74
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
75
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
76
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
77
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
78
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
79
import org.glassfish.jersey.media.multipart.FormDataParam;
80

81
import java.io.IOException;
82
import java.io.InputStream;
83
import java.net.URI;
84
import java.sql.Timestamp;
85
import java.text.MessageFormat;
86
import java.text.SimpleDateFormat;
87
import java.time.LocalDate;
88
import java.time.LocalDateTime;
89
import java.time.ZoneId;
90
import java.time.format.DateTimeFormatter;
91
import java.time.format.DateTimeParseException;
92
import java.util.*;
93
import java.util.Map.Entry;
94
import java.util.concurrent.ExecutionException;
95
import java.util.function.Predicate;
96
import java.util.logging.Level;
97
import java.util.logging.Logger;
98
import java.util.regex.Pattern;
99
import java.util.stream.Collectors;
100

101
import static edu.harvard.iq.dataverse.api.ApiConstants.*;
102
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
103
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
104
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
105
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
106

107
@Path("datasets")
UNCOV
108
public class Datasets extends AbstractApiBean {
×
109

110
    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
    // Matches identifiers of the form: 11 lowercase-hex chars, "-", 12 lowercase-hex
    // chars, then an optional "." and any suffix.
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");
    
    @Inject DataverseSession session;

    @EJB
    DatasetServiceBean datasetService;

    @EJB
    DataverseServiceBean dataverseService;
    
    @EJB
    GlobusServiceBean globusService;

    @EJB
    UserNotificationServiceBean userNotificationService;
    
    @EJB
    PermissionServiceBean permissionService;
    
    @EJB
    AuthenticationServiceBean authenticationServiceBean;
    
    @EJB
    DDIExportServiceBean ddiExportService;

    @EJB
    MetadataBlockServiceBean metadataBlockService;
    
    @EJB
    DataFileServiceBean fileService;

    @EJB
    IngestServiceBean ingestService;

    @EJB
    EjbDataverseEngine commandEngine;
    
    @EJB
    IndexServiceBean indexService;

    @EJB
    S3PackageImporter s3PackageImporter;
     
    @EJB
    SettingsServiceBean settingsService;

    // TODO: Move to AbstractApiBean
    @EJB
    DatasetMetricsServiceBean datasetMetricsSvc;
    
    @EJB
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;

    @EJB
    EmbargoServiceBean embargoService;

    @EJB
    RetentionServiceBean retentionService;

    @Inject
    MakeDataCountLoggingServiceBean mdcLogService;
    
    @Inject
    DataverseRequestServiceBean dvRequestService;

    @Inject
    WorkflowServiceBean wfService;
    
    @Inject
    DataverseRoleServiceBean dataverseRoleService;

    @EJB
    DatasetVersionServiceBean datasetversionService;

    @Inject
    PrivateUrlServiceBean privateUrlService;

    @Inject
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;
190

191
    /**
     * Used to consolidate the way we parse and handle dataset versions.
     * One handler method per way a version can be addressed; the dispatcher
     * invokes the handler matching the version specifier it parsed.
     * @param <T> the result type produced by each handler
     */
    public interface DsVersionHandler<T> {
        /** Handler for the "latest" version case. */
        T handleLatest();
        /** Handler for the "draft" version case. */
        T handleDraft();
        /** Handler for an explicit major.minor version number. */
        T handleSpecific( long major, long minor );
        /** Handler for the "latest published" version case. */
        T handleLatestPublished();
    }
201
    
202
    @GET
203
    @AuthRequired
204
    @Path("{id}")
205
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response,  @QueryParam("returnOwners") boolean returnOwners) {
206
        return response( req -> {
×
207
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true)));
×
208
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
UNCOV
209
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
×
210
            //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
211
            if((latest != null) && latest.isReleased()) {
×
212
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
×
UNCOV
213
                mdcLogService.logEntry(entry);
×
214
            }
215
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
×
UNCOV
216
        }, getRequestUser(crc));
×
217
    }
218
    
219
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand 
220
    // to obtain the dataset that we are trying to export - which would handle
221
    // Auth in the process... For now, Auth isn't necessary - since export ONLY 
222
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
223
    @GET
224
    @Path("/export")
225
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" })
226
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
227

228
        try {
229
            Dataset dataset = datasetService.findByGlobalId(persistentId);
×
230
            if (dataset == null) {
×
UNCOV
231
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
232
            }
233
            
UNCOV
234
            ExportService instance = ExportService.getInstance();
×
235
            
UNCOV
236
            InputStream is = instance.getExport(dataset, exporter);
×
237
           
UNCOV
238
            String mediaType = instance.getMediaType(exporter);
×
239
            //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft 
240
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
×
UNCOV
241
            mdcLogService.logEntry(entry);
×
242
            
243
            return Response.ok()
×
244
                    .entity(is)
×
245
                    .type(mediaType).
×
246
                    build();
×
247
        } catch (Exception wr) {
×
248
            logger.warning(wr.getMessage());
×
UNCOV
249
            return error(Response.Status.FORBIDDEN, "Export Failed");
×
250
        }
251
    }
252

253
    @DELETE
    @AuthRequired
    @Path("{id}")
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
        // (and there's a comment that says "TODO: remove this command")
        // do we need an exposed API call for it? 
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, 
        // if the dataset only has 1 version... In other words, the functionality 
        // currently provided by this API is covered between the "deleteDraftVersion" and
        // "destroyDataset" API calls.  
        // (The logic below follows the current implementation of the underlying 
        // commands!)

        User u = getRequestUser(crc);
        return response( req -> {
            Dataset doomed = findDatasetOrDie(id);
            DatasetVersion doomedVersion = doomed.getLatestVersion();
            // destroy == true means the whole (single-version) dataset goes away;
            // this flag controls how the physical file locations are gathered below.
            boolean destroy = false;
            
            if (doomed.getVersions().size() == 1) {
                // Deleting a dataset's only version removes the dataset itself;
                // if it has been published, only a superuser may do that.
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
                }
                destroy = true;
            } else {
                // Multiple versions: only a latest version that is still a DRAFT
                // can be deleted through this call.
                if (!doomedVersion.isDraft()) {
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
                }
            }
            
            // Gather the locations of the physical files that will need to be 
            // deleted once the destroy command execution has been finalized:
            // (this must happen BEFORE the command runs, while the datafile
            // records still exist in the database)
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
            
            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));
            
            // If we have gotten this far, the destroy command has succeeded, 
            // so we can finalize it by permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no 
            // longer exist in the database, before attempting to delete 
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }
            
            return ok("Dataset " + id + " deleted");
        }, u);
    }
302
        
303
    /**
     * Irreversibly destroys a dataset. If the dataset has been released
     * (published), only a superuser is allowed to destroy it.
     */
    @DELETE
    @AuthRequired
    @Path("{id}/destroy")
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {

        User u = getRequestUser(crc);
        return response(req -> {
            // first check if dataset is released, and if so, if user is a superuser
            Dataset doomed = findDatasetOrDie(id);

            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
            }

            // Gather the locations of the physical files that will need to be 
            // deleted once the destroy command execution has been finalized:
            // (must happen BEFORE the command runs, while the datafile records
            // are still in the database)
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

            execCommand(new DestroyDatasetCommand(doomed, req));

            // If we have gotten this far, the destroy command has succeeded, 
            // so we can finalize permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no 
            // longer exist in the database, before attempting to delete 
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Dataset " + id + " destroyed");
        }, u);
    }
335
    
336
    /**
     * Deletes the DRAFT version of a dataset. Only the literal ":draft"
     * version specifier is accepted; any other versionId is a 400.
     */
    @DELETE
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
        }

        return response( req -> {
            Dataset dataset = findDatasetOrDie(id);
            DatasetVersion doomed = dataset.getLatestVersion();
            
            // The path said ":draft", but the dataset's latest version may not
            // actually be a draft — reject in that case.
            if (!doomed.isDraft()) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
            }
            
            // Gather the locations of the physical files that will need to be 
            // deleted once the destroy command execution has been finalized:
            // (must happen BEFORE the command runs, while the datafile records
            // are still in the database)
            
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
            
            execCommand( new DeleteDatasetVersionCommand(req, dataset));
            
            // If we have gotten this far, the delete command has succeeded - 
            // by either deleting the Draft version of a published dataset, 
            // or destroying an unpublished one. 
            // This means we can finalize permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no 
            // longer exist in the database, before attempting to delete 
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }
            
            return ok("Draft version of dataset " + id + " deleted");
        }, getRequestUser(crc));
    }
373
        
374
    @DELETE
375
    @AuthRequired
376
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
377
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
378
                boolean index = true;
×
379
        return response(req -> {
×
380
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
381
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
UNCOV
382
        }, getRequestUser(crc));
×
383
    }
384
        
385
    @PUT
386
    @AuthRequired
387
    @Path("{id}/citationdate")
388
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
389
        return response( req -> {
×
390
            if ( dsfTypeName.trim().isEmpty() ){
×
UNCOV
391
                return badRequest("Please provide a dataset field type in the requst body.");
×
392
            }
393
            DatasetFieldType dsfType = null;
×
394
            if (!":publicationDate".equals(dsfTypeName)) {
×
395
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
396
                if (dsfType == null) {
×
UNCOV
397
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
398
                }
399
            }
400

401
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
402
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
UNCOV
403
        }, getRequestUser(crc));
×
404
    }
405
    
406
    @DELETE
407
    @AuthRequired
408
    @Path("{id}/citationdate")
409
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
410
        return response( req -> {
×
411
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
412
            return ok("Citation Date for dataset " + id + " set to default");
×
UNCOV
413
        }, getRequestUser(crc));
×
414
    }
415
    
416
    @GET
417
    @AuthRequired
418
    @Path("{id}/versions")
419
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {
420

421
        return response( req -> {
×
422
            Dataset dataset = findDatasetOrDie(id);
×
UNCOV
423
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;
×
424

425
            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
×
426
                                .stream()
×
427
                                .map( d -> json(d, deepLookup) )
×
428
                                .collect(toJsonArray()));
×
UNCOV
429
        }, getRequestUser(crc));
×
430
    }
431
    
432
    @GET
433
    @AuthRequired
434
    @Path("{id}/versions/{versionId}")
435
    public Response getVersion(@Context ContainerRequestContext crc,
436
                               @PathParam("id") String datasetId,
437
                               @PathParam("versionId") String versionId,
438
                               @QueryParam("excludeFiles") Boolean excludeFiles,
439
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
440
                               @QueryParam("returnOwners") boolean returnOwners,
441
                               @Context UriInfo uriInfo,
442
                               @Context HttpHeaders headers) {
UNCOV
443
        return response( req -> {
×
444
            
445
            //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. 
UNCOV
446
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;
×
447
            
448
            Dataset dataset = findDatasetOrDie(datasetId);
×
UNCOV
449
            DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, 
×
450
                                                                            versionId, 
451
                                                                            dataset, 
452
                                                                            uriInfo, 
453
                                                                            headers, 
454
                                                                            includeDeaccessioned,
455
                                                                            checkPerms);
456

457
            if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) {
×
UNCOV
458
                return notFound("Dataset version not found");
×
459
            }
460

461
            if (excludeFiles == null ? true : !excludeFiles) {
×
UNCOV
462
                requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId());
×
463
            }
464

UNCOV
465
            JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
×
466
                                                 null, 
UNCOV
467
                                                 excludeFiles == null ? true : !excludeFiles, 
×
468
                                                 returnOwners);
UNCOV
469
            return ok(jsonBuilder);
×
470

UNCOV
471
        }, getRequestUser(crc));
×
472
    }
473

474
    /**
     * Lists file metadata for a dataset version, with optional paging
     * (limit/offset), filtering (content type, access status, category,
     * tabular tag, search text) and ordering.
     */
    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/files")
    public Response getVersionFiles(@Context ContainerRequestContext crc,
                                    @PathParam("id") String datasetId,
                                    @PathParam("versionId") String versionId,
                                    @QueryParam("limit") Integer limit,
                                    @QueryParam("offset") Integer offset,
                                    @QueryParam("contentType") String contentType,
                                    @QueryParam("accessStatus") String accessStatus,
                                    @QueryParam("categoryName") String categoryName,
                                    @QueryParam("tabularTagName") String tabularTagName,
                                    @QueryParam("searchText") String searchText,
                                    @QueryParam("orderCriteria") String orderCriteria,
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                    @Context UriInfo uriInfo,
                                    @Context HttpHeaders headers) {
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned);
            // Order criteria defaults to NameAZ; an unrecognized value is a 400.
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
            try {
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
            }
            FileSearchCriteria fileSearchCriteria;
            try {
                // accessStatus, when present, must name a FileAccessStatus enum constant.
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                // NOTE(review): if the FileSearchCriteria constructor can throw IAE for a
                // reason other than a bad accessStatus, List.of(accessStatus) would NPE
                // here on a null accessStatus — confirm against that constructor.
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
        }, getRequestUser(crc));
    }
515

516
    /**
     * Returns aggregate file counts for a dataset version: a total plus
     * per-content-type, per-category, per-tabular-tag and per-access-status
     * breakdowns, all restricted by the optional search criteria.
     */
    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/files/counts")
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
                                         @PathParam("id") String datasetId,
                                         @PathParam("versionId") String versionId,
                                         @QueryParam("contentType") String contentType,
                                         @QueryParam("accessStatus") String accessStatus,
                                         @QueryParam("categoryName") String categoryName,
                                         @QueryParam("tabularTagName") String tabularTagName,
                                         @QueryParam("searchText") String searchText,
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                         @Context UriInfo uriInfo,
                                         @Context HttpHeaders headers) {
        return response(req -> {
            FileSearchCriteria fileSearchCriteria;
            try {
                // accessStatus, when present, must name a FileAccessStatus enum constant.
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
            return ok(jsonObjectBuilder);
        }, getRequestUser(crc));
    }
553

554
    @GET
555
    @AuthRequired
556
    @Path("{id}/dirindex")
557
    @Produces("text/html")
558
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
559

560
        folderName = folderName == null ? "" : folderName;
×
UNCOV
561
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
×
562
        
563
        DatasetVersion version;
564
        try {
565
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
566
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
567
        } catch (WrappedResponse wr) {
×
568
            return wr.getResponse();
×
UNCOV
569
        }
×
570
        
UNCOV
571
        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);
×
572
        
573
        // return "NOT FOUND" if there is no such folder in the dataset version:
574
        
575
        if ("".equals(output)) {
×
UNCOV
576
            return notFound("Folder " + folderName + " does not exist");
×
577
        }
578
        
579
        
580
        String indexFileName = folderName.equals("") ? ".index.html"
×
581
                : ".index-" + folderName.replace('/', '_') + ".html";
×
UNCOV
582
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");
×
583

584
        
585
        return Response.ok()
×
UNCOV
586
                .entity(output)
×
587
                //.type("application/html").
UNCOV
588
                .build();
×
589
    }
590
    
591
    @GET
592
    @AuthRequired
593
    @Path("{id}/versions/{versionId}/metadata")
594
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
595
        return response( req -> ok(
×
596
                    jsonByBlocks(
×
597
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
×
UNCOV
598
                                .getDatasetFields())), getRequestUser(crc));
×
599
    }
600
    
601
    @GET
602
    @AuthRequired
603
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
604
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
605
                                            @PathParam("id") String datasetId,
606
                                            @PathParam("versionNumber") String versionNumber,
607
                                            @PathParam("block") String blockName,
608
                                            @Context UriInfo uriInfo,
609
                                            @Context HttpHeaders headers) {
610
        
611
        return response( req -> {
×
UNCOV
612
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
×
613
            
614
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
615
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
×
616
                if ( p.getKey().getName().equals(blockName) ) {
×
UNCOV
617
                    return ok(json(p.getKey(), p.getValue()));
×
618
                }
619
            }
×
620
            return notFound("metadata block named " + blockName + " not found");
×
UNCOV
621
        }, getRequestUser(crc));
×
622
    }
623

624
    /**
625
     * Add Signposting
626
     * @param datasetId
627
     * @param versionId
628
     * @param uriInfo
629
     * @param headers
630
     * @return
631
     */
632
    @GET
633
    @AuthRequired
634
    @Path("{id}/versions/{versionId}/linkset")
635
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, 
636
           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
637
        if (DS_VERSION_DRAFT.equals(versionId)) {
×
UNCOV
638
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
×
639
        }
UNCOV
640
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
641
        try {
642
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
643
            return Response
×
644
                    .ok(Json.createObjectBuilder()
×
UNCOV
645
                            .add("linkset",
×
646
                                    new SignpostingResources(systemConfig, dsv,
647
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
×
648
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
×
649
                                                    .getJsonLinkset())
×
650
                            .build())
×
651
                    .type(MediaType.APPLICATION_JSON).build();
×
652
        } catch (WrappedResponse wr) {
×
UNCOV
653
            return wr.getResponse();
×
654
        }
655
    }
656

657
    @GET
658
    @AuthRequired
659
    @Path("{id}/modifyRegistration")
660
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
661
        return response( req -> {
×
662
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
663
            return ok("Dataset " + id + " target url updated");
×
UNCOV
664
        }, getRequestUser(crc));
×
665
    }
666
    
667
    @POST
668
    @AuthRequired
669
    @Path("/modifyRegistrationAll")
670
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
671
        return response( req -> {
×
UNCOV
672
            datasetService.findAll().forEach( ds -> {
×
673
                try {
674
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
675
                } catch (WrappedResponse ex) {
×
676
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
677
                }
×
678
            });
×
679
            return ok("Update All Dataset target url completed");
×
UNCOV
680
        }, getRequestUser(crc));
×
681
    }
682
    
683
    @POST
684
    @AuthRequired
685
    @Path("{id}/modifyRegistrationMetadata")
686
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {
687

688
        try {
689
            Dataset dataset = findDatasetOrDie(id);
×
690
            if (!dataset.isReleased()) {
×
UNCOV
691
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
×
692
            }
693
        } catch (WrappedResponse ex) {
×
694
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
UNCOV
695
        }
×
696

697
        return response(req -> {
×
698
            Dataset dataset = findDatasetOrDie(id);
×
699
            execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req));
×
700
            List<String> args = Arrays.asList(dataset.getIdentifier());
×
701
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
×
UNCOV
702
        }, getRequestUser(crc));
×
703
    }
704
    
705
    @GET
706
    @AuthRequired
707
    @Path("/modifyRegistrationPIDMetadataAll")
708
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
709
        return response( req -> {
×
UNCOV
710
            datasetService.findAll().forEach( ds -> {
×
711
                try {
712
                    logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier());
×
713
                    if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) {
×
714
                        if (ds.isReleased()) {
×
UNCOV
715
                            logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered");
×
716
                        }
717
                    } else {
UNCOV
718
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
×
719
                    }
720
                } catch (WrappedResponse ex) {
×
721
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
722
                }
×
723
            });
×
724
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
×
UNCOV
725
        }, getRequestUser(crc));
×
726
    }
727
  
728
    /**
     * Replaces the metadata of the :draft version of a dataset with the
     * incoming JSON dataset-version payload. If the latest version is already
     * a draft it is updated in place; otherwise a new draft is created from
     * the incoming version. File metadata may NOT be changed via this
     * endpoint — a payload containing file metadata is rejected.
     *
     * @param jsonBody  JSON representation of a dataset version
     * @param id        dataset database id or persistent identifier
     * @param versionId must be the literal ":draft"; anything else is rejected
     * @return the managed (saved) draft version as JSON, or an error response
     */
    @PUT
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
        // Only the draft version may be written through this endpoint:
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
        }
        
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
            
            // clear possibly stale fields from the incoming dataset version.
            // creation and modification dates are updated by the commands.
            incomingVersion.setId(null);
            incomingVersion.setVersionNumber(null);
            incomingVersion.setMinorVersionNumber(null);
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
            incomingVersion.setDataset(ds);
            incomingVersion.setCreateTime(null);
            incomingVersion.setLastUpdateTime(null);
            
            // File changes are not allowed through this endpoint:
            if (!incomingVersion.getFileMetadatas().isEmpty()){
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
            }
            
            // Decide between updating the existing draft and creating a new one:
            boolean updateDraft = ds.getLatestVersion().isDraft();
            
            DatasetVersion managedVersion;
            if (updateDraft) {
                // Transplant the incoming fields and terms onto the existing
                // draft, validate the terms, then persist via the update command.
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
                managedVersion = managedDataset.getOrCreateEditVersion();
            } else {
                // No draft exists: validate the incoming terms and create a
                // brand-new draft version from the incoming payload.
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
            }
            return ok( json(managedVersion, true) );
                    
        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );
            
        } catch (WrappedResponse ex) {
            return ex.getResponse();
            
        }
    }
789

790
    @GET
791
    @AuthRequired
792
    @Path("{id}/versions/{versionId}/metadata")
793
    @Produces("application/ld+json, application/json-ld")
794
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
795
        try {
796
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
797
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
×
798
            OREMap ore = new OREMap(dsv,
×
799
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
×
UNCOV
800
            return ok(ore.getOREMapBuilder(true));
×
801

802
        } catch (WrappedResponse ex) {
×
803
            ex.printStackTrace();
×
804
            return ex.getResponse();
×
805
        } catch (Exception jpe) {
×
806
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
×
807
            jpe.printStackTrace();
×
UNCOV
808
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
×
809
        }
810
    }
811

812
    /**
     * Returns the dataset's metadata as JSON-LD for the latest version the
     * caller is allowed to see; delegates to
     * {@link #getVersionJsonLDMetadata} with {@code DS_VERSION_LATEST}.
     */
    @GET
    @AuthRequired
    @Path("{id}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
    }
819

820
    @PUT
821
    @AuthRequired
822
    @Path("{id}/metadata")
823
    @Consumes("application/ld+json, application/json-ld")
824
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
825

826
        try {
827
            Dataset ds = findDatasetOrDie(id);
×
UNCOV
828
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
829
            //Get draft state as of now
830

UNCOV
831
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
832
            //Get the current draft or create a new version to update
833
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
834
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
×
835
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
836
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
×
837
            if (!hasValidTerms) {
×
UNCOV
838
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
839
            }
840
            DatasetVersion managedVersion;
841
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
842
            managedVersion = managedDataset.getLatestVersion();
×
843
            String info = updateDraft ? "Version Updated" : "Version Created";
×
UNCOV
844
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
845

846
        } catch (WrappedResponse ex) {
×
847
            return ex.getResponse();
×
848
        } catch (JsonParsingException jpe) {
×
849
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
UNCOV
850
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
851
        }
852
    }
853

854
    @PUT
855
    @AuthRequired
856
    @Path("{id}/metadata/delete")
857
    @Consumes("application/ld+json, application/json-ld")
858
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
859
        try {
860
            Dataset ds = findDatasetOrDie(id);
×
UNCOV
861
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
862
            //Get draft state as of now
863

UNCOV
864
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
865
            //Get the current draft or create a new version to update
866
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
867
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
×
UNCOV
868
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
869
            DatasetVersion managedVersion;
870
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
871
            managedVersion = managedDataset.getLatestVersion();
×
872
            String info = updateDraft ? "Version Updated" : "Version Created";
×
UNCOV
873
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
874

875
        } catch (WrappedResponse ex) {
×
876
            ex.printStackTrace();
×
877
            return ex.getResponse();
×
878
        } catch (JsonParsingException jpe) {
×
879
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
880
            jpe.printStackTrace();
×
UNCOV
881
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
882
        }
883
    }
884

885
    @PUT
886
    @AuthRequired
887
    @Path("{id}/deleteMetadata")
888
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
889

UNCOV
890
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
891

UNCOV
892
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
893
    }
894

895
    /**
     * Removes the metadata field values described by {@code jsonBody} from the
     * dataset's draft version (created on demand). The body is either a single
     * field object or a {"fields": [...]} array. Every requested value must be
     * present in the draft; if any value is not found the whole request fails
     * with 400 and nothing is saved.
     *
     * @param jsonBody JSON describing the field values to delete
     * @param id       dataset database id or persistent identifier
     * @param req      the Dataverse request (authorization context)
     * @return the updated draft version as JSON, or an error response
     */
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
        try {

            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            //Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            List<DatasetField> fields = new LinkedList<>();
            DatasetField singleField = null;

            // Accept either a single field object or a {"fields": [...]} list:
            JsonArray fieldsJson = json.getJsonArray("fields");
            if (fieldsJson == null) {
                singleField = jsonParser().parseField(json, Boolean.FALSE);
                fields.add(singleField);
            } else {
                fields = jsonParser().parseMultipleFields(json);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            // NOTE(review): these accumulators are declared once and never
            // cleared between update fields; removal loops below drain them
            // against whichever dsf is current — verify this is intentional.
            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();

            // For each requested deletion, locate the matching field in the
            // draft and remove the matching value(s):
            for (DatasetField updateField : fields) {
                boolean found = false;
                for (DatasetField dsf : dsv.getDatasetFields()) {
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    // NOTE(review): 'found' is not reset per cvv, so
                                    // once one value matches, later missing values in
                                    // the same field are not reported — confirm.
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
                                                found = true;
                                                controlledVocabularyItemsToRemove.add(existing);
                                            }
                                        }
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                        }
                                    }
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
                                        dsf.getControlledVocabularyValues().remove(remove);
                                    }

                                } else {
                                    // Single controlled-vocabulary value:
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
                                        found = true;
                                        dsf.setSingleControlledVocabularyValue(null);
                                    }

                                }
                            } else {
                                if (!updateField.getDatasetFieldType().isCompound()) {
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
                                                    found = true;
                                                    // NOTE(review): the matching EXISTING value is
                                                    // edsfv, but the incoming dfv is queued for
                                                    // removal; the remove() below then depends on
                                                    // DatasetFieldValue equality to hit the existing
                                                    // item (cf. the CVV branch, which queues
                                                    // 'existing'). Verify this actually removes.
                                                    datasetFieldValueItemsToRemove.add(dfv);
                                                }
                                            }
                                            if (!found) {
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                            }
                                        }
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldValues().remove(remove);
                                        });

                                    } else {
                                        // Single primitive value:
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
                                            found = true;
                                            dsf.setSingleValue(null);
                                        }

                                    }
                                } else {
                                    // Compound values are matched by their joined display string:
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                        String deleteVal = getCompoundDisplayValue(dfcv);
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
                                            String existingString = getCompoundDisplayValue(existing);
                                            if (existingString.equals(deleteVal)) {
                                                found = true;
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
                                            }
                                        }
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
                                        });
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                        }
                                    }
                                }
                            }
                        } else {
                            // Non-repeatable field: deleting it clears both possible value kinds.
                            found = true;
                            dsf.setSingleValue(null);
                            dsf.setSingleControlledVocabularyValue(null);
                        }
                        break;
                    }
                }
                // The requested field type wasn't present in the draft at all:
                if (!found){
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                }
            }


            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
            return ok(json(managedVersion, true));

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());

        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
            return ex.getResponse();

        }
    
    }
1026
    
1027
    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
1028
        String returnString = "";
×
1029
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
×
1030
            for (String value : dsf.getValues()) {
×
1031
                if (!(value == null)) {
×
UNCOV
1032
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
×
1033
                }
1034
            }
×
1035
        }
×
UNCOV
1036
        return returnString;
×
1037
    }
1038
    
1039
    @PUT
1040
    @AuthRequired
1041
    @Path("{id}/editMetadata")
1042
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
1043

1044
        Boolean replaceData = replace != null;
×
1045
        DataverseRequest req = null;
×
UNCOV
1046
        req = createDataverseRequest(getRequestUser(crc));
×
1047

UNCOV
1048
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
1049
    }
1050
    
1051
    
1052
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
1053
        try {
1054
           
1055
            Dataset ds = findDatasetOrDie(id);
×
UNCOV
1056
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1057
            //Get the current draft or create a new version to update
1058
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
1059
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
1060
            List<DatasetField> fields = new LinkedList<>();
×
UNCOV
1061
            DatasetField singleField = null;
×
1062
            
1063
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1064
            if (fieldsJson == null) {
×
1065
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
UNCOV
1066
                fields.add(singleField);
×
1067
            } else {
UNCOV
1068
                fields = jsonParser().parseMultipleFields(json);
×
1069
            }
1070
            
1071

UNCOV
1072
            String valdationErrors = validateDatasetFieldValues(fields);
×
1073

1074
            if (!valdationErrors.isEmpty()) {
×
1075
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
×
UNCOV
1076
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
×
1077
            }
1078

UNCOV
1079
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1080

1081
            //loop through the update fields     
1082
            // and compare to the version fields  
1083
            //if exist add/replace values
1084
            //if not add entire dsf
1085
            for (DatasetField updateField : fields) {
×
1086
                boolean found = false;
×
1087
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
1088
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
1089
                        found = true;
×
1090
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1091
                            List priorCVV = new ArrayList<>();
×
UNCOV
1092
                            String cvvDisplay = "";
×
1093

1094
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1095
                                cvvDisplay = dsf.getDisplayValue();
×
1096
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
×
1097
                                    priorCVV.add(cvvOld);
×
UNCOV
1098
                                }
×
1099
                            }
1100

1101
                            if (replaceData) {
×
1102
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1103
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
×
1104
                                    dsf.setDatasetFieldValues(new ArrayList<>());
×
1105
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
×
1106
                                    priorCVV.clear();
×
UNCOV
1107
                                    dsf.getControlledVocabularyValues().clear();
×
1108
                                } else {
1109
                                    dsf.setSingleValue("");
×
UNCOV
1110
                                    dsf.setSingleControlledVocabularyValue(null);
×
1111
                                }
UNCOV
1112
                              cvvDisplay="";
×
1113
                            }
1114
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1115
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1116
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1117
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
×
UNCOV
1118
                                            priorCVV.add(cvv);
×
1119
                                        }
1120
                                    }
×
UNCOV
1121
                                    dsf.setControlledVocabularyValues(priorCVV);
×
1122
                                } else {
UNCOV
1123
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1124
                                }
1125
                            } else {
1126
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
1127
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1128
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
1129
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
×
1130
                                                dfv.setDatasetField(dsf);
×
UNCOV
1131
                                                dsf.getDatasetFieldValues().add(dfv);
×
1132
                                            }
UNCOV
1133
                                        }
×
1134
                                    } else {
UNCOV
1135
                                        dsf.setSingleValue(updateField.getValue());
×
1136
                                    }
1137
                                } else {
1138
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
1139
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
×
1140
                                            dfcv.setParentDatasetField(dsf);
×
1141
                                            dsf.setDatasetVersion(dsv);
×
UNCOV
1142
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
×
1143
                                        }
UNCOV
1144
                                    }
×
1145
                                }
1146
                            }
1147
                        } else {
×
1148
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
×
UNCOV
1149
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
×
1150
                            }
1151
                        }
1152
                        break;
1153
                    }
1154
                }
×
1155
                if (!found) {
×
1156
                    updateField.setDatasetVersion(dsv);
×
UNCOV
1157
                    dsv.getDatasetFields().add(updateField);
×
1158
                }
1159
            }
×
UNCOV
1160
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1161

UNCOV
1162
            return ok(json(managedVersion, true));
×
1163

1164
        } catch (JsonParseException ex) {
×
1165
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
UNCOV
1166
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1167

1168
        } catch (WrappedResponse ex) {
×
1169
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
UNCOV
1170
            return ex.getResponse();
×
1171

1172
        }
1173
    }
1174
    
1175
    private String validateDatasetFieldValues(List<DatasetField> fields) {
UNCOV
1176
        StringBuilder error = new StringBuilder();
×
1177

1178
        for (DatasetField dsf : fields) {
×
1179
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1180
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
×
1181
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1182
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) {
×
UNCOV
1183
                error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1184
            }
UNCOV
1185
        }
×
1186

1187
        if (!error.toString().isEmpty()) {
×
UNCOV
1188
            return (error.toString());
×
1189
        }
UNCOV
1190
        return "";
×
1191
    }
1192
    
1193
    /**
     * Deprecated GET variant of the publish action; delegates to
     * {@link #publishDataset} with {@code mustBeIndexed=false}.
     *
     * (Note: the method name's "Datase" typo is preserved — renaming a public
     * endpoint method is out of scope for a documentation pass.)
     *
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
     */
    @GET
    @AuthRequired
    @Path("{id}/actions/:publish")
    @Deprecated
    public Response publishDataseUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type ) {
        // Log every use so operators can steer clients to the POST endpoint.
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
        return publishDataset(crc, id, type, false);
    }
1204

1205
    /**
     * Publishes a dataset (or, for superusers, curates the already-published
     * current version in place).
     *
     * @param crc           request context; the caller must be authenticated
     * @param id            dataset id or persistent identifier
     * @param type          required: "major", "minor", or "updatecurrent"
     *                      (case-insensitive; "updatecurrent" is superuser-only)
     * @param mustBeIndexed when true, refuse (409) if the dataset appears to
     *                      have indexing or permission-indexing pending
     * @return 200 with the dataset JSON on success; 202 when publication is
     *         handed off to a workflow; 400/403/409 on validation failures;
     *         500 when the curate/archive commands fail
     */
    @POST
    @AuthRequired
    @Path("{id}/actions/:publish")
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
        try {
            if (type == null) {
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
            }
            boolean updateCurrent=false;
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            type = type.toLowerCase();
            boolean isMinor=false;
            switch (type) {
                case "minor":
                    isMinor = true;
                    break;
                case "major":
                    isMinor = false;
                    break;
                case "updatecurrent":
                    // In-place update of the released version is restricted to superusers.
                    if (user.isSuperuser()) {
                        updateCurrent = true;
                    } else {
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
                    }
                    break;
                default:
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
            }

            Dataset ds = findDatasetOrDie(id);
            
            // Publication requires valid terms of use/access on the latest version.
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
            if (!hasValidTerms) {
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
            }
            
            if (mustBeIndexed) {
                logger.fine("IT: " + ds.getIndexTime());
                logger.fine("MT: " + ds.getModificationTime());
                logger.fine("PIT: " + ds.getPermissionIndexTime());
                logger.fine("PMT: " + ds.getPermissionModificationTime());
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
                }
                /*
                 * Some calls, such as the /datasets/actions/:import* commands do not set the
                 * modification or permission modification times. The checks here are trying to
                 * see if indexing or permissionindexing could be pending, so they check to see
                 * if the relevant modification time is set and if so, whether the index is also
                 * set and if so, if it after the modification time. If the modification time is
                 * set and the index time is null or is before the mod time, the 409/conflict
                 * error is returned.
                 *
                 */
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
                }
            }
            if (updateCurrent) {
                /*
                 * Note: The code here mirrors that in the
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
                 * to the core logic (i.e. beyond updating the messaging about results) should
                 * be applied to the code there as well.
                 */
                String errorMsg = null;
                String successMsg = null;
                try {
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
                    ds = commandEngine.submit(cmd);
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");

                    // If configured, update archive copy as well
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
                    DatasetVersion updateVersion = ds.getLatestVersion();
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
                    if (archiveCommand != null) {
                        // Delete the record of any existing copy since it is now out of date/incorrect
                        updateVersion.setArchivalCopyLocation(null);
                        /*
                         * Then try to generate and submit an archival copy. Note that running this
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
                         * "The attribute [id] of class
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
                         * key column in the database. Updates are not allowed." To avoid that, and to
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
                         * pulled this out as a separate submit().
                         */
                        try {
                            updateVersion = commandEngine.submit(archiveCommand);
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
                            } else {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
                            }
                        } catch (CommandException ex) {
                            // Archival submission is best-effort: report failure in the success
                            // message rather than failing the whole request.
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
                            logger.severe(ex.getMessage());
                        }
                    }
                } catch (CommandException ex) {
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                    logger.severe(ex.getMessage());
                }
                if (errorMsg != null) {
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
                } else {
                    return Response.ok(Json.createObjectBuilder()
                            .add("status", ApiConstants.STATUS_OK)
                            .add("status_details", successMsg)
                            .add("data", json(ds)).build())
                            .type(MediaType.APPLICATION_JSON)
                            .build();
                }
            } else {
                // Normal publish: a configured workflow yields 202 Accepted, otherwise 200 OK.
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
                        createDataverseRequest(user),
                        isMinor));
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1331

1332
    /**
     * Finalizes publication of a migrated dataset (superuser-only), optionally
     * setting the release/publication date from a JSON-LD body and assigning a
     * version number when one is not already set.
     *
     * @param crc                request context; caller must be a superuser
     * @param jsonldBody         JSON-LD metadata; the schema.org "datePublished"
     *                           term, when present, becomes the release time
     * @param id                 dataset id or persistent identifier
     * @param contactPIDProvider when true, the PID provider is contacted during
     *                           finalization (passed inverted to the command)
     * @return 200 (or 202 when a pre-publish workflow takes over) with dataset
     *         JSON; 400 on unusable dates; 403 for non-superusers; 500 on
     *         command failure
     */
    @POST
    @AuthRequired
    @Path("{id}/actions/:releasemigrated")
    @Consumes("application/ld+json, application/json-ld")
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
            }

            Dataset ds = findDatasetOrDie(id);
            try {
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
                logger.fine("Submitted date: " + pubDate);
                LocalDateTime dateTime = null;
                if(!StringUtils.isEmpty(pubDate)) {
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
                    final Timestamp time = Timestamp.valueOf(dateTime);
                    //Set version release date
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
                }
                // dataset.getPublicationDateFormattedYYYYMMDD())
                // Assign a version number if not set
                if (ds.getLatestVersion().getVersionNumber() == null) {

                    if (ds.getVersions().size() == 1) {
                        // First Release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
                        // Minor bump: keep major number, increment minor.
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
                    } else {
                        // major, non-first release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    }
                }
                if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) {
                    //Also set publication date if this is the first
                    if(dateTime != null) {
                      ds.setPublicationDate(Timestamp.valueOf(dateTime));
                    }
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
                    // is null, so set it here.
                    ds.setReleaseUser((AuthenticatedUser) user);
                }
            } catch (Exception e) {
                // Any parse/conversion problem in the submitted metadata becomes a 400.
                logger.fine(e.getMessage());
                throw new BadRequestException("Unable to set publication date ("
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
            }
            /*
             * Note: The code here mirrors that in the
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
             * to the core logic (i.e. beyond updating the messaging about results) should
             * be applied to the code there as well.
             */
            String errorMsg = null;
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);

            try {
                // ToDo - should this be in onSuccess()? May relate to todo above
                if (prePubWf.isPresent()) {
                    // Start the workflow, the workflow will call FinalizeDatasetPublication later
                    wfService.start(prePubWf.get(),
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
                            false);
                } else {
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
                            createDataverseRequest(user), !contactPIDProvider);
                    ds = commandEngine.submit(cmd);
                }
            } catch (CommandException ex) {
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                logger.severe(ex.getMessage());
            }

            if (errorMsg != null) {
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
            } else {
                // Workflow path is asynchronous, hence 202 Accepted.
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
            }

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1422

1423
    @POST
1424
    @AuthRequired
1425
    @Path("{id}/move/{targetDataverseAlias}")
1426
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
1427
        try {
1428
            User u = getRequestUser(crc);
×
1429
            Dataset ds = findDatasetOrDie(id);
×
1430
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
×
1431
            if (target == null) {
×
UNCOV
1432
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
×
1433
            }
1434
            //Command requires Super user - it will be tested by the command
1435
            execCommand(new MoveDatasetCommand(
×
UNCOV
1436
                    createDataverseRequest(u), ds, target, force
×
1437
            ));
1438
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
×
1439
        } catch (WrappedResponse ex) {
×
1440
            if (ex.getCause() instanceof UnforcedCommandException) {
×
UNCOV
1441
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
×
1442
            } else {
UNCOV
1443
                return ex.getResponse();
×
1444
            }
1445
        }
1446
    }
1447

1448
    /**
     * Sets an embargo on a list of files in a dataset.
     *
     * Request body (JSON): {@code dateAvailable} (ISO local date, must be in
     * the future and within :MaxEmbargoDurationInMonths), {@code reason}, and
     * optional {@code fileIds}. Embargoes must be enabled via the
     * :MaxEmbargoDurationInMonths setting (absent or 0 means disabled; -1
     * means no upper limit). Only superusers may embargo already-released
     * files; existing embargoes left with no files are deleted.
     *
     * @param crc      request context; caller must be authenticated
     * @param id       dataset id or persistent identifier
     * @param jsonBody embargo specification as described above
     * @return 200 on success; 400/401/403/409 on the various failure modes
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:set-embargo")
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
        
        // Embargo changes are rejected while the latest version has invalid terms.
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
        
        if (!hasValidTerms){
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        /*
         * This is only a pre-test - if there's no draft version, there are clearly no
         * files that a normal user can change. The converse is not true. A draft
         * version could contain only files that have already been released. Further, we
         * haven't checked the file list yet so the user could still be trying to change
         * released files even if there are some unreleased/draft-only files. Doing this
         * check here does avoid having to do further parsing for some error cases. It
         * also checks the user can edit this dataset, so we don't have to make that
         * check later.
         */

        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
        long maxEmbargoDurationInMonths = 0;
        try {
            maxEmbargoDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // parseLong(null) throws NumberFormatException whose message contains
            // "null" — that is how an unset setting is detected here.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
            }
        }
        if (maxEmbargoDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
        }

        JsonObject json = JsonUtil.getJsonObject(jsonBody);

        Embargo embargo = new Embargo();


        LocalDate currentDateTime = LocalDate.now();
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));

        // check :MaxEmbargoDurationInMonths if -1
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
        // dateAvailable is not in the past
        if (dateAvailable.isAfter(currentDateTime)){
            embargo.setDateAvailable(dateAvailable);
        } else {
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
        }

        // dateAvailable is within limits
        if (maxEmbargoDateTime != null){
            if (dateAvailable.isAfter(maxEmbargoDateTime)){
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
            }
        }

        embargo.setReason(json.getString("reason"));

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> filesToEmbargo = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            JsonArray fileIds = json.getJsonArray("fileIds");
            for (JsonValue jsv : fileIds) {
                try {
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
                    filesToEmbargo.add(dataFile);
                } catch (WrappedResponse ex) {
                    return ex.getResponse();
                }
            }
        }

        // Embargoes that end up with no remaining files are collected for deletion.
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(filesToEmbargo)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : filesToEmbargo) {
                // superuser can overrule an existing embargo, even on released files
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "You do not have permission to embargo the following files")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Merge before attaching so files reference a managed entity.
            embargo=embargoService.merge(embargo);
            // Good request, so add the embargo. Track any existing embargoes so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : filesToEmbargo) {
                Embargo emb = datafile.getEmbargo();
                if (emb != null) {
                    emb.getDataFiles().remove(datafile);
                    if (emb.getDataFiles().isEmpty()) {
                        orphanedEmbargoes.add(emb);
                    }
                }
                // Save merges the datafile with an embargo into the context
                datafile.setEmbargo(embargo);
                fileService.save(datafile);
            }
            //Call service to get action logged
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
            if (orphanedEmbargoes.size() > 0) {
                for (Embargo emb : orphanedEmbargoes) {
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
                }
            }
            //If superuser, report changes to any released files
            if (authenticatedUser.isSuperuser()) {
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
                if (!releasedFiles.isBlank()) {
                    actionLogSvc
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
                                            + releasedFiles + ".")
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
                }
            }
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
1603

1604
    @POST
1605
    @AuthRequired
1606
    @Path("{id}/files/actions/:unset-embargo")
1607
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1608

1609
        // user is authenticated
UNCOV
1610
        AuthenticatedUser authenticatedUser = null;
×
1611
        try {
1612
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1613
        } catch (WrappedResponse ex) {
×
1614
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
UNCOV
1615
        }
×
1616

1617
        Dataset dataset;
1618
        try {
1619
            dataset = findDatasetOrDie(id);
×
1620
        } catch (WrappedResponse ex) {
×
1621
            return ex.getResponse();
×
UNCOV
1622
        }
×
1623

1624
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1625
        // check if files are unreleased(DRAFT?)
1626
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1627
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
UNCOV
1628
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1629
        }
1630

1631
        // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
1632
        //Todo - is 400 right for embargoes not enabled
1633
        //Todo - handle getting Long for duration in one place (settings getLong method? or is that only in wrapper (view scoped)?
UNCOV
1634
        int maxEmbargoDurationInMonths = 0;
×
1635
        try {
1636
            maxEmbargoDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1637
        } catch (NumberFormatException nfe){
×
1638
            if (nfe.getMessage().contains("null")) {
×
UNCOV
1639
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1640
            }
1641
        }
×
1642
        if (maxEmbargoDurationInMonths == 0){
×
UNCOV
1643
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1644
        }
1645

UNCOV
1646
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1647

1648
        List<DataFile> datasetFiles = dataset.getFiles();
×
UNCOV
1649
        List<DataFile> embargoFilesToUnset = new LinkedList<>();
×
1650

1651
        // extract fileIds from json, find datafiles and add to list
1652
        if (json.containsKey("fileIds")){
×
1653
            JsonArray fileIds = json.getJsonArray("fileIds");
×
UNCOV
1654
            for (JsonValue jsv : fileIds) {
×
1655
                try {
1656
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1657
                    embargoFilesToUnset.add(dataFile);
×
1658
                } catch (WrappedResponse ex) {
×
1659
                    return ex.getResponse();
×
1660
                }
×
UNCOV
1661
            }
×
1662
        }
1663

UNCOV
1664
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1665
        // check if files belong to dataset
1666
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
×
1667
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1668
            boolean badFiles = false;
×
UNCOV
1669
            for (DataFile datafile : embargoFilesToUnset) {
×
1670
                // superuser can overrule an existing embargo, even on released files
1671
                if (datafile.getEmbargo()==null || ((datafile.isReleased() && datafile.getEmbargo() != null) && !authenticatedUser.isSuperuser())) {
×
1672
                    restrictedFiles.add(datafile.getId());
×
UNCOV
1673
                    badFiles = true;
×
1674
                }
1675
            }
×
1676
            if (badFiles) {
×
1677
                return Response.status(Status.FORBIDDEN)
×
1678
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1679
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
×
1680
                                .add("files", restrictedFiles).build())
×
UNCOV
1681
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1682
            }
1683
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
1684
            // delete them if there are no files left that reference them.
1685
            for (DataFile datafile : embargoFilesToUnset) {
×
1686
                Embargo emb = datafile.getEmbargo();
×
1687
                if (emb != null) {
×
1688
                    emb.getDataFiles().remove(datafile);
×
1689
                    if (emb.getDataFiles().isEmpty()) {
×
UNCOV
1690
                        orphanedEmbargoes.add(emb);
×
1691
                    }
1692
                }
1693
                // Save merges the datafile with an embargo into the context
1694
                datafile.setEmbargo(null);
×
1695
                fileService.save(datafile);
×
1696
            }
×
1697
            if (orphanedEmbargoes.size() > 0) {
×
1698
                for (Embargo emb : orphanedEmbargoes) {
×
1699
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
UNCOV
1700
                }
×
1701
            }
1702
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
1703
            if(!releasedFiles.isBlank()) {
×
1704
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
×
1705
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
UNCOV
1706
                actionLogSvc.log(removeRecord);
×
1707
            }
UNCOV
1708
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
×
1709
        } else {
UNCOV
1710
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1711
        }
1712
    }
1713

1714
    /**
     * Sets (or replaces) a retention period on the files listed in the request body.
     * Body shape: {"dateUnavailable": "yyyy-MM-dd", "reason": "...", "fileIds": [...]};
     * "reason" is optional. Non-superusers may only act on unreleased files and must
     * hold EditDataset; the feature is gated on the :MinRetentionDurationInMonths setting.
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:set-retention")
    public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        // Reject the request outright if the latest version's terms of use/access are invalid.
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);

        if (!hasValidTerms){
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        // check if files are unreleased(DRAFT?)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if retentions are allowed(:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set(null) return 400
        long minRetentionDurationInMonths = 0;
        try {
            minRetentionDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // Long.parseLong(null) reports "null" in its message; that is how an unset
            // setting is detected here.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
            }
        }
        if (minRetentionDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
        }

        JsonObject json;
        try {
            json = JsonUtil.getJsonObject(jsonBody);
        } catch (JsonException ex) {
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
        }

        Retention retention = new Retention();


        LocalDate currentDateTime = LocalDate.now();

        // Extract the dateUnavailable - check if specified and valid
        String dateUnavailableStr = "";
        LocalDate dateUnavailable;
        try {
            dateUnavailableStr = json.getString("dateUnavailable");
            dateUnavailable = LocalDate.parse(dateUnavailableStr);
        } catch (NullPointerException npex) {
            // JsonObject.getString throws NPE when the key is absent
            return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified");
        } catch (ClassCastException ccex) {
            // ...and CCE when the value is not a JSON string
            return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string");
        } catch (DateTimeParseException dtpex) {
            return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr);
        }

        // check :MinRetentionDurationInMonths if -1
        // A setting of -1 means "no minimum"; otherwise compute the earliest acceptable date.
        LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? LocalDate.now().plusMonths(minRetentionDurationInMonths) : null;
        // dateUnavailable is not in the past
        if (dateUnavailable.isAfter(currentDateTime)){
            retention.setDateUnavailable(dateUnavailable);
        } else {
            return error(Status.BAD_REQUEST, "Date unavailable can not be in the past");
        }

        // dateAvailable is within limits
        if (minRetentionDateTime != null){
            if (dateUnavailable.isBefore(minRetentionDateTime)){
                return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
            }
        }
        
        try {
            String reason = json.getString("reason");
            retention.setReason(reason);
        } catch (NullPointerException npex) {
            // ignoring; no reason specified is OK, it is optional
        } catch (ClassCastException ccex) {
            return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string");
        }


        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> filesToRetention = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            try {
                JsonArray fileIds = json.getJsonArray("fileIds");
                for (JsonValue jsv : fileIds) {
                    try {
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
                        filesToRetention.add(dataFile);
                    } catch (WrappedResponse ex) {
                        return ex.getResponse();
                    }
                }
            } catch (ClassCastException ccex) {
                return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings");
            } catch (NullPointerException npex) {
                return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified");
            }
        } else {
            return error(Status.BAD_REQUEST, "No fileIds specified");
        }

        List<Retention> orphanedRetentions = new ArrayList<Retention>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(filesToRetention)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : filesToRetention) {
                // superuser can overrule an existing retention, even on released files
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "You do not have permission to set a retention period for the following files")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Persist the new retention first so the files below can reference the managed entity.
            retention=retentionService.merge(retention);
            // Good request, so add the retention. Track any existing retentions so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : filesToRetention) {
                Retention ret = datafile.getRetention();
                if (ret != null) {
                    ret.getDataFiles().remove(datafile);
                    if (ret.getDataFiles().isEmpty()) {
                        orphanedRetentions.add(ret);
                    }
                }
                // Save merges the datafile with an retention into the context
                datafile.setRetention(retention);
                fileService.save(datafile);
            }
            //Call service to get action logged
            // NOTE(review): the returned id is unused; save() is called for its action-log side effect.
            long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier());
            if (orphanedRetentions.size() > 0) {
                for (Retention ret : orphanedRetentions) {
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
                }
            }
            //If superuser, report changes to any released files
            if (authenticatedUser.isSuperuser()) {
                String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased())
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
                if (!releasedFiles.isBlank()) {
                    actionLogSvc
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo")
                                    .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) "
                                            + releasedFiles + ".")
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
                }
            }
            return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
1893

1894
    /**
     * Removes the retention periods from the files listed in the request body's
     * "fileIds" array. Non-superusers may only act on unreleased files and must hold
     * EditDataset; the feature is gated on the :MinRetentionDurationInMonths setting.
     * Retention entities left with no referencing files are deleted afterwards.
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:unset-retention")
    public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        // check if files are unreleased(DRAFT?)
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if retentions are allowed(:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set(null) return 400
        int minRetentionDurationInMonths = 0;
        try {
            minRetentionDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // Integer.parseInt(null) reports "null" in its message; that is how an
            // unset setting is detected here.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
            }
        }
        if (minRetentionDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
        }

        JsonObject json;
        try {
            json = JsonUtil.getJsonObject(jsonBody);
        } catch (JsonException ex) {
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
        }

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> retentionFilesToUnset = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            try {
                JsonArray fileIds = json.getJsonArray("fileIds");
                for (JsonValue jsv : fileIds) {
                    try {
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
                        retentionFilesToUnset.add(dataFile);
                    } catch (WrappedResponse ex) {
                        return ex.getResponse();
                    }
                }
            } catch (ClassCastException ccex) {
                return error(Status.BAD_REQUEST, "fileIds must be an array of id strings");
            } catch (NullPointerException npex) {
                return error(Status.BAD_REQUEST, "No fileIds specified");
            }
        } else {
            return error(Status.BAD_REQUEST, "No fileIds specified");
        }

        List<Retention> orphanedRetentions = new ArrayList<Retention>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(retentionFilesToUnset)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : retentionFilesToUnset) {
                // superuser can overrule an existing retention, even on released files
                if (datafile.getRetention()==null || ((datafile.isReleased() && datafile.getRetention() != null) && !authenticatedUser.isSuperuser())) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Good request, so remove the retention from the files. Track any existing retentions so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : retentionFilesToUnset) {
                Retention ret = datafile.getRetention();
                if (ret != null) {
                    ret.getDataFiles().remove(datafile);
                    if (ret.getDataFiles().isEmpty()) {
                        orphanedRetentions.add(ret);
                    }
                }
                // Save merges the datafile with an retention into the context
                datafile.setRetention(null);
                fileService.save(datafile);
            }
            if (orphanedRetentions.size() > 0) {
                for (Retention ret : orphanedRetentions) {
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
                }
            }
            // Record an admin action-log entry when a superuser lifted retentions from released files.
            String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
            if(!releasedFiles.isBlank()) {
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + ".");
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
                actionLogSvc.log(removeRecord);
            }
            return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
2014

2015
    @PUT
2016
    @AuthRequired
2017
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
2018
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
2019
        try {
2020
            User u = getRequestUser(crc);
×
2021
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
2022
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
2023
            if (linked == null){
×
UNCOV
2024
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
2025
            }
2026
            if (linking == null) {
×
UNCOV
2027
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
2028
            }
2029
            execCommand(new LinkDatasetCommand(
×
UNCOV
2030
                    createDataverseRequest(u), linking, linked
×
2031
            ));
2032
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
2033
        } catch (WrappedResponse ex) {
×
UNCOV
2034
            return ex.getResponse();
×
2035
        }
2036
    }
2037

2038
    @GET
2039
    @Path("{id}/versions/{versionId}/customlicense")
2040
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
2041
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
UNCOV
2042
        User user = session.getUser();
×
2043
        String persistentId;
2044
        try {
2045
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
×
UNCOV
2046
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
×
2047
            }
2048
            persistentId = getRequestParameter(":persistentId".substring(1));
×
2049
            if (versionId.equals(DS_VERSION_DRAFT)) {
×
UNCOV
2050
                versionId = "DRAFT";
×
2051
            }
2052
        } catch (WrappedResponse wrappedResponse) {
×
2053
            return wrappedResponse.getResponse();
×
2054
        }
×
2055
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
×
UNCOV
2056
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
×
2057
    }
2058

2059

2060
    @GET
2061
    @AuthRequired
2062
    @Path("{id}/links")
2063
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
2064
        try {
2065
            User u = getRequestUser(crc);
×
2066
            if (!u.isSuperuser()) {
×
UNCOV
2067
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
2068
            }
UNCOV
2069
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2070

2071
            long datasetId = dataset.getId();
×
2072
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
2073
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
2074
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
2075
                dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
×
2076
            }
×
2077
            JsonObjectBuilder response = Json.createObjectBuilder();
×
2078
            response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
×
2079
            return ok(response);
×
2080
        } catch (WrappedResponse wr) {
×
UNCOV
2081
            return wr.getResponse();
×
2082
        }
2083
    }
2084

2085
    /**
2086
     * Add a given assignment to a given user or group
2087
     * @param ra     role assignment DTO
2088
     * @param id     dataset id
2089
     * @param apiKey
2090
     */
2091
    @POST
2092
    @AuthRequired
2093
    @Path("{identifier}/assignments")
2094
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
2095
        try {
UNCOV
2096
            Dataset dataset = findDatasetOrDie(id);
×
2097
            
2098
            RoleAssignee assignee = findAssignee(ra.getAssignee());
×
2099
            if (assignee == null) {
×
UNCOV
2100
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
×
2101
            }
2102
            
2103
            DataverseRole theRole;
2104
            Dataverse dv = dataset.getOwner();
×
2105
            theRole = null;
×
2106
            while ((theRole == null) && (dv != null)) {
×
2107
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
×
2108
                    if (aRole.getAlias().equals(ra.getRole())) {
×
2109
                        theRole = aRole;
×
UNCOV
2110
                        break;
×
2111
                    }
2112
                }
×
UNCOV
2113
                dv = dv.getOwner();
×
2114
            }
2115
            if (theRole == null) {
×
2116
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
×
UNCOV
2117
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
×
2118
            }
2119

2120
            String privateUrlToken = null;
×
2121
            return ok(
×
2122
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
×
2123
        } catch (WrappedResponse ex) {
×
2124
            List<String> args = Arrays.asList(ex.getMessage());
×
2125
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
×
UNCOV
2126
            return ex.getResponse();
×
2127
        }
2128

2129
    }
2130
    
2131
    @DELETE
2132
    @AuthRequired
2133
    @Path("{identifier}/assignments/{id}")
2134
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
2135
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
×
UNCOV
2136
        if (ra != null) {
×
2137
            try {
2138
                findDatasetOrDie(dsId);
×
2139
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
×
2140
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
×
2141
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
×
2142
            } catch (WrappedResponse ex) {
×
UNCOV
2143
                return ex.getResponse();
×
2144
            }
2145
        } else {
2146
            List<String> args = Arrays.asList(Long.toString(assignmentId));
×
UNCOV
2147
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
×
2148
        }
2149
    }
2150

2151
    /**
     * Lists all role assignments on the given dataset as a JSON array.
     */
    @GET
    @AuthRequired
    @Path("{identifier}/assignments")
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
        // response(...) resolves the request, runs the lambda, and converts any
        // WrappedResponse thrown inside into the corresponding error Response.
        return response(req ->
                ok(execCommand(
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
    }
2160

2161
    @GET
2162
    @AuthRequired
2163
    @Path("{id}/privateUrl")
2164
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2165
        return response( req -> {
×
2166
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
2167
            return (privateUrl != null) ? ok(json(privateUrl))
×
2168
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
UNCOV
2169
        }, getRequestUser(crc));
×
2170
    }
2171

2172
    @POST
2173
    @AuthRequired
2174
    @Path("{id}/privateUrl")
2175
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
2176
        if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
×
UNCOV
2177
            throw new NotAcceptableException("Anonymized Access not enabled");
×
2178
        }
2179
        return response(req ->
×
2180
                ok(json(execCommand(
×
UNCOV
2181
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
×
2182
    }
2183

2184
    @DELETE
2185
    @AuthRequired
2186
    @Path("{id}/privateUrl")
2187
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2188
        return response( req -> {
×
2189
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2190
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
2191
            if (privateUrl != null) {
×
2192
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
UNCOV
2193
                return ok("Private URL deleted.");
×
2194
            } else {
UNCOV
2195
                return notFound("No Private URL to delete.");
×
2196
            }
UNCOV
2197
        }, getRequestUser(crc));
×
2198
    }
2199

2200
    /**
     * Lists thumbnail candidates for a dataset (the dataset logo plus eligible file
     * previews), each entry carrying a base64 image and/or the source dataFileId.
     * Requires permission to issue UpdateDatasetThumbnailCommand.
     */
    @GET
    @AuthRequired
    @Path("{id}/thumbnail/candidates")
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            boolean canUpdateThumbnail = false;
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
            if (!canUpdateThumbnail) {
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
            }
            JsonArrayBuilder data = Json.createArrayBuilder();
            // Include the uploaded dataset logo among the candidates, not just file previews.
            boolean considerDatasetLogoAsCandidate = true;
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
                JsonObjectBuilder candidate = Json.createObjectBuilder();
                String base64image = datasetThumbnail.getBase64image();
                if (base64image != null) {
                    logger.fine("found a candidate!");
                    candidate.add("base64image", base64image);
                }
                DataFile dataFile = datasetThumbnail.getDataFile();
                if (dataFile != null) {
                    candidate.add("dataFileId", dataFile.getId());
                }
                data.add(candidate);
            }
            return ok(data);
        } catch (WrappedResponse ex) {
            // NOTE(review): every WrappedResponse (including auth failures from
            // getRequestUser) is mapped to this 404 message — confirm that is intended.
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
        }
    }
2231

2232
    @GET
2233
    @Produces({"image/png"})
2234
    @Path("{id}/thumbnail")
2235
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
2236
        try {
2237
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2238
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
×
2239
            if(is == null) {
×
UNCOV
2240
                return notFound("Thumbnail not available");
×
2241
            }
2242
            return Response.ok(is).build();
×
2243
        } catch (WrappedResponse wr) {
×
UNCOV
2244
            return notFound("Thumbnail not available");
×
2245
        }
2246
    }
2247

2248
    @GET
2249
    @Produces({ "image/png" })
2250
    @Path("{id}/logo")
2251
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
2252
        try {
2253
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2254
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
×
2255
            if (is == null) {
×
UNCOV
2256
                return notFound("Logo not available");
×
2257
            }
2258
            return Response.ok(is).build();
×
2259
        } catch (WrappedResponse wr) {
×
UNCOV
2260
            return notFound("Logo not available");
×
2261
        }
2262
    }
2263

2264
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
2265
    @POST
2266
    @AuthRequired
2267
    @Path("{id}/thumbnail/{dataFileId}")
2268
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
2269
        try {
2270
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
2271
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
2272
        } catch (WrappedResponse wr) {
×
UNCOV
2273
            return wr.getResponse();
×
2274
        }
2275
    }
2276

2277
    /**
     * Sets an uploaded image (multipart form data, field "file") as the dataset
     * thumbnail, independent of the dataset's data files. Permission checks
     * happen inside {@code UpdateDatasetThumbnailCommand}.
     */
    @POST
    @AuthRequired
    @Path("{id}/thumbnail")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a logo for a dataset", 
               description = "Uploads a logo for a dataset")
    @APIResponse(responseCode = "200",
               description = "Dataset logo uploaded successfully")
    @Tag(name = "uploadDatasetLogo", 
         description = "Uploads a logo for a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))          
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
        try {
            // The command consumes the stream and stores the image as the thumbnail.
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }
2297

2298
    @DELETE
2299
    @AuthRequired
2300
    @Path("{id}/thumbnail")
2301
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2302
        try {
2303
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
2304
            return ok("Dataset thumbnail removed.");
×
2305
        } catch (WrappedResponse wr) {
×
UNCOV
2306
            return wr.getResponse();
×
2307
        }
2308
    }
2309

2310
    /**
     * Returns an rsync upload script for the dataset and locks the dataset for
     * DCM (Data Capture Module) upload.
     *
     * @deprecated the DCM/rsync upload mechanism is scheduled for removal;
     *             do not add new callers.
     */
    @Deprecated(forRemoval = true, since = "2024-07-07")
    @GET
    @AuthRequired
    @Path("{identifier}/dataCaptureModule/rsync")
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
        // 405 unless rsync is listed in the :UploadMethods setting.
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
        }
        Dataset dataset = null;
        try {
            dataset = findDatasetOrDie(id);
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
            
            // Lock the dataset as soon as the script is handed out, so nothing else
            // modifies it while the DCM upload is in flight.
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
            if (lock == null) {
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
            }
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (EJBException ex) {
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
        }
    }
2337
    
2338
    /**
2339
     * This api endpoint triggers the creation of a "package" file in a dataset
2340
     * after that package has been moved onto the same filesystem via the Data Capture Module.
2341
     * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file.
2342
     * The "package" can be downloaded over RSAL.
2343
     *
2344
     * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly.
2345
     *
2346
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
2347
     * But due to the possibly immense number of files (millions) the package approach was taken.
2348
     * This is relevant because the posix ("file") code contains many remnants of that development work.
2349
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
2350
     * -MAD 4.9.1
2351
     */
2352
    @POST
2353
    @AuthRequired
2354
    @Path("{identifier}/dataCaptureModule/checksumValidation")
2355
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
2356
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
UNCOV
2357
        AuthenticatedUser authenticatedUser = null;
×
2358
        try {
2359
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2360
        } catch (WrappedResponse ex) {
×
2361
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
2362
        }
×
2363
        if (!authenticatedUser.isSuperuser()) {
×
UNCOV
2364
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2365
        }
UNCOV
2366
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
2367
        try {
2368
            Dataset dataset = findDatasetOrDie(id);
×
2369
            if ("validation passed".equals(statusMessageFromDcm)) {
×
UNCOV
2370
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
2371

2372
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
×
2373
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
2374
                int totalSize = jsonFromDcm.getInt("totalSize");
×
UNCOV
2375
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
×
2376
                
2377
                if (storageDriverType.equals("file")) {
×
UNCOV
2378
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
2379

UNCOV
2380
                    ImportMode importMode = ImportMode.MERGE;
×
2381
                    try {
2382
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, new Long(totalSize), importMode));
×
2383
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
2384
                        String message = jsonFromImportJobKickoff.getString("message");
×
2385
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2386
                        job.add("jobId", jobId);
×
2387
                        job.add("message", message);
×
2388
                        return ok(job);
×
2389
                    } catch (WrappedResponse wr) {
×
2390
                        String message = wr.getMessage();
×
UNCOV
2391
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
×
2392
                    }
UNCOV
2393
                } else if(storageDriverType.equals(DataAccess.S3)) {
×
2394
                    
UNCOV
2395
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
2396
                    try {
2397
                        
2398
                        //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package
2399
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
UNCOV
2400
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize));
×
2401
                        
2402
                        if (packageFile == null) {
×
2403
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
UNCOV
2404
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
2405
                        }
2406
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
2407
                        if (dcmLock == null) {
×
UNCOV
2408
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
2409
                        } else {
2410
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
UNCOV
2411
                            dataset.removeLock(dcmLock);
×
2412
                        }
2413
                        
2414
                        // update version using the command engine to enforce user permissions and constraints
UNCOV
2415
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
2416
                            try {
2417
                                Command<Dataset> cmd;
2418
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
×
2419
                                commandEngine.submit(cmd);
×
2420
                            } catch (CommandException ex) {
×
2421
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
UNCOV
2422
                            }
×
2423
                        } else {
UNCOV
2424
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
×
2425
                                    + "single version in draft mode.";
UNCOV
2426
                            logger.log(Level.SEVERE, constraintError);
×
2427
                        }
2428

2429
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
UNCOV
2430
                        return ok(job);
×
2431
                        
2432
                    } catch (IOException e) {
×
2433
                        String message = e.getMessage();
×
UNCOV
2434
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
×
2435
                    }
2436
                } else {
UNCOV
2437
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
×
2438
                }
2439
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
2440
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
×
2441
                distinctAuthors.values().forEach((value) -> {
×
2442
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2443
                });
×
2444
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
2445
                if (superUsers != null && !superUsers.isEmpty()) {
×
2446
                    superUsers.forEach((au) -> {
×
2447
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
UNCOV
2448
                    });
×
2449
                }
UNCOV
2450
                return ok("User notified about checksum validation failure.");
×
2451
            } else {
UNCOV
2452
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
×
2453
            }
2454
        } catch (WrappedResponse ex) {
×
UNCOV
2455
            return ex.getResponse();
×
2456
        }
2457
    }
2458
    
2459

2460
    @POST
2461
    @AuthRequired
2462
    @Path("{id}/submitForReview")
2463
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2464
        try {
2465
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
UNCOV
2466
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2467
            
UNCOV
2468
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2469
            
2470
            result.add("inReview", inReview);
×
2471
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2472
            return ok(result);
×
2473
        } catch (WrappedResponse wr) {
×
UNCOV
2474
            return wr.getResponse();
×
2475
        }
2476
    }
2477

2478
    @POST
2479
    @AuthRequired
2480
    @Path("{id}/returnToAuthor")
2481
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2482

2483
        if (jsonBody == null || jsonBody.isEmpty()) {
×
UNCOV
2484
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2485
        }
UNCOV
2486
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2487
        try {
2488
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2489
            String reasonForReturn = null;
×
2490
            reasonForReturn = json.getString("reasonForReturn");
×
2491
            if ((reasonForReturn == null || reasonForReturn.isEmpty())
×
2492
                    && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) {
×
UNCOV
2493
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2494
            }
2495
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
UNCOV
2496
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2497

2498
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2499
            result.add("inReview", false);
×
2500
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2501
            return ok(result);
×
2502
        } catch (WrappedResponse wr) {
×
UNCOV
2503
            return wr.getResponse();
×
2504
        }
2505
    }
2506

2507
    @GET
2508
    @AuthRequired
2509
    @Path("{id}/curationStatus")
2510
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2511
        try {
2512
            Dataset ds = findDatasetOrDie(idSupplied);
×
2513
            DatasetVersion dsv = ds.getLatestVersion();
×
2514
            User user = getRequestUser(crc);
×
2515
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
UNCOV
2516
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2517
            } else {
UNCOV
2518
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2519
            }
2520
        } catch (WrappedResponse wr) {
×
UNCOV
2521
            return wr.getResponse();
×
2522
        }
2523
    }
2524

2525
    @PUT
2526
    @AuthRequired
2527
    @Path("{id}/curationStatus")
2528
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2529
        Dataset ds = null;
×
UNCOV
2530
        User u = null;
×
2531
        try {
2532
            ds = findDatasetOrDie(idSupplied);
×
2533
            u = getRequestUser(crc);
×
2534
        } catch (WrappedResponse wr) {
×
2535
            return wr.getResponse();
×
UNCOV
2536
        }
×
2537
        try {
2538
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2539
            return ok("Curation Status updated");
×
UNCOV
2540
        } catch (WrappedResponse wr) {
×
2541
            // Just change to Bad Request and send
UNCOV
2542
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2543
        }
2544
    }
2545

2546
    @DELETE
2547
    @AuthRequired
2548
    @Path("{id}/curationStatus")
2549
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2550
        Dataset ds = null;
×
UNCOV
2551
        User u = null;
×
2552
        try {
2553
            ds = findDatasetOrDie(idSupplied);
×
2554
            u = getRequestUser(crc);
×
2555
        } catch (WrappedResponse wr) {
×
2556
            return wr.getResponse();
×
UNCOV
2557
        }
×
2558
        try {
2559
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2560
            return ok("Curation Status deleted");
×
UNCOV
2561
        } catch (WrappedResponse wr) {
×
2562
            //Just change to Bad Request and send
UNCOV
2563
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2564
        }
2565
    }
2566

2567
    @GET
2568
    @AuthRequired
2569
    @Path("{id}/uploadurls")
2570
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2571
        try {
UNCOV
2572
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2573

2574
            boolean canUpdateDataset = false;
×
2575
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2576
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2577
            if (!canUpdateDataset) {
×
UNCOV
2578
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2579
            }
2580
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2581
            if (s3io == null) {
×
2582
                return error(Response.Status.NOT_FOUND,
×
UNCOV
2583
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2584
            }
2585
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
2586
            if (maxSize != null) {
×
2587
                if(fileSize > maxSize) {
×
UNCOV
2588
                    return error(Response.Status.BAD_REQUEST,
×
2589
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2590
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2591
                }
2592
            }
2593
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
2594
            if (limit != null) {
×
2595
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
UNCOV
2596
                    return error(Response.Status.BAD_REQUEST,
×
2597
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
UNCOV
2598
                                    "The remaing file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2599
                }
2600
            }
2601
            JsonObjectBuilder response = null;
×
UNCOV
2602
            String storageIdentifier = null;
×
2603
            try {
2604
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
UNCOV
2605
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2606

2607
            } catch (IOException io) {
×
2608
                logger.warning(io.getMessage());
×
2609
                throw new WrappedResponse(io,
×
2610
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request"));
×
UNCOV
2611
            }
×
2612

2613
            response.add("storageIdentifier", storageIdentifier);
×
2614
            return ok(response);
×
2615
        } catch (WrappedResponse wr) {
×
UNCOV
2616
            return wr.getResponse();
×
2617
        }
2618
    }
2619

2620
    @DELETE
2621
    @AuthRequired
2622
    @Path("mpupload")
2623
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2624
        try {
UNCOV
2625
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2626
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2627
            User user = session.getUser();
×
UNCOV
2628
            if (!user.isAuthenticated()) {
×
2629
                try {
2630
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2631
                } catch (WrappedResponse ex) {
×
UNCOV
2632
                    logger.info(
×
2633
                            "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
2634
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2635
                    throw ex;
×
UNCOV
2636
                }
×
2637
            }
2638
            boolean allowed = false;
×
2639
            if (dataset != null) {
×
2640
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
UNCOV
2641
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2642
            } else {
2643
                /*
2644
                 * The only legitimate case where a global id won't correspond to a dataset is
2645
                 * for uploads during creation. Given that this call will still fail unless all
2646
                 * three parameters correspond to an active multipart upload, it should be safe
2647
                 * to allow the attempt for an authenticated user. If there are concerns about
2648
                 * permissions, one could check with the current design that the user is allowed
2649
                 * to create datasets in some dataverse that is configured to use the storage
2650
                 * provider specified in the storageidentifier, but testing for the ability to
2651
                 * create a dataset in a specific dataverse would requiring changing the design
2652
                 * somehow (e.g. adding the ownerId to this call).
2653
                 */
UNCOV
2654
                allowed = true;
×
2655
            }
2656
            if (!allowed) {
×
UNCOV
2657
                return error(Response.Status.FORBIDDEN,
×
2658
                        "You are not permitted to abort file uploads with the supplied parameters.");
2659
            }
2660
            try {
2661
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2662
            } catch (IOException io) {
×
2663
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2664
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2665
                logger.warning(io.getMessage());
×
2666
                throw new WrappedResponse(io,
×
2667
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2668
            }
×
2669
            return Response.noContent().build();
×
2670
        } catch (WrappedResponse wr) {
×
UNCOV
2671
            return wr.getResponse();
×
2672
        }
2673
    }
2674

2675
    @PUT
2676
    @AuthRequired
2677
    @Path("mpupload")
2678
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2679
        try {
UNCOV
2680
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2681
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2682
            User user = session.getUser();
×
UNCOV
2683
            if (!user.isAuthenticated()) {
×
2684
                try {
2685
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2686
                } catch (WrappedResponse ex) {
×
UNCOV
2687
                    logger.info(
×
2688
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2689
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2690
                    throw ex;
×
UNCOV
2691
                }
×
2692
            }
2693
            boolean allowed = false;
×
2694
            if (dataset != null) {
×
2695
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
UNCOV
2696
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2697
            } else {
2698
                /*
2699
                 * The only legitimate case where a global id won't correspond to a dataset is
2700
                 * for uploads during creation. Given that this call will still fail unless all
2701
                 * three parameters correspond to an active multipart upload, it should be safe
2702
                 * to allow the attempt for an authenticated user. If there are concerns about
2703
                 * permissions, one could check with the current design that the user is allowed
2704
                 * to create datasets in some dataverse that is configured to use the storage
2705
                 * provider specified in the storageidentifier, but testing for the ability to
2706
                 * create a dataset in a specific dataverse would requiring changing the design
2707
                 * somehow (e.g. adding the ownerId to this call).
2708
                 */
UNCOV
2709
                allowed = true;
×
2710
            }
2711
            if (!allowed) {
×
UNCOV
2712
                return error(Response.Status.FORBIDDEN,
×
2713
                        "You are not permitted to complete file uploads with the supplied parameters.");
2714
            }
2715
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
UNCOV
2716
            logger.info("Etags: " + partETagBody);
×
2717
            try {
2718
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2719
                for (String partNo : object.keySet()) {
×
2720
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2721
                }
×
2722
                for (PartETag et : eTagList) {
×
2723
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2724
                }
×
2725
            } catch (JsonException je) {
×
2726
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2727
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
UNCOV
2728
            }
×
2729
            try {
2730
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2731
            } catch (IOException io) {
×
2732
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
UNCOV
2733
                logger.warning(io.getMessage());
×
2734
                try {
2735
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2736
                } catch (IOException e) {
×
2737
                    logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2738
                    logger.severe(io.getMessage());
×
UNCOV
2739
                }
×
2740

2741
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2742
            }
×
2743
            return ok("Multipart Upload completed");
×
2744
        } catch (WrappedResponse wr) {
×
UNCOV
2745
            return wr.getResponse();
×
2746
        }
2747
    }
2748

2749
    /**
     * Add a single file to an existing Dataset via a multipart/form-data upload.
     *
     * @param crc request context, used to resolve the calling user
     * @param idSupplied dataset database id or persistent identifier
     * @param jsonData optional JSON with file metadata; may also carry a
     *        pre-staged storageIdentifier plus fileName and mimeType for
     *        direct-upload workflows
     * @param fileInputStream the uploaded file content; absent when a
     *        pre-staged storageIdentifier is supplied in jsonData
     * @param contentDispositionHeader form-data disposition carrying the
     *        original file name; null when no file part was sent
     * @param formDataBodyPart form-data part, inspected for an explicit
     *        Content-Type header
     * @return JSON describing the added file on success, or an error response
     */
    @POST
    @AuthRequired
    @Path("{id}/add")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a file for a dataset", 
               description = "Uploads a file for a dataset")
    @APIResponse(responseCode = "200",
               description = "File uploaded successfully to dataset")
    @Tag(name = "addFileToDataset", 
         description = "Uploads a file for a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
    public Response addFileToDataset(@Context ContainerRequestContext crc,
                    @PathParam("id") String idSupplied,
                    @FormDataParam("jsonData") String jsonData,
                    @FormDataParam("file") InputStream fileInputStream,
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
                    ){

        // HTTP upload may be disabled installation-wide (e.g. rsync- or
        // Globus-only installations); refuse early with 503.
        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Find the Dataset (by id or persistent identifier)
        // -------------------------------------
        Dataset dataset;
        
        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        
        //------------------------------------
        // (2a) Make sure no version of the dataset contains a package file;
        // package-file datasets cannot take additional uploads.
        // --------------------------------------
        
        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN,
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }

        // (2b) Load up optional params via JSON
        //---------------------------------------
        OptionalFileParams optionalFileParams = null;
        msgt("(api) jsonData: " + jsonData);

        try {
            optionalFileParams = new OptionalFileParams(jsonData);
        } catch (DataFileTagException ex) {
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
        }
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
        }
        
        // -------------------------------------
        // (3) Get the file name and content type, either from the uploaded
        // form part or — for direct uploads — from the jsonData params.
        // -------------------------------------
        String newFilename = null;
        String newFileContentType = null;
        String newStorageIdentifier = null;
        if (null == contentDispositionHeader) {
            // No file part was sent: this must be a direct upload referencing
            // a pre-staged object via storageIdentifier.
            if (optionalFileParams.hasStorageIdentifier()) {
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
                
                // The identifier must point at a store this dataset is allowed
                // to use, otherwise reject.
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
                    return error(BAD_REQUEST,
                            "Dataset store configuration does not allow provided storageIdentifier.");
                }
                if (optionalFileParams.hasFileName()) {
                    newFilename = optionalFileParams.getFileName();
                    if (optionalFileParams.hasMimetype()) {
                        newFileContentType = optionalFileParams.getMimeType();
                    }
                }
            } else {
                return error(BAD_REQUEST,
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
            }
        } else {
            newFilename = contentDispositionHeader.getFileName();
            // Let's see if the form data part has the mime (content) type specified.
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
            // because that defaults to "text/plain" when no "Content-Type:" header is
            // present. Instead we'll go through the headers, and see if "Content-Type:"
            // is there. If not, we'll default to "application/octet-stream" - the generic
            // unknown type. This will prompt the application to run type detection and
            // potentially find something more accurate.
            // newFileContentType = formDataBodyPart.getMediaType().toString();

            for (String header : formDataBodyPart.getHeaders().keySet()) {
                if (header.equalsIgnoreCase("Content-Type")) {
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
                }
            }
            if (newFileContentType == null) {
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
            }
        }


        //-------------------
        // (4) Create the AddReplaceFileHelper object
        //-------------------
        msg("ADD!");

        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
                ingestService,
                datasetService,
                fileService,
                permissionSvc,
                commandEngine,
                systemConfig);


        //-------------------
        // (5) Run "runAddFileByDataset" — performs the actual add; errors are
        // reported through the helper rather than thrown.
        //-------------------
        addFileHelper.runAddFileByDataset(dataset,
                newFilename,
                newFileContentType,
                newStorageIdentifier,
                fileInputStream,
                optionalFileParams);


        if (addFileHelper.hasError()){
            //conflict response status added for 8859
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
            }
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
        } else {
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
            try {
                //msgt("as String: " + addFileHelper.getSuccessResult());
                /**
                 * @todo We need a consistent, sane way to communicate a human
                 * readable message to an API client suitable for human
                 * consumption. Imagine if the UI were built in Angular or React
                 * and we want to return a message from the API as-is to the
                 * user. Human readable.
                 */
                logger.fine("successMsg: " + successMsg);
                // A duplicate-checksum warning is non-fatal: still return 200,
                // but surface the warning alongside the success payload.
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
                } else {
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
                }

                //"Look at that!  You added a file! (hey hey, it may have worked)");
            } catch (NoFilesException ex) {
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");

            }
        }
        
    } // end: addFileToDataset
2935

2936

2937
    /**
2938
     * Clean storage of a Dataset
2939
     *
2940
     * @param idSupplied
2941
     * @return
2942
     */
2943
    @GET
2944
    @AuthRequired
2945
    @Path("{id}/cleanStorage")
2946
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2947
        // get user and dataset
UNCOV
2948
        User authUser = getRequestUser(crc);
×
2949

2950
        Dataset dataset;
2951
        try {
2952
            dataset = findDatasetOrDie(idSupplied);
×
2953
        } catch (WrappedResponse wr) {
×
2954
            return wr.getResponse();
×
UNCOV
2955
        }
×
2956
        
2957
        // check permissions
2958
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
UNCOV
2959
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
2960
        }
2961

UNCOV
2962
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
2963

2964
        // check if no legacy files are present
2965
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
2966
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
UNCOV
2967
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
2968
        }
2969

UNCOV
2970
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
2971
        List<String> deleted;
2972
        try {
2973
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
2974
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
2975
        } catch (IOException ex) {
×
2976
            logger.log(Level.SEVERE, null, ex);
×
2977
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
UNCOV
2978
        }
×
2979

UNCOV
2980
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
2981
        
2982
    }
2983

2984
    private static Set<String> getDatasetFilenames(Dataset dataset) {
2985
        Set<String> files = new HashSet<>();
×
2986
        for (DataFile dataFile: dataset.getFiles()) {
×
2987
            String storageIdentifier = dataFile.getStorageIdentifier();
×
2988
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
2989
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
2990
            files.add(locationParts[locationParts.length-1]);
×
2991
        }
×
UNCOV
2992
        return files;
×
2993
    }
2994

2995
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
2996
        return f -> {
1✔
2997
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
2998
        };
2999
    }
3000

3001
    // Logs a debug message at FINE level. The System.out call was deliberately
    // disabled and left for reference.
    private void msg(String m) {
        //System.out.println(m);
        logger.fine(m);
    }
×
3005

3006
    // Logs a horizontal separator line; used by msgt() to frame a message.
    private void dashes() {
        msg("----------------");
    }
×
3009

3010
    // Logs a "titled" debug message: the text framed by separator lines.
    private void msgt(String m) {
        dashes();
        msg(m);
        dashes();
    }
×
3015

3016

3017
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
3018
            throws WrappedResponse {
UNCOV
3019
        switch (versionId) {
×
3020
            case DS_VERSION_LATEST:
UNCOV
3021
                return hdl.handleLatest();
×
3022
            case DS_VERSION_DRAFT:
UNCOV
3023
                return hdl.handleDraft();
×
3024
            case DS_VERSION_LATEST_PUBLISHED:
UNCOV
3025
                return hdl.handleLatestPublished();
×
3026
            default:
3027
                try {
3028
                    String[] versions = versionId.split("\\.");
×
UNCOV
3029
                    switch (versions.length) {
×
3030
                        case 1:
UNCOV
3031
                            return hdl.handleSpecific(Long.parseLong(versions[0]), (long) 0.0);
×
3032
                        case 2:
UNCOV
3033
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
3034
                        default:
UNCOV
3035
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3036
                    }
3037
                } catch (NumberFormatException nfe) {
×
UNCOV
3038
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3039
                }
3040
        }
3041
    }
3042

3043
    /*
3044
     * includeDeaccessioned default to false and checkPermsWhenDeaccessioned to false. Use it only when you are sure that the you don't need to work with
3045
     * a deaccessioned dataset.
3046
     */
3047
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, 
3048
                                                  String versionNumber, 
3049
                                                  final Dataset ds,
3050
                                                  UriInfo uriInfo, 
3051
                                                  HttpHeaders headers) throws WrappedResponse {
3052
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
3053
        boolean checkFilePerms = false;
×
3054
        boolean includeDeaccessioned = false;
×
UNCOV
3055
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
×
3056
    }
3057
    
3058
    /*
3059
     * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
3060
     */
3061
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3062
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
3063
        boolean checkPermsWhenDeaccessioned = true;
×
3064
        boolean bypassAccessCheck = false;
×
UNCOV
3065
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3066
    }
3067

3068
    /*
3069
     * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
3070
     */
3071
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3072
                                                  UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
3073
        boolean bypassAccessCheck = false;
×
UNCOV
3074
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3075
    }
3076

3077
    /*
     * Full variant: resolves the requested version or throws a WrappedResponse
     * (404) when it cannot be found; for released versions it also records a
     * Make Data Count log entry.
     */
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
            boolean bypassAccessCheck)
            throws WrappedResponse {

        // NOTE(review): bypassAccessCheck is not used anywhere in this body —
        // it is not forwarded to findDatasetVersionOrDie. Confirm whether it
        // should be, or whether the parameter can be retired.
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);

        if (dsv == null || dsv.getId() == null) {
            throw new WrappedResponse(
                    notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
        }
        // Only released versions count for Make Data Count; uriInfo may be
        // null for internal callers, in which case no entry is logged.
        if (dsv.isReleased()&& uriInfo!=null) {
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
            mdcLogService.logEntry(entry);
        }
        return dsv;
    }
3097
 
3098
    @GET
3099
    @Path("{identifier}/locks")
3100
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3101

UNCOV
3102
        Dataset dataset = null;
×
3103
        try {
UNCOV
3104
            dataset = findDatasetOrDie(id);
×
3105
            Set<DatasetLock> locks;
3106
            if (lockType == null) {
×
UNCOV
3107
                locks = dataset.getLocks();
×
3108
            } else {
3109
                // request for a specific type lock:
UNCOV
3110
                DatasetLock lock = dataset.getLockFor(lockType);
×
3111

3112
                locks = new HashSet<>();
×
3113
                if (lock != null) {
×
UNCOV
3114
                    locks.add(lock);
×
3115
                }
3116
            }
3117
            
UNCOV
3118
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3119

3120
        } catch (WrappedResponse wr) {
×
UNCOV
3121
            return wr.getResponse();
×
3122
        }
3123
    }
3124

3125
    /**
     * Removes locks from a dataset (superusers only). With no "type" query
     * parameter every lock is removed; otherwise only the lock of the given
     * type. A reindex is kicked off after removal since the lock state can
     * affect the dataset's search card.
     */
    @DELETE
    @AuthRequired
    @Path("{identifier}/locks")
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {

        return response(req -> {
            try {
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
                if (!user.isSuperuser()) {
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
                }
                Dataset dataset = findDatasetOrDie(id);
                
                if (lockType == null) {
                    // No type given: collect the distinct reasons first, then
                    // remove one lock per reason.
                    Set<DatasetLock.Reason> locks = new HashSet<>();
                    for (DatasetLock lock : dataset.getLocks()) {
                        locks.add(lock.getReason());
                    }
                    if (!locks.isEmpty()) {
                        for (DatasetLock.Reason locktype : locks) {
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
                            // refresh the dataset after each removal so the
                            // next RemoveLockCommand sees current state:
                            dataset = findDatasetOrDie(id);
                        }
                        // kick of dataset reindexing, in case the locks removed 
                        // affected the search card:
                        indexService.asyncIndexDataset(dataset, true);
                        return ok("locks removed");
                    }
                    return ok("dataset not locked");
                }
                // request for a specific type lock:
                DatasetLock lock = dataset.getLockFor(lockType);
                if (lock != null) {
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
                    // refresh the dataset:
                    dataset = findDatasetOrDie(id);
                    // ... and kick of dataset reindexing, in case the lock removed 
                    // affected the search card:
                    indexService.asyncIndexDataset(dataset, true);
                    return ok("lock type " + lock.getReason() + " removed");
                }
                return ok("no lock type " + lockType + " on the dataset");
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }

        }, getRequestUser(crc));

    }
3175
    
3176
    @POST
3177
    @AuthRequired
3178
    @Path("{identifier}/lock/{type}")
3179
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
UNCOV
3180
        return response(req -> {
×
3181
            try {
3182
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3183
                if (!user.isSuperuser()) {
×
UNCOV
3184
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3185
                }
3186
                Dataset dataset = findDatasetOrDie(id);
×
3187
                DatasetLock lock = dataset.getLockFor(lockType);
×
3188
                if (lock != null) {
×
UNCOV
3189
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
3190
                }
3191
                lock = new DatasetLock(lockType, user);
×
UNCOV
3192
                execCommand(new AddLockCommand(req, dataset, lock));
×
3193
                // refresh the dataset:
UNCOV
3194
                dataset = findDatasetOrDie(id);
×
3195
                // ... and kick of dataset reindexing:
UNCOV
3196
                indexService.asyncIndexDataset(dataset, true);
×
3197

3198
                return ok("dataset locked with lock type " + lockType);
×
3199
            } catch (WrappedResponse wr) {
×
UNCOV
3200
                return wr.getResponse();
×
3201
            }
3202

UNCOV
3203
        }, getRequestUser(crc));
×
3204
    }
3205
    
3206
    @GET
3207
    @AuthRequired
3208
    @Path("locks")
3209
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
3210
        // This API is here, under /datasets, and not under /admin, because we
3211
        // likely want it to be accessible to admin users who may not necessarily 
3212
        // have localhost access, that would be required to get to /api/admin in 
3213
        // most installations. It is still reasonable however to limit access to
3214
        // this api to admin users only.
3215
        AuthenticatedUser apiUser;
3216
        try {
3217
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
3218
        } catch (WrappedResponse ex) {
×
3219
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3220
        }
×
3221
        if (!apiUser.isSuperuser()) {
×
UNCOV
3222
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3223
        }
3224
        
3225
        // Locks can be optinally filtered by type, user or both.
3226
        DatasetLock.Reason lockTypeValue = null;
×
UNCOV
3227
        AuthenticatedUser user = null; 
×
3228
        
3229
        // For the lock type, we use a QueryParam of type String, instead of 
3230
        // DatasetLock.Reason; that would be less code to write, but this way 
3231
        // we can check if the value passed matches a valid lock type ("reason") 
3232
        // and provide a helpful error message if it doesn't. If you use a 
3233
        // QueryParam of an Enum type, trying to pass an invalid value to it 
3234
        // results in a potentially confusing "404/NOT FOUND - requested 
3235
        // resource is not available".
UNCOV
3236
        if (lockType != null && !lockType.isEmpty()) {
×
3237
            try {
3238
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
3239
            } catch (IllegalArgumentException iax) {
×
3240
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
3241
                for (Reason r: Reason.values()) {
×
UNCOV
3242
                    reasonJoiner.add(r.name());
×
3243
                };
3244
                String errorMessage = "Invalid lock type value: " + lockType + 
×
3245
                        "; valid lock types: " + reasonJoiner.toString();
×
3246
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
UNCOV
3247
            }
×
3248
        }
3249
        
3250
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
3251
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
3252
            if (user == null) {
×
UNCOV
3253
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
3254
            }
3255
        }
3256
        
3257
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
UNCOV
3258
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
3259
                            
UNCOV
3260
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3261
    }   
3262
    
3263
    
3264
    @GET
3265
    @Path("{id}/makeDataCount/citations")
3266
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
3267
        
3268
        try {
3269
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3270
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
3271
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
3272
            for (DatasetExternalCitations citation : externalCitations) {
×
UNCOV
3273
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
3274
                /**
3275
                 * In the future we can imagine storing and presenting more
3276
                 * information about the citation such as the title of the paper
3277
                 * and the names of the authors. For now, we'll at least give
3278
                 * the URL of the citation so people can click and find out more
3279
                 * about the citation.
3280
                 */
3281
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
3282
                datasetsCitations.add(candidateObj);
×
3283
            }
×
UNCOV
3284
            return ok(datasetsCitations);
×
3285

3286
        } catch (WrappedResponse wr) {
×
UNCOV
3287
            return wr.getResponse();
×
3288
        }
3289

3290
    }
3291

3292
    @GET
3293
    @Path("{id}/makeDataCount/{metric}")
3294
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
3295
        String nullCurrentMonth = null;
×
UNCOV
3296
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
3297
    }
3298

3299
    @GET
3300
    @AuthRequired
3301
    @Path("{identifier}/storagesize")
3302
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
3303
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
UNCOV
3304
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
3305
    }
3306

3307
    /**
     * Reports the total download size of the files in a dataset version,
     * filterable by content type, access status, category, tabular tag and
     * search text, and selectable by mode (per
     * DatasetVersionFilesServiceBean.FileDownloadSizeMode; defaults to All).
     * Returns both a localized message and the raw "storageSize" number.
     */
    @GET
    @AuthRequired
    @Path("{identifier}/versions/{versionId}/downloadsize")
    public Response getDownloadSize(@Context ContainerRequestContext crc,
                                    @PathParam("identifier") String dvIdtf,
                                    @PathParam("versionId") String version,
                                    @QueryParam("contentType") String contentType,
                                    @QueryParam("accessStatus") String accessStatus,
                                    @QueryParam("categoryName") String categoryName,
                                    @QueryParam("tabularTagName") String tabularTagName,
                                    @QueryParam("searchText") String searchText,
                                    @QueryParam("mode") String mode,
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                    @Context UriInfo uriInfo,
                                    @Context HttpHeaders headers) {

        return response(req -> {
            // Build the search criteria; an unknown accessStatus enum value
            // surfaces as a 400 with a localized message.
            FileSearchCriteria fileSearchCriteria;
            try {
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            // Resolve the size mode; null means "All". Invalid values are a 400.
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
            try {
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
            } catch (IllegalArgumentException e) {
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
            }
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
            jsonObjectBuilder.add("message", message);
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
            return ok(jsonObjectBuilder);
        }, getRequestUser(crc));
    }
3351

3352
    @GET
3353
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
3354
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
3355
        try {
3356
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3357
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
UNCOV
3358
            MakeDataCountUtil.MetricType metricType = null;
×
3359
            try {
3360
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
3361
            } catch (IllegalArgumentException ex) {
×
3362
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
3363
            }
×
3364
            String monthYear = null;
×
UNCOV
3365
            if (yyyymm != null) {
×
3366
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
3367
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
UNCOV
3368
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
3369
            }
3370
            if (country != null) {
×
3371
                country = country.toLowerCase();
×
3372
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
UNCOV
3373
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3374
                }
3375
            }
3376
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
3377
            if (datasetMetrics == null) {
×
3378
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3379
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
UNCOV
3380
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3381
            }
3382
            Long viewsTotalRegular = null;
×
3383
            Long viewsUniqueRegular = null;
×
3384
            Long downloadsTotalRegular = null;
×
3385
            Long downloadsUniqueRegular = null;
×
3386
            Long viewsTotalMachine = null;
×
3387
            Long viewsUniqueMachine = null;
×
3388
            Long downloadsTotalMachine = null;
×
3389
            Long downloadsUniqueMachine = null;
×
3390
            Long viewsTotal = null;
×
3391
            Long viewsUnique = null;
×
3392
            Long downloadsTotal = null;
×
3393
            Long downloadsUnique = null;
×
UNCOV
3394
            switch (metricSupplied) {
×
3395
                case "viewsTotal":
3396
                    viewsTotal = datasetMetrics.getViewsTotal();
×
UNCOV
3397
                    break;
×
3398
                case "viewsTotalRegular":
3399
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
UNCOV
3400
                    break;
×
3401
                case "viewsTotalMachine":
3402
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
UNCOV
3403
                    break;
×
3404
                case "viewsUnique":
3405
                    viewsUnique = datasetMetrics.getViewsUnique();
×
UNCOV
3406
                    break;
×
3407
                case "viewsUniqueRegular":
3408
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
UNCOV
3409
                    break;
×
3410
                case "viewsUniqueMachine":
3411
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
UNCOV
3412
                    break;
×
3413
                case "downloadsTotal":
3414
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
UNCOV
3415
                    break;
×
3416
                case "downloadsTotalRegular":
3417
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
UNCOV
3418
                    break;
×
3419
                case "downloadsTotalMachine":
3420
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
UNCOV
3421
                    break;
×
3422
                case "downloadsUnique":
3423
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
UNCOV
3424
                    break;
×
3425
                case "downloadsUniqueRegular":
3426
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
UNCOV
3427
                    break;
×
3428
                case "downloadsUniqueMachine":
3429
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
UNCOV
3430
                    break;
×
3431
                default:
3432
                    break;
3433
            }
3434
            /**
3435
             * TODO: Think more about the JSON output and the API design.
3436
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3437
             * now, by country. We could return multiple metrics (viewsTotal,
3438
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3439
             */
3440
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
3441
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
3442
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3443
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3444
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3445
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3446
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3447
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3448
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3449
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3450
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3451
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3452
            return ok(jsonObjectBuilder);
×
3453
        } catch (WrappedResponse wr) {
×
3454
            return wr.getResponse();
×
UNCOV
3455
        } catch (Exception e) {
×
3456
            //bad date - caught in sanitize call
UNCOV
3457
            return error(BAD_REQUEST, e.getMessage());
×
3458
        }
3459
    }
3460
    
3461
    @GET
3462
    @AuthRequired
3463
    @Path("{identifier}/storageDriver")
3464
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3465
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3466
        
3467
        Dataset dataset; 
3468
        
3469
        try {
3470
            dataset = findDatasetOrDie(dvIdtf);
×
3471
        } catch (WrappedResponse ex) {
×
3472
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
UNCOV
3473
        }
×
3474
        
UNCOV
3475
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3476
    }
3477
    
3478
    @PUT
3479
    @AuthRequired
3480
    @Path("{identifier}/storageDriver")
3481
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3482
            String storageDriverLabel,
3483
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3484
        
3485
        // Superuser-only:
3486
        AuthenticatedUser user;
3487
        try {
3488
            user = getRequestAuthenticatedUserOrDie(crc);
×
3489
        } catch (WrappedResponse ex) {
×
3490
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3491
        }
×
3492
        if (!user.isSuperuser()) {
×
UNCOV
3493
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3494
        }
3495

3496
        Dataset dataset;
3497

3498
        try {
3499
            dataset = findDatasetOrDie(dvIdtf);
×
3500
        } catch (WrappedResponse ex) {
×
3501
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
UNCOV
3502
        }
×
3503
        
3504
        // We don't want to allow setting this to a store id that does not exist: 
3505
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
3506
            if (store.getKey().equals(storageDriverLabel)) {
×
3507
                dataset.setStorageDriverId(store.getValue());
×
3508
                datasetService.merge(dataset);
×
UNCOV
3509
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3510
            }
3511
        }
×
UNCOV
3512
        return error(Response.Status.BAD_REQUEST,
×
3513
                "No Storage Driver found for : " + storageDriverLabel);
3514
    }
3515
    
3516
    @DELETE
3517
    @AuthRequired
3518
    @Path("{identifier}/storageDriver")
3519
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3520
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3521
        
3522
        // Superuser-only:
3523
        AuthenticatedUser user;
3524
        try {
3525
            user = getRequestAuthenticatedUserOrDie(crc);
×
3526
        } catch (WrappedResponse ex) {
×
3527
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3528
        }
×
3529
        if (!user.isSuperuser()) {
×
UNCOV
3530
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3531
        }
3532

3533
        Dataset dataset;
3534

3535
        try {
3536
            dataset = findDatasetOrDie(dvIdtf);
×
3537
        } catch (WrappedResponse ex) {
×
3538
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
UNCOV
3539
        }
×
3540
        
3541
        dataset.setStorageDriverId(null);
×
3542
        datasetService.merge(dataset);
×
UNCOV
3543
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3544
    }
3545

3546
    @GET
3547
    @AuthRequired
3548
    @Path("{identifier}/curationLabelSet")
3549
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3550
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3551

3552
        try {
3553
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3554
            if (!user.isSuperuser()) {
×
UNCOV
3555
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3556
            }
3557
        } catch (WrappedResponse wr) {
×
3558
            return wr.getResponse();
×
UNCOV
3559
        }
×
3560

3561
        Dataset dataset;
3562

3563
        try {
3564
            dataset = findDatasetOrDie(dvIdtf);
×
3565
        } catch (WrappedResponse ex) {
×
3566
            return ex.getResponse();
×
UNCOV
3567
        }
×
3568

UNCOV
3569
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3570
    }
3571

3572
    @PUT
3573
    @AuthRequired
3574
    @Path("{identifier}/curationLabelSet")
3575
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3576
                                        @PathParam("identifier") String dvIdtf,
3577
                                        @QueryParam("name") String curationLabelSet,
3578
                                        @Context UriInfo uriInfo,
3579
                                        @Context HttpHeaders headers) throws WrappedResponse {
3580

3581
        // Superuser-only:
3582
        AuthenticatedUser user;
3583
        try {
3584
            user = getRequestAuthenticatedUserOrDie(crc);
×
3585
        } catch (WrappedResponse ex) {
×
3586
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3587
        }
×
3588
        if (!user.isSuperuser()) {
×
UNCOV
3589
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3590
        }
3591

3592
        Dataset dataset;
3593

3594
        try {
3595
            dataset = findDatasetOrDie(dvIdtf);
×
3596
        } catch (WrappedResponse ex) {
×
3597
            return ex.getResponse();
×
3598
        }
×
3599
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3600
            dataset.setCurationLabelSetName(curationLabelSet);
×
3601
            datasetService.merge(dataset);
×
UNCOV
3602
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3603
        } else {
3604
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3605
                if (setName.equals(curationLabelSet)) {
×
3606
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3607
                    datasetService.merge(dataset);
×
UNCOV
3608
                    return ok("Curation Label Set Name set to: " + setName);
×
3609
                }
UNCOV
3610
            }
×
3611
        }
UNCOV
3612
        return error(Response.Status.BAD_REQUEST,
×
3613
            "No Such Curation Label Set");
3614
    }
3615

3616
    @DELETE
3617
    @AuthRequired
3618
    @Path("{identifier}/curationLabelSet")
3619
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3620
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3621

3622
        // Superuser-only:
3623
        AuthenticatedUser user;
3624
        try {
3625
            user = getRequestAuthenticatedUserOrDie(crc);
×
3626
        } catch (WrappedResponse ex) {
×
3627
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3628
        }
×
3629
        if (!user.isSuperuser()) {
×
UNCOV
3630
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3631
        }
3632

3633
        Dataset dataset;
3634

3635
        try {
3636
            dataset = findDatasetOrDie(dvIdtf);
×
3637
        } catch (WrappedResponse ex) {
×
3638
            return ex.getResponse();
×
UNCOV
3639
        }
×
3640

3641
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3642
        datasetService.merge(dataset);
×
UNCOV
3643
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3644
    }
3645

3646
    @GET
3647
    @AuthRequired
3648
    @Path("{identifier}/allowedCurationLabels")
3649
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3650
                                             @PathParam("identifier") String dvIdtf,
3651
                                             @Context UriInfo uriInfo,
3652
                                             @Context HttpHeaders headers) throws WrappedResponse {
UNCOV
3653
        AuthenticatedUser user = null;
×
3654
        try {
3655
            user = getRequestAuthenticatedUserOrDie(crc);
×
3656
        } catch (WrappedResponse wr) {
×
3657
            return wr.getResponse();
×
UNCOV
3658
        }
×
3659

3660
        Dataset dataset;
3661

3662
        try {
3663
            dataset = findDatasetOrDie(dvIdtf);
×
3664
        } catch (WrappedResponse ex) {
×
3665
            return ex.getResponse();
×
3666
        }
×
3667
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3668
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
UNCOV
3669
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3670
        } else {
UNCOV
3671
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3672
        }
3673
    }
3674

3675
    @GET
3676
    @AuthRequired
3677
    @Path("{identifier}/timestamps")
3678
    @Produces(MediaType.APPLICATION_JSON)
3679
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3680

3681
        Dataset dataset = null;
×
UNCOV
3682
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3683
        try {
3684
            dataset = findDatasetOrDie(id);
×
3685
            User u = getRequestUser(crc);
×
3686
            Set<Permission> perms = new HashSet<Permission>();
×
3687
            perms.add(Permission.ViewUnpublishedDataset);
×
3688
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3689
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3690
            logger.fine("CSD: " + canSeeDraft);
×
3691
            logger.fine("IT: " + dataset.getIndexTime());
×
3692
            logger.fine("MT: " + dataset.getModificationTime());
×
3693
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
UNCOV
3694
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3695
            // Basic info if it's released
3696
            if (dataset.isReleased() || canSeeDraft) {
×
3697
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
3698
                if (dataset.getPublicationDate() != null) {
×
UNCOV
3699
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3700
                }
3701

3702
                if (dataset.getLastExportTime() != null) {
×
3703
                    timestamps.add("lastMetadataExportTime",
×
UNCOV
3704
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3705

3706
                }
3707

3708
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
3709
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
UNCOV
3710
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3711
                }
3712
                // If the modification/permissionmodification time is
3713
                // set and the index time is null or is before the mod time, the relevant index is stale
3714
                timestamps.add("hasStaleIndex",
×
3715
                        (dataset.getModificationTime() != null && (dataset.getIndexTime() == null
×
3716
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3717
                                : false);
×
3718
                timestamps.add("hasStalePermissionIndex",
×
3719
                        (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null
×
3720
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
UNCOV
3721
                                : false);
×
3722
            }
3723
            // More detail if you can see a draft
3724
            if (canSeeDraft) {
×
3725
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
3726
                if (dataset.getIndexTime() != null) {
×
UNCOV
3727
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3728
                }
3729
                if (dataset.getPermissionModificationTime() != null) {
×
3730
                    timestamps.add("lastPermissionUpdateTime",
×
UNCOV
3731
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3732
                }
3733
                if (dataset.getPermissionIndexTime() != null) {
×
3734
                    timestamps.add("lastPermissionIndexTime",
×
UNCOV
3735
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3736
                }
3737
                if (dataset.getGlobalIdCreateTime() != null) {
×
3738
                    timestamps.add("globalIdCreateTime", formatter
×
UNCOV
3739
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3740
                }
3741

3742
            }
3743
            return ok(timestamps);
×
3744
        } catch (WrappedResponse wr) {
×
UNCOV
3745
            return wr.getResponse();
×
3746
        }
3747
    }
3748

3749

3750
/****************************
3751
 * Globus Support Section:
3752
 * 
3753
 * Globus transfer in (upload) and out (download) involve three basic steps: The
3754
 * app is launched and makes a callback to the
3755
 * globusUploadParameters/globusDownloadParameters method to get all of the info
3756
 * needed to set up its display.
3757
 * 
3758
 * At some point after that, the user will make a selection as to which files to
3759
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
3760
 * to indicate a transfer is about to start. In addition to providing the
3761
 * details of where to transfer the files to/from, Dataverse also grants the
3762
 * Globus principal involved the relevant rw or r permission for the dataset.
3763
 * 
3764
 * Once the transfer is started, the app records the task id and sends it to
3765
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
3766
 * monitors the transfer task and when it ultimately succeeds or fails it
3767
 * revokes the principal's permission and, for the transfer in case, adds the
3768
 * files to the dataset. (The dataset is locked until the transfer completes.)
3769
 * 
3770
 * (If no transfer is started within a specified timeout, permissions will
3771
 * automatically be revoked - see the GlobusServiceBean for details.)
3772
 *
3773
 * The option to reference a file at a remote endpoint (rather than transfer it)
3774
 * follows the first two steps of the process above but completes with a call to
3775
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
3776
 * files can be added to the dataset immediately.)
3777
 */
3778

3779
    /**
     * Retrieve the parameters and signed URLs required to perform a globus
     * transfer. This api endpoint is expected to be called as a signed callback
     * after the globus-dataverse app/other app is launched, but it will accept
     * other forms of authentication.
     * 
     * @param crc
     * @param datasetId
     */
    @GET
    @AuthRequired
    @Path("{id}/globusUploadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale) {
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            return e.getResponse();
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store;
        // a store that neither accepts transfers nor allows references cannot be used at all.
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
        }

        // tokenUtil performs the {placeholder} substitution and signs the callback URLs below.
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        // "Managed" stores transfer into Dataverse-controlled storage (one transfer
        // endpoint); unmanaged ones only reference files at remote endpoints.
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;
        JsonArray referenceEndpointsWithPaths = null;
        if (managed) {
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
        } else {
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
        }

        // Build the template query parameters; tokenUtil.getParams() replaces the
        // {datasetId}/{siteUrl}/... placeholders with the actual values.
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        // Copy the substituted values into the response parameter object.
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            // Size/quota limits only apply when Dataverse manages the storage;
            // either value may be unset, in which case the key is omitted.
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
            if (maxSize != null) {
                params.add("fileSizeLimit", maxSize);
            }
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
            if (limit != null) {
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
            }
        }
        // Exactly one of these two keys is present: transferEndpoint is non-null
        // iff managed (set in the if/else above).
        if (transferEndpoint != null) {
            params.add("endpoint", transferEndpoint);
        } else {
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
        }
        // Signed-URL lifetime for the callback API calls granted to the app.
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        // The app requests transfer paths (managed) or reference paths (unmanaged).
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        // Managed uploads complete via addGlobusFiles (transfer is monitored);
        // referenced files are added immediately via the plain addFiles endpoint.
        if (managed) {
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        } else {
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        }
        // Read-only calls the app uses to render its display; note the shorter 5s timeout.
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }
3888

3889
    /**
3890
     * Provides specific storageIdentifiers to use for each file and requests permissions for a given globus user to upload to the dataset
3891
     * 
3892
     * @param crc
3893
     * @param datasetId
3894
     * @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
3895
     * @return
3896
     * @throws IOException
3897
     * @throws ExecutionException
3898
     * @throws InterruptedException
3899
     */
3900
    @POST
3901
    @AuthRequired
3902
    @Path("{id}/requestGlobusUploadPaths")
3903
    @Consumes(MediaType.APPLICATION_JSON)
3904
    @Produces(MediaType.APPLICATION_JSON)
3905
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3906
            String jsonBody) throws IOException, ExecutionException, InterruptedException {
3907

UNCOV
3908
        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
×
3909

3910
        if (!systemConfig.isGlobusUpload()) {
×
3911
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
NEW
3912
                    BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
3913
        }
3914

3915
        // -------------------------------------
3916
        // (1) Get the user from the ContainerRequestContext
3917
        // -------------------------------------
3918
        AuthenticatedUser authUser;
3919
        try {
3920
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3921
        } catch (WrappedResponse e) {
×
3922
            return e.getResponse();
×
UNCOV
3923
        }
×
3924

3925
        // -------------------------------------
3926
        // (2) Get the Dataset Id
3927
        // -------------------------------------
3928
        Dataset dataset;
3929

3930
        try {
3931
            dataset = findDatasetOrDie(datasetId);
×
3932
        } catch (WrappedResponse wr) {
×
3933
            return wr.getResponse();
×
3934
        }
×
3935
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
×
UNCOV
3936
                .canIssue(UpdateDatasetVersionCommand.class)) {
×
3937

3938
            JsonObject params = JsonUtil.getJsonObject(jsonBody);
×
UNCOV
3939
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3940
                try {
3941
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
×
3942
                    if (referencedFiles == null || referencedFiles.size() == 0) {
×
UNCOV
3943
                        return badRequest("No referencedFiles specified");
×
3944
                    }
3945
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
×
3946
                    return (ok(fileMap));
×
3947
                } catch (Exception e) {
×
UNCOV
3948
                    return badRequest(e.getLocalizedMessage());
×
3949
                }
3950
            } else {
3951
                try {
3952
                    String principal = params.getString("principal");
×
3953
                    int numberOfPaths = params.getInt("numberOfFiles");
×
3954
                    if (numberOfPaths <= 0) {
×
UNCOV
3955
                        return badRequest("numberOfFiles must be positive");
×
3956
                    }
3957

3958
                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
×
UNCOV
3959
                    switch (response.getInt("status")) {
×
3960
                    case 201:
UNCOV
3961
                        return ok(response.getJsonObject("paths"));
×
3962
                    case 400:
UNCOV
3963
                        return badRequest("Unable to grant permission");
×
3964
                    case 409:
UNCOV
3965
                        return conflict("Permission already exists");
×
3966
                    default:
UNCOV
3967
                        return error(null, "Unexpected error when granting permission");
×
3968
                    }
3969

3970
                } catch (NullPointerException | ClassCastException e) {
×
UNCOV
3971
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
×
3972

3973
                }
3974
            }
3975
        } else {
UNCOV
3976
            return forbidden("User doesn't have permission to upload to this dataset");
×
3977
        }
3978

3979
    }
3980

3981
    /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor
3982
     * 
3983
     * @param crc
3984
     * @param datasetId
3985
     * @param jsonData - see /addFiles documentation, aditional "taskIdentifier" key in the main object is required.
3986
     * @param uriInfo
3987
     * @return
3988
     * @throws IOException
3989
     * @throws ExecutionException
3990
     * @throws InterruptedException
3991
     */
3992
    @POST
3993
    @AuthRequired
3994
    @Path("{id}/addGlobusFiles")
3995
    @Consumes(MediaType.MULTIPART_FORM_DATA)
3996
    @Produces("application/json")
3997
    @Operation(summary = "Uploads a Globus file for a dataset", 
3998
               description = "Uploads a Globus file for a dataset")
3999
    @APIResponse(responseCode = "200",
4000
               description = "Globus file uploaded successfully to dataset")
4001
    @Tag(name = "addGlobusFilesToDataset", 
4002
         description = "Uploads a Globus file for a dataset")
4003
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4004
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
4005
                                            @PathParam("id") String datasetId,
4006
                                            @FormDataParam("jsonData") String jsonData,
4007
                                            @Context UriInfo uriInfo
4008
    ) throws IOException, ExecutionException, InterruptedException {
4009

UNCOV
4010
        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);
×
4011

4012
        // -------------------------------------
4013
        // (1) Get the user from the API key
4014
        // -------------------------------------
4015
        AuthenticatedUser authUser;
4016
        try {
4017
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
4018
        } catch (WrappedResponse ex) {
×
UNCOV
4019
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
4020
            );
UNCOV
4021
        }
×
4022

4023
        // -------------------------------------
4024
        // (2) Get the Dataset Id
4025
        // -------------------------------------
4026
        Dataset dataset;
4027

4028
        try {
4029
            dataset = findDatasetOrDie(datasetId);
×
4030
        } catch (WrappedResponse wr) {
×
4031
            return wr.getResponse();
×
UNCOV
4032
        }
×
4033
        
4034
        // Is Globus upload service available? 
4035
        
4036
        // ... on this Dataverse instance?
NEW
4037
        if (!systemConfig.isGlobusUpload()) {
×
NEW
4038
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
4039
        }
4040

4041
        // ... and on this specific Dataset? 
NEW
4042
        String storeId = dataset.getEffectiveStorageDriverId();
×
4043
        // acceptsGlobusTransfers should only be true for an S3 or globus store
NEW
4044
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
NEW
4045
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
NEW
4046
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
4047
        }
4048
        
4049
        // Check if the dataset is already locked
4050
        // We are reusing the code and logic used by various command to determine 
4051
        // if there are any locks on the dataset that would prevent the current 
4052
        // users from modifying it:
4053
        try {
NEW
4054
            DataverseRequest dataverseRequest = createDataverseRequest(authUser);
×
NEW
4055
            permissionService.checkEditDatasetLock(dataset, dataverseRequest, new UpdateDatasetVersionCommand(dataset, dataverseRequest));
×
NEW
4056
        } catch (IllegalCommandException icex) {
×
NEW
4057
            return error(Response.Status.FORBIDDEN, "Dataset " + datasetId + " is locked: " + icex.getLocalizedMessage());
×
NEW
4058
        }
×
4059
        
4060
        JsonObject jsonObject = null;
×
4061
        try {
4062
            jsonObject = JsonUtil.getJsonObject(jsonData);
×
4063
        } catch (Exception ex) {
×
4064
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
×
UNCOV
4065
            return badRequest("Error parsing json body");
×
4066

UNCOV
4067
        }
×
4068

4069
        //------------------------------------
4070
        // (2b) Make sure dataset does not have package file
4071
        // --------------------------------------
4072

4073
        for (DatasetVersion dv : dataset.getVersions()) {
×
4074
            if (dv.isHasPackageFile()) {
×
UNCOV
4075
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4076
                );
4077
            }
UNCOV
4078
        }
×
4079

4080

4081
        String lockInfoMessage = "Globus Upload API started ";
×
4082
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
×
4083
                (authUser).getId(), lockInfoMessage);
×
4084
        if (lock != null) {
×
UNCOV
4085
            dataset.addLock(lock);
×
4086
        } else {
UNCOV
4087
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
4088
        }
4089

4090
        if(uriInfo != null) {
×
UNCOV
4091
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
×
4092
        }
4093

4094
        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
×
4095
        
4096
        // Async Call
4097
        try {
NEW
4098
            globusService.globusUpload(jsonObject, dataset, requestUrl, authUser);
×
NEW
4099
        } catch (IllegalArgumentException ex) {
×
NEW
4100
            return badRequest("Invalid parameters: "+ex.getMessage());
×
NEW
4101
        }
×
4102

UNCOV
4103
        return ok("Async call to Globus Upload started ");
×
4104

4105
    }
4106
    
4107
/**
 * Retrieve the parameters and signed URLs required to perform a globus
 * transfer/download. This api endpoint is expected to be called as a signed
 * callback after the globus-dataverse app/other app is launched, but it will
 * accept other forms of authentication.
 * 
 * @param crc the request context; authentication is optional here — a guest can call this endpoint
 * @param datasetId id or persistent identifier of the dataset
 * @param locale optional locale code, substituted into the URL template parameters
 * @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL.
 * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools. 
 */
    @GET
    @AuthRequired
    @Path("{id}/globusDownloadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        // Unlike most endpoints, failure to authenticate is NOT fatal here:
        // authUser simply stays null (guest download) and no API token is
        // attached to the URL token below.
        AuthenticatedUser authUser = null;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            logger.fine("guest user globus download");
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        // downloadId references a cached listing of the files to transfer,
        // created when the download was originally requested.
        JsonObject files = globusService.getFilesForDownload(downloadId);
        if (files == null) {
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
        }

        // NOTE(review): authUser may be null here (guest case) — presumably
        // findApiTokenByUser tolerates null; confirm before relying on it.
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;

        // Build the {placeholder} query parameters and substitute in the actual
        // values for this dataset/locale (same mechanism as external tools).
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            // Dataverse-managed store: the app also needs the transfer endpoint id.
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
            params.add("endpoint", transferEndpoint);
        }
        params.add("files", files);
        // The signed API calls below are valid for as long as the cached
        // download entry (Globus cache max-age).
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE,
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }
4201

4202
    /**
4203
     * Requests permissions for a given globus user to download the specified files
4204
     * the dataset and returns information about the paths to transfer from.
4205
     * 
4206
     * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
4207
     * 
4208
     * @param crc
4209
     * @param datasetId
4210
     * @param jsonData - a JSON object that must include the id of the  Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
4211
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
4212
     * @throws IOException
4213
     * @throws ExecutionException
4214
     * @throws InterruptedException
4215
     */
4216
    @POST
4217
    @AuthRequired
4218
    @Path("{id}/requestGlobusDownload")
4219
    @Consumes(MediaType.APPLICATION_JSON)
4220
    @Produces(MediaType.APPLICATION_JSON)
4221
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4222
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
4223
            throws IOException, ExecutionException, InterruptedException {
4224

UNCOV
4225
        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);
×
4226

4227
        if (!systemConfig.isGlobusDownload()) {
×
4228
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
UNCOV
4229
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4230
        }
4231

4232
        // -------------------------------------
4233
        // (1) Get the user from the ContainerRequestContext
4234
        // -------------------------------------
UNCOV
4235
        User user = getRequestUser(crc);
×
4236

4237
        // -------------------------------------
4238
        // (2) Get the Dataset Id
4239
        // -------------------------------------
4240
        Dataset dataset;
4241

4242
        try {
4243
            dataset = findDatasetOrDie(datasetId);
×
4244
        } catch (WrappedResponse wr) {
×
4245
            return wr.getResponse();
×
4246
        }
×
4247
        JsonObject body = null;
×
4248
        if (jsonBody != null) {
×
UNCOV
4249
            body = JsonUtil.getJsonObject(jsonBody);
×
4250
        }
4251
        Set<String> fileIds = null;
×
4252
        if (downloadId != null) {
×
4253
            JsonObject files = globusService.getFilesForDownload(downloadId);
×
4254
            if (files != null) {
×
UNCOV
4255
                fileIds = files.keySet();
×
4256
            }
4257
        } else {
×
4258
            if ((body!=null) && body.containsKey("fileIds")) {
×
4259
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
×
4260
                fileIds = new HashSet<String>(fileVals.size());
×
4261
                for (JsonValue fileVal : fileVals) {
×
4262
                    String id = null;
×
UNCOV
4263
                    switch (fileVal.getValueType()) {
×
4264
                    case STRING:
4265
                        id = ((JsonString) fileVal).getString();
×
UNCOV
4266
                        break;
×
4267
                    case NUMBER:
4268
                        id = ((JsonNumber) fileVal).toString();
×
UNCOV
4269
                        break;
×
4270
                    default:
UNCOV
4271
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
×
4272
                    }
4273
                    ;
4274
                    fileIds.add(id);
×
4275
                }
×
4276
            } else {
×
UNCOV
4277
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
×
4278
            }
4279
        }
4280

4281
        if (fileIds.isEmpty()) {
×
UNCOV
4282
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
4283
        }
4284
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
×
4285
        for (String id : fileIds) {
×
4286
            boolean published = false;
×
UNCOV
4287
            logger.info("File id: " + id);
×
4288

UNCOV
4289
            DataFile df = null;
×
4290
            try {
4291
                df = findDataFileOrDie(id);
×
4292
            } catch (WrappedResponse wr) {
×
4293
                return wr.getResponse();
×
4294
            }
×
4295
            if (!df.getOwner().equals(dataset)) {
×
UNCOV
4296
                return badRequest("All files must be in the dataset");
×
4297
            }
UNCOV
4298
            dataFiles.add(df);
×
4299

4300
            for (FileMetadata fm : df.getFileMetadatas()) {
×
4301
                if (fm.getDatasetVersion().isPublished()) {
×
4302
                    published = true;
×
UNCOV
4303
                    break;
×
4304
                }
UNCOV
4305
            }
×
4306

UNCOV
4307
            if (!published) {
×
4308
                // If the file is not published, they can still download the file, if the user
4309
                // has the permission to view unpublished versions:
4310

4311
                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
×
4312
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
×
UNCOV
4313
                    return forbidden("User doesn't have permission to download file: " + id);
×
4314
                }
4315
            } else { // published and restricted and/or embargoed
UNCOV
4316
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
×
4317
                    // This line also handles all three authenticated session user, token user, and
4318
                    // guest cases.
4319
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
×
UNCOV
4320
                        return forbidden("User doesn't have permission to download file: " + id);
×
4321
                    }
4322

4323
            }
UNCOV
4324
        }
×
4325
        // Allowed to download all requested files
4326
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
×
UNCOV
4327
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
4328
            // If managed, give the principal read permissions
4329
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
×
UNCOV
4330
            switch (status) {
×
4331
            case 201:
UNCOV
4332
                return ok(files);
×
4333
            case 400:
UNCOV
4334
                return badRequest("Unable to grant permission");
×
4335
            case 409:
UNCOV
4336
                return conflict("Permission already exists");
×
4337
            default:
UNCOV
4338
                return error(null, "Unexpected error when granting permission");
×
4339
            }
4340

4341
        }
4342

UNCOV
4343
        return ok(files);
×
4344
    }
4345

4346
    /**
4347
     * Monitors a globus download and removes permissions on the dir/dataset when
4348
     * the specified transfer task is completed.
4349
     * 
4350
     * @param crc
4351
     * @param datasetId
4352
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
4353
     *                  Globus task to monitor.
4354
     * @return
4355
     * @throws IOException
4356
     * @throws ExecutionException
4357
     * @throws InterruptedException
4358
     */
4359
    @POST
4360
    @AuthRequired
4361
    @Path("{id}/monitorGlobusDownload")
4362
    @Consumes(MediaType.APPLICATION_JSON)
4363
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4364
            String jsonData) throws IOException, ExecutionException, InterruptedException {
4365

UNCOV
4366
        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
×
4367

4368
        if (!systemConfig.isGlobusDownload()) {
×
4369
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
UNCOV
4370
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4371
        }
4372

4373
        // -------------------------------------
4374
        // (1) Get the user from the ContainerRequestContext
4375
        // -------------------------------------
4376
        User authUser;
UNCOV
4377
        authUser = getRequestUser(crc);
×
4378

4379
        // -------------------------------------
4380
        // (2) Get the Dataset Id
4381
        // -------------------------------------
4382
        Dataset dataset;
4383

4384
        try {
4385
            dataset = findDatasetOrDie(datasetId);
×
4386
        } catch (WrappedResponse wr) {
×
4387
            return wr.getResponse();
×
UNCOV
4388
        }
×
4389

4390
        // Async Call
UNCOV
4391
        globusService.globusDownload(jsonData, dataset, authUser);
×
4392

UNCOV
4393
        return ok("Async call to Globus Download started");
×
4394

4395
    }
4396

4397
    /**
4398
     * Add multiple Files to an existing Dataset
4399
     *
4400
     * @param idSupplied
4401
     * @param jsonData
4402
     * @return
4403
     */
4404
    @POST
4405
    @AuthRequired
4406
    @Path("{id}/addFiles")
4407
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4408
    @Produces("application/json")
4409
    @Operation(summary = "Uploads a set of files to a dataset", 
4410
               description = "Uploads a set of files to a dataset")
4411
    @APIResponse(responseCode = "200",
4412
               description = "Files uploaded successfully to dataset")
4413
    @Tag(name = "addFilesToDataset", 
4414
         description = "Uploads a set of files to a dataset")
4415
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4416
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
4417
            @FormDataParam("jsonData") String jsonData) {
4418

4419
        if (!systemConfig.isHTTPUpload()) {
×
UNCOV
4420
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4421
        }
4422

4423
        // -------------------------------------
4424
        // (1) Get the user from the ContainerRequestContext
4425
        // -------------------------------------
4426
        User authUser;
UNCOV
4427
        authUser = getRequestUser(crc);
×
4428

4429
        // -------------------------------------
4430
        // (2) Get the Dataset Id
4431
        // -------------------------------------
4432
        Dataset dataset;
4433

4434
        try {
4435
            dataset = findDatasetOrDie(idSupplied);
×
4436
        } catch (WrappedResponse wr) {
×
4437
            return wr.getResponse();
×
UNCOV
4438
        }
×
4439

4440
        dataset.getLocks().forEach(dl -> {
×
4441
            logger.info(dl.toString());
×
UNCOV
4442
        });
×
4443

4444
        //------------------------------------
4445
        // (2a) Make sure dataset does not have package file
4446
        // --------------------------------------
4447

4448
        for (DatasetVersion dv : dataset.getVersions()) {
×
4449
            if (dv.isHasPackageFile()) {
×
4450
                return error(Response.Status.FORBIDDEN,
×
UNCOV
4451
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4452
                );
4453
            }
UNCOV
4454
        }
×
4455

UNCOV
4456
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4457

UNCOV
4458
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4459
                dvRequest,
4460
                this.ingestService,
4461
                this.datasetService,
4462
                this.fileService,
4463
                this.permissionSvc,
4464
                this.commandEngine,
4465
                this.systemConfig
4466
        );
4467

UNCOV
4468
        return addFileHelper.addFiles(jsonData, dataset, authUser);
×
4469

4470
    }
4471

4472
    /**
4473
     * Replace multiple Files to an existing Dataset
4474
     *
4475
     * @param idSupplied
4476
     * @param jsonData
4477
     * @return
4478
     */
4479
    @POST
4480
    @AuthRequired
4481
    @Path("{id}/replaceFiles")
4482
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4483
    @Produces("application/json")
4484
    @Operation(summary = "Replace a set of files to a dataset", 
4485
               description = "Replace a set of files to a dataset")
4486
    @APIResponse(responseCode = "200",
4487
               description = "Files replaced successfully to dataset")
4488
    @Tag(name = "replaceFilesInDataset", 
4489
         description = "Replace a set of files to a dataset")
4490
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) 
4491
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
4492
                                          @PathParam("id") String idSupplied,
4493
                                          @FormDataParam("jsonData") String jsonData) {
4494

4495
        if (!systemConfig.isHTTPUpload()) {
×
UNCOV
4496
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4497
        }
4498

4499
        // -------------------------------------
4500
        // (1) Get the user from the ContainerRequestContext
4501
        // -------------------------------------
4502
        User authUser;
UNCOV
4503
        authUser = getRequestUser(crc);
×
4504

4505
        // -------------------------------------
4506
        // (2) Get the Dataset Id
4507
        // -------------------------------------
4508
        Dataset dataset;
4509

4510
        try {
4511
            dataset = findDatasetOrDie(idSupplied);
×
4512
        } catch (WrappedResponse wr) {
×
4513
            return wr.getResponse();
×
UNCOV
4514
        }
×
4515

4516
        dataset.getLocks().forEach(dl -> {
×
4517
            logger.info(dl.toString());
×
UNCOV
4518
        });
×
4519

4520
        //------------------------------------
4521
        // (2a) Make sure dataset does not have package file
4522
        // --------------------------------------
4523

4524
        for (DatasetVersion dv : dataset.getVersions()) {
×
4525
            if (dv.isHasPackageFile()) {
×
4526
                return error(Response.Status.FORBIDDEN,
×
UNCOV
4527
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4528
                );
4529
            }
UNCOV
4530
        }
×
4531

UNCOV
4532
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4533

UNCOV
4534
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4535
                dvRequest,
4536
                this.ingestService,
4537
                this.datasetService,
4538
                this.fileService,
4539
                this.permissionSvc,
4540
                this.commandEngine,
4541
                this.systemConfig
4542
        );
4543

UNCOV
4544
        return addFileHelper.replaceFiles(jsonData, dataset, authUser);
×
4545

4546
    }
4547

4548
    /**
4549
     * API to find curation assignments and statuses
4550
     *
4551
     * @return
4552
     * @throws WrappedResponse
4553
     */
4554
    @GET
4555
    @AuthRequired
4556
    @Path("/listCurationStates")
4557
    @Produces("text/csv")
4558
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {
4559

4560
        try {
4561
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
4562
            if (!user.isSuperuser()) {
×
UNCOV
4563
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4564
            }
4565
        } catch (WrappedResponse wr) {
×
4566
            return wr.getResponse();
×
UNCOV
4567
        }
×
4568

4569
        List<DataverseRole> allRoles = dataverseRoleService.findAll();
×
4570
        List<DataverseRole> curationRoles = new ArrayList<DataverseRole>();
×
4571
        allRoles.forEach(r -> {
×
4572
            if (r.permissions().contains(Permission.PublishDataset))
×
4573
                curationRoles.add(r);
×
4574
        });
×
4575
        HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>();
×
4576
        curationRoles.forEach(r -> {
×
4577
            assignees.put(r.getAlias(), null);
×
UNCOV
4578
        });
×
4579

4580
        StringBuilder csvSB = new StringBuilder(String.join(",",
×
4581
                BundleUtil.getStringFromBundle("dataset"),
×
4582
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
×
4583
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
×
4584
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
×
4585
                String.join(",", assignees.keySet())));
×
4586
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
×
4587
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
×
4588
            curationRoles.forEach(r -> {
×
4589
                assignees.put(r.getAlias(), new HashSet<String>());
×
4590
            });
×
4591
            for (RoleAssignment ra : ras) {
×
4592
                if (curationRoles.contains(ra.getRole())) {
×
UNCOV
4593
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
×
4594
                }
4595
            }
×
4596
            DatasetVersion dsv = dataset.getLatestVersion();
×
4597
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
×
4598
            String status = dsv.getExternalStatusLabel();
×
4599
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
×
4600
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
×
4601
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
×
4602
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
×
4603
            List<String> sList = new ArrayList<String>();
×
4604
            assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
×
4605
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
×
4606
        }
×
4607
        csvSB.append("\n");
×
UNCOV
4608
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
×
4609
    }
4610

4611
    // APIs to manage archival status
4612

4613
    @GET
4614
    @AuthRequired
4615
    @Produces(MediaType.APPLICATION_JSON)
4616
    @Path("/{id}/{version}/archivalStatus")
4617
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4618
                                                    @PathParam("id") String datasetId,
4619
                                                    @PathParam("version") String versionNumber,
4620
                                                    @Context UriInfo uriInfo,
4621
                                                    @Context HttpHeaders headers) {
4622

4623
        try {
4624
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4625
            if (!au.isSuperuser()) {
×
UNCOV
4626
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4627
            }
4628
            DataverseRequest req = createDataverseRequest(au);
×
UNCOV
4629
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4630
                    headers);
4631

4632
            if (dsv.getArchivalCopyLocation() == null) {
×
UNCOV
4633
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
×
4634
            } else {
4635
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
×
UNCOV
4636
                return ok(status);
×
4637
            }
4638
        } catch (WrappedResponse wr) {
×
UNCOV
4639
            return wr.getResponse();
×
4640
        }
4641
    }
4642

4643
    /**
     * Superuser-only: records the archival status for a dataset version.
     * <p>
     * The request body must be JSON containing both an archival status (one of
     * pending/failure/success) and a message; anything else falls through to a
     * 400 "Unacceptable status format" response. HTML tags are stripped from
     * the body before parsing because the status is displayed in the UI.
     */
    @PUT
    @AuthRequired
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    String newStatus,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        logger.fine(newStatus);
        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);

            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
            
            //Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
            JsonObject update= JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));
            
            // Both keys are required; otherwise control falls through to the final 400 below.
            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
                // Only the three recognized status values are accepted.
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {

                    DataverseRequest req = createDataverseRequest(au);
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
                            uriInfo, headers);

                    if (dsv == null) {
                        return error(Status.NOT_FOUND, "Dataset version not found");
                    }
                    // When the configured archiver supports only one archived version per
                    // dataset, refuse if any OTHER version already has an archival copy.
                    if (isSingleVersionArchiving()) {
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
                                return error(Status.CONFLICT, "Dataset already archived.");
                            }
                        }
                    }

                    // The whole (sanitized) JSON object is persisted as the archival copy location.
                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
                    dsv = datasetversionService.merge(dsv);
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());

                    return ok("Status updated");
                }
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (JsonException| IllegalStateException ex) {
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
        }
        // Reached when the JSON parsed but lacked the required keys or had an unknown status value.
        return error(Status.BAD_REQUEST, "Unacceptable status format");
    }
4700
    
4701
    @DELETE
4702
    @AuthRequired
4703
    @Produces(MediaType.APPLICATION_JSON)
4704
    @Path("/{id}/{version}/archivalStatus")
4705
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4706
                                                       @PathParam("id") String datasetId,
4707
                                                       @PathParam("version") String versionNumber,
4708
                                                       @Context UriInfo uriInfo,
4709
                                                       @Context HttpHeaders headers) {
4710

4711
        try {
4712
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4713
            if (!au.isSuperuser()) {
×
UNCOV
4714
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4715
            }
4716

4717
            DataverseRequest req = createDataverseRequest(au);
×
UNCOV
4718
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4719
                    headers);
4720
            if (dsv == null) {
×
UNCOV
4721
                return error(Status.NOT_FOUND, "Dataset version not found");
×
4722
            }
4723
            dsv.setArchivalCopyLocation(null);
×
UNCOV
4724
            dsv = datasetversionService.merge(dsv);
×
4725

UNCOV
4726
            return ok("Status deleted");
×
4727

4728
        } catch (WrappedResponse wr) {
×
UNCOV
4729
            return wr.getResponse();
×
4730
        }
4731
    }
4732
    
4733
    private boolean isSingleVersionArchiving() {
4734
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
×
UNCOV
4735
        if (className != null) {
×
4736
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
4737
            try {
4738
                clazz =  Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
×
4739
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
×
4740
            } catch (ClassNotFoundException e) {
×
4741
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
×
4742
            } catch (ClassCastException cce) {
×
4743
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
×
UNCOV
4744
            }
×
4745
        }
UNCOV
4746
        return false;
×
4747
    }
4748
    
4749
    // This method provides a callback for an external tool to retrieve it's
4750
    // parameters/api URLs. If the request is authenticated, e.g. by it being
4751
    // signed, the api URLs will be signed. If a guest request is made, the URLs
4752
    // will be plain/unsigned.
4753
    // This supports the cases where a tool is accessing a restricted resource (e.g.
4754
    // for a draft dataset), or public case.
4755
    @GET
4756
    @AuthRequired
4757
    @Path("{id}/versions/{version}/toolparams/{tid}")
4758
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
4759
                                            @PathParam("tid") long externalToolId,
4760
                                            @PathParam("id") String datasetId,
4761
                                            @PathParam("version") String version,
4762
                                            @QueryParam(value = "locale") String locale) {
4763
        try {
4764
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
4765
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
×
4766
            if (target == null) {
×
UNCOV
4767
                return error(BAD_REQUEST, "DatasetVersion not found.");
×
4768
            }
4769
            
4770
            ExternalTool externalTool = externalToolService.findById(externalToolId);
×
4771
            if(externalTool==null) {
×
UNCOV
4772
                return error(BAD_REQUEST, "External tool not found.");
×
4773
            }
4774
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
×
UNCOV
4775
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
×
4776
            }
4777
            ApiToken apiToken = null;
×
4778
            User u = getRequestUser(crc);
×
UNCOV
4779
            apiToken = authSvc.getValidApiTokenForUser(u);
×
4780

4781
            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
×
4782
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
×
4783
        } catch (WrappedResponse wr) {
×
UNCOV
4784
            return wr.getResponse();
×
4785
        }
4786
    }
4787

4788
    @GET
4789
    @Path("summaryFieldNames")
4790
    public Response getDatasetSummaryFieldNames() {
4791
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
×
4792
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
×
4793
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
×
4794
        for (String fieldName : fieldNames) {
×
UNCOV
4795
            fieldNamesArrayBuilder.add(fieldName);
×
4796
        }
UNCOV
4797
        return ok(fieldNamesArrayBuilder);
×
4798
    }
4799

4800
    @GET
4801
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
4802
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
4803
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
4804
        if (privateUrlUser == null) {
×
UNCOV
4805
            return notFound("Private URL user not found");
×
4806
        }
4807
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
×
4808
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
×
4809
        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
×
UNCOV
4810
            throw new NotAcceptableException("Anonymized Access not enabled");
×
4811
        }
4812
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4813
        if (dsv == null || dsv.getId() == null) {
×
UNCOV
4814
            return notFound("Dataset version not found");
×
4815
        }
4816
        JsonObjectBuilder responseJson;
4817
        if (isAnonymizedAccess) {
×
4818
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
×
4819
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
×
4820
        } else {
×
UNCOV
4821
            responseJson = json(dsv, null, true, returnOwners);
×
4822
        }
UNCOV
4823
        return ok(responseJson);
×
4824
    }
4825

4826
    @GET
4827
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
4828
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
4829
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
4830
        if (privateUrlUser == null) {
×
UNCOV
4831
            return notFound("Private URL user not found");
×
4832
        }
4833
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4834
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
×
UNCOV
4835
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
×
4836
    }
4837

4838
    @GET
4839
    @AuthRequired
4840
    @Path("{id}/versions/{versionId}/citation")
4841
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
4842
                                              @PathParam("id") String datasetId,
4843
                                              @PathParam("versionId") String versionId,
4844
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
4845
                                              @Context UriInfo uriInfo,
4846
                                              @Context HttpHeaders headers) {
4847
        boolean checkFilePerms = false;
×
4848
        return response(req -> ok(
×
4849
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers,
×
4850
                        includeDeaccessioned, checkFilePerms).getCitation(true, false)),
×
UNCOV
4851
                getRequestUser(crc));
×
4852
    }
4853

4854
    /**
     * Deaccessions a published dataset version.
     * <p>
     * The request body is JSON with a required "deaccessionReason" and an
     * optional "deaccessionForwardURL". The ":draft" and ":latest" version
     * identifiers are rejected up front; callers must target a published
     * version (e.g. ":latest-published" or an explicit number).
     */
    @POST
    @AuthRequired
    @Path("{id}/versions/{versionId}/deaccession")
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        // Draft/latest identifiers cannot be deaccessioned; only published versions qualify.
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
        }
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            try {
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
                // The deaccession reason is stored as the version note.
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
                if (deaccessionForwardURL != null) {
                    try {
                        // setArchiveNote validates the URL and throws on an invalid one.
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
                    } catch (IllegalArgumentException iae) {
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
                    }
                }
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
                
                // Echo the persistent ID (rather than the ":persistentId" placeholder) in the message.
                return ok("Dataset " + 
                        (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) + 
                        " deaccessioned for version " + versionId);
            } catch (JsonParsingException jpe) {
                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
            }
        }, getRequestUser(crc));
    }
4884

4885
    @GET
4886
    @AuthRequired
4887
    @Path("{identifier}/guestbookEntryAtRequest")
4888
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4889
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4890

4891
        Dataset dataset;
4892

4893
        try {
4894
            dataset = findDatasetOrDie(dvIdtf);
×
4895
        } catch (WrappedResponse ex) {
×
4896
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4897
        }
×
4898
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
×
4899
        if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
×
UNCOV
4900
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4901
        }
UNCOV
4902
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
×
4903
    }
4904

4905
    @PUT
4906
    @AuthRequired
4907
    @Path("{identifier}/guestbookEntryAtRequest")
4908
    public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4909
                                               boolean gbAtRequest,
4910
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4911

4912
        // Superuser-only:
4913
        AuthenticatedUser user;
4914
        try {
4915
            user = getRequestAuthenticatedUserOrDie(crc);
×
4916
        } catch (WrappedResponse ex) {
×
4917
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4918
        }
×
4919
        if (!user.isSuperuser()) {
×
UNCOV
4920
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4921
        }
4922

4923
        Dataset dataset;
4924

4925
        try {
4926
            dataset = findDatasetOrDie(dvIdtf);
×
4927
        } catch (WrappedResponse ex) {
×
4928
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4929
        }
×
4930
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
×
4931
        if (!gbAtRequestOpt.isPresent()) {
×
UNCOV
4932
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
×
4933
        }
4934
        String choice = Boolean.valueOf(gbAtRequest).toString();
×
4935
        dataset.setGuestbookEntryAtRequest(choice);
×
4936
        datasetService.merge(dataset);
×
UNCOV
4937
        return ok("Guestbook Entry At Request set to: " + choice);
×
4938
    }
4939

4940
    @DELETE
4941
    @AuthRequired
4942
    @Path("{identifier}/guestbookEntryAtRequest")
4943
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4944
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4945

4946
        // Superuser-only:
4947
        AuthenticatedUser user;
4948
        try {
4949
            user = getRequestAuthenticatedUserOrDie(crc);
×
4950
        } catch (WrappedResponse ex) {
×
4951
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4952
        }
×
4953
        if (!user.isSuperuser()) {
×
UNCOV
4954
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4955
        }
4956

4957
        Dataset dataset;
4958

4959
        try {
4960
            dataset = findDatasetOrDie(dvIdtf);
×
4961
        } catch (WrappedResponse ex) {
×
4962
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
UNCOV
4963
        }
×
4964

4965
        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
×
4966
        datasetService.merge(dataset);
×
UNCOV
4967
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4968
    }
4969

4970
    @GET
4971
    @AuthRequired
4972
    @Path("{id}/userPermissions")
4973
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
4974
        Dataset dataset;
4975
        try {
4976
            dataset = findDatasetOrDie(datasetId);
×
4977
        } catch (WrappedResponse wr) {
×
4978
            return wr.getResponse();
×
4979
        }
×
4980
        User requestUser = getRequestUser(crc);
×
4981
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
4982
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
×
4983
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
×
4984
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
×
4985
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
×
4986
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
×
UNCOV
4987
        return ok(jsonObjectBuilder);
×
4988
    }
4989

4990
    @GET
4991
    @AuthRequired
4992
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
4993
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
4994
                                                 @PathParam("id") String datasetId,
4995
                                                 @PathParam("versionId") String versionId,
4996
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
4997
                                                 @Context UriInfo uriInfo,
4998
                                                 @Context HttpHeaders headers) {
4999
        return response(req -> {
×
5000
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
5001
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
×
UNCOV
5002
        }, getRequestUser(crc));
×
5003
    }
5004
    
5005
    /**
5006
     * Get the PidProvider that will be used for generating new DOIs in this dataset
5007
     *
5008
     * @return - the id of the effective PID generator for the given dataset
5009
     * @throws WrappedResponse
5010
     */
5011
    @GET
5012
    @AuthRequired
5013
    @Path("{identifier}/pidGenerator")
5014
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5015
            @Context HttpHeaders headers) throws WrappedResponse {
5016

5017
        Dataset dataset;
5018

5019
        try {
5020
            dataset = findDatasetOrDie(dvIdtf);
×
5021
        } catch (WrappedResponse ex) {
×
5022
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5023
        }
×
5024
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
×
UNCOV
5025
        if(pidProvider == null) {
×
5026
            //This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it
UNCOV
5027
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
×
5028
        }
5029
        String pidGeneratorId = pidProvider.getId();
×
UNCOV
5030
        return ok(pidGeneratorId);
×
5031
    }
5032

5033
    @PUT
5034
    @AuthRequired
5035
    @Path("{identifier}/pidGenerator")
5036
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
5037
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {
5038

5039
        // Superuser-only:
5040
        AuthenticatedUser user;
5041
        try {
5042
            user = getRequestAuthenticatedUserOrDie(crc);
×
5043
        } catch (WrappedResponse ex) {
×
5044
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
5045
        }
×
5046
        if (!user.isSuperuser()) {
×
UNCOV
5047
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5048
        }
5049

5050
        Dataset dataset;
5051

5052
        try {
5053
            dataset = findDatasetOrDie(datasetId);
×
5054
        } catch (WrappedResponse ex) {
×
5055
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5056
        }
×
5057
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
×
5058
            dataset.setPidGeneratorId(generatorId);
×
5059
            datasetService.merge(dataset);
×
UNCOV
5060
            return ok("PID Generator set to: " + generatorId);
×
5061
        } else {
UNCOV
5062
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the give id");
×
5063
        }
5064

5065
    }
5066

5067
    @DELETE
5068
    @AuthRequired
5069
    @Path("{identifier}/pidGenerator")
5070
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5071
            @Context HttpHeaders headers) throws WrappedResponse {
5072

5073
        // Superuser-only:
5074
        AuthenticatedUser user;
5075
        try {
5076
            user = getRequestAuthenticatedUserOrDie(crc);
×
5077
        } catch (WrappedResponse ex) {
×
5078
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5079
        }
×
5080
        if (!user.isSuperuser()) {
×
UNCOV
5081
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5082
        }
5083

5084
        Dataset dataset;
5085

5086
        try {
5087
            dataset = findDatasetOrDie(dvIdtf);
×
5088
        } catch (WrappedResponse ex) {
×
5089
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
UNCOV
5090
        }
×
5091

5092
        dataset.setPidGenerator(null);
×
5093
        datasetService.merge(dataset);
×
UNCOV
5094
        return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
×
5095
    }
5096

5097
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc