• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

IQSS / dataverse / #22693

03 Jul 2024 01:09PM CUT coverage: 20.626% (-0.09%) from 20.716%
#22693

push

github

web-flow
Merge pull request #10664 from IQSS/develop

merge develop into master for 6.3

195 of 1852 new or added lines in 82 files covered. (10.53%)

72 existing lines in 33 files now uncovered.

17335 of 84043 relevant lines covered (20.63%)

0.21 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.17
/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.s3.model.PartETag;
4

5
import edu.harvard.iq.dataverse.*;
6
import edu.harvard.iq.dataverse.DatasetLock.Reason;
7
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
8
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
9
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
10
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
11
import edu.harvard.iq.dataverse.authorization.DataverseRole;
12
import edu.harvard.iq.dataverse.authorization.Permission;
13
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
14
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
15
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
16
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
17
import edu.harvard.iq.dataverse.authorization.users.User;
18
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
19
import edu.harvard.iq.dataverse.dataaccess.*;
20
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
21
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
22
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
23
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
24
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
25
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
26
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
27
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
28
import edu.harvard.iq.dataverse.engine.command.Command;
29
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
30
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
31
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
32
import edu.harvard.iq.dataverse.engine.command.impl.*;
33
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
34
import edu.harvard.iq.dataverse.export.ExportService;
35
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
36
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
37
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
38
import edu.harvard.iq.dataverse.globus.GlobusUtil;
39
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
40
import edu.harvard.iq.dataverse.makedatacount.*;
41
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
42
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
43
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
44
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
45
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
46
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
47
import edu.harvard.iq.dataverse.search.IndexServiceBean;
48
import edu.harvard.iq.dataverse.settings.FeatureFlags;
49
import edu.harvard.iq.dataverse.settings.JvmSettings;
50
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
51
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
52
import edu.harvard.iq.dataverse.util.*;
53
import edu.harvard.iq.dataverse.util.bagit.OREMap;
54
import edu.harvard.iq.dataverse.util.json.*;
55
import edu.harvard.iq.dataverse.workflow.Workflow;
56
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
57
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
58
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
59
import jakarta.ejb.EJB;
60
import jakarta.ejb.EJBException;
61
import jakarta.inject.Inject;
62
import jakarta.json.*;
63
import jakarta.json.stream.JsonParsingException;
64
import jakarta.servlet.http.HttpServletRequest;
65
import jakarta.servlet.http.HttpServletResponse;
66
import jakarta.ws.rs.*;
67
import jakarta.ws.rs.container.ContainerRequestContext;
68
import jakarta.ws.rs.core.*;
69
import jakarta.ws.rs.core.Response.Status;
70
import org.apache.commons.lang3.StringUtils;
71
import org.eclipse.microprofile.openapi.annotations.Operation;
72
import org.eclipse.microprofile.openapi.annotations.media.Content;
73
import org.eclipse.microprofile.openapi.annotations.media.Schema;
74
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
75
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
76
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
77
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
78
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
79
import org.glassfish.jersey.media.multipart.FormDataParam;
80

81
import java.io.IOException;
82
import java.io.InputStream;
83
import java.net.URI;
84
import java.sql.Timestamp;
85
import java.text.MessageFormat;
86
import java.text.SimpleDateFormat;
87
import java.time.LocalDate;
88
import java.time.LocalDateTime;
89
import java.time.ZoneId;
90
import java.time.format.DateTimeFormatter;
91
import java.time.format.DateTimeParseException;
92
import java.util.*;
93
import java.util.Map.Entry;
94
import java.util.concurrent.ExecutionException;
95
import java.util.function.Predicate;
96
import java.util.logging.Level;
97
import java.util.logging.Logger;
98
import java.util.regex.Pattern;
99
import java.util.stream.Collectors;
100

101
import static edu.harvard.iq.dataverse.api.ApiConstants.*;
102
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
103
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
104
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
105

106
@Path("datasets")
107
public class Datasets extends AbstractApiBean {
×
108

109
    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
1✔
110
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");
1✔
111
    
112
    @Inject DataverseSession session;
113

114
    @EJB
115
    DatasetServiceBean datasetService;
116

117
    @EJB
118
    DataverseServiceBean dataverseService;
119
    
120
    @EJB
121
    GlobusServiceBean globusService;
122

123
    @EJB
124
    UserNotificationServiceBean userNotificationService;
125
    
126
    @EJB
127
    PermissionServiceBean permissionService;
128
    
129
    @EJB
130
    AuthenticationServiceBean authenticationServiceBean;
131
    
132
    @EJB
133
    DDIExportServiceBean ddiExportService;
134

135
    @EJB
136
    MetadataBlockServiceBean metadataBlockService;
137
    
138
    @EJB
139
    DataFileServiceBean fileService;
140

141
    @EJB
142
    IngestServiceBean ingestService;
143

144
    @EJB
145
    EjbDataverseEngine commandEngine;
146
    
147
    @EJB
148
    IndexServiceBean indexService;
149

150
    @EJB
151
    S3PackageImporter s3PackageImporter;
152
     
153
    @EJB
154
    SettingsServiceBean settingsService;
155

156
    // TODO: Move to AbstractApiBean
157
    @EJB
158
    DatasetMetricsServiceBean datasetMetricsSvc;
159
    
160
    @EJB
161
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;
162

163
    @EJB
164
    EmbargoServiceBean embargoService;
165

166
    @EJB
167
    RetentionServiceBean retentionService;
168

169
    @Inject
170
    MakeDataCountLoggingServiceBean mdcLogService;
171
    
172
    @Inject
173
    DataverseRequestServiceBean dvRequestService;
174

175
    @Inject
176
    WorkflowServiceBean wfService;
177
    
178
    @Inject
179
    DataverseRoleServiceBean dataverseRoleService;
180

181
    @EJB
182
    DatasetVersionServiceBean datasetversionService;
183

184
    @Inject
185
    PrivateUrlServiceBean privateUrlService;
186

187
    @Inject
188
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;
189

190
    /**
191
     * Used to consolidate the way we parse and handle dataset versions.
192
     * @param <T> 
193
     */
194
    public interface DsVersionHandler<T> {
195
        T handleLatest();
196
        T handleDraft();
197
        T handleSpecific( long major, long minor );
198
        T handleLatestPublished();
199
    }
200
    
201
    @GET
202
    @AuthRequired
203
    @Path("{id}")
204
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response,  @QueryParam("returnOwners") boolean returnOwners) {
205
        return response( req -> {
×
NEW
206
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true)));
×
207
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
208
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
×
209
            //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
210
            if((latest != null) && latest.isReleased()) {
×
211
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
×
212
                mdcLogService.logEntry(entry);
×
213
            }
214
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
×
215
        }, getRequestUser(crc));
×
216
    }
217
    
218
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand 
219
    // to obtain the dataset that we are trying to export - which would handle
220
    // Auth in the process... For now, Auth isn't necessary - since export ONLY 
221
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
222
    @GET
223
    @Path("/export")
224
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" })
225
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
226

227
        try {
228
            Dataset dataset = datasetService.findByGlobalId(persistentId);
×
229
            if (dataset == null) {
×
230
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
231
            }
232
            
233
            ExportService instance = ExportService.getInstance();
×
234
            
235
            InputStream is = instance.getExport(dataset, exporter);
×
236
           
237
            String mediaType = instance.getMediaType(exporter);
×
238
            //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft 
239
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
×
240
            mdcLogService.logEntry(entry);
×
241
            
242
            return Response.ok()
×
243
                    .entity(is)
×
244
                    .type(mediaType).
×
245
                    build();
×
246
        } catch (Exception wr) {
×
247
            logger.warning(wr.getMessage());
×
248
            return error(Response.Status.FORBIDDEN, "Export Failed");
×
249
        }
250
    }
251

252
    @DELETE
253
    @AuthRequired
254
    @Path("{id}")
255
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
256
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
257
        // (and there's a comment that says "TODO: remove this command")
258
        // do we need an exposed API call for it? 
259
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, 
260
        // if the dataset only has 1 version... In other words, the functionality 
261
        // currently provided by this API is covered between the "deleteDraftVersion" and
262
        // "destroyDataset" API calls.  
263
        // (The logic below follows the current implementation of the underlying 
264
        // commands!)
265

266
        User u = getRequestUser(crc);
×
267
        return response( req -> {
×
268
            Dataset doomed = findDatasetOrDie(id);
×
269
            DatasetVersion doomedVersion = doomed.getLatestVersion();
×
270
            boolean destroy = false;
×
271
            
272
            if (doomed.getVersions().size() == 1) {
×
273
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
274
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
×
275
                }
276
                destroy = true;
×
277
            } else {
278
                if (!doomedVersion.isDraft()) {
×
279
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
×
280
                }
281
            }
282
            
283
            // Gather the locations of the physical files that will need to be 
284
            // deleted once the destroy command execution has been finalized:
285
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
×
286
            
287
            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));
×
288
            
289
            // If we have gotten this far, the destroy command has succeeded, 
290
            // so we can finalize it by permanently deleting the physical files:
291
            // (DataFileService will double-check that the datafiles no 
292
            // longer exist in the database, before attempting to delete 
293
            // the physical files)
294
            if (!deleteStorageLocations.isEmpty()) {
×
295
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
296
            }
297
            
298
            return ok("Dataset " + id + " deleted");
×
299
        }, u);
300
    }
301
        
302
    @DELETE
303
    @AuthRequired
304
    @Path("{id}/destroy")
305
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
306

307
        User u = getRequestUser(crc);
×
308
        return response(req -> {
×
309
            // first check if dataset is released, and if so, if user is a superuser
310
            Dataset doomed = findDatasetOrDie(id);
×
311

312
            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
313
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
×
314
            }
315

316
            // Gather the locations of the physical files that will need to be 
317
            // deleted once the destroy command execution has been finalized:
318
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
319

320
            execCommand(new DestroyDatasetCommand(doomed, req));
×
321

322
            // If we have gotten this far, the destroy command has succeeded, 
323
            // so we can finalize permanently deleting the physical files:
324
            // (DataFileService will double-check that the datafiles no 
325
            // longer exist in the database, before attempting to delete 
326
            // the physical files)
327
            if (!deleteStorageLocations.isEmpty()) {
×
328
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
329
            }
330

331
            return ok("Dataset " + id + " destroyed");
×
332
        }, u);
333
    }
334
    
335
    @DELETE
336
    @AuthRequired
337
    @Path("{id}/versions/{versionId}")
338
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
339
        if (!DS_VERSION_DRAFT.equals(versionId)) {
×
340
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
×
341
        }
342

343
        return response( req -> {
×
344
            Dataset dataset = findDatasetOrDie(id);
×
345
            DatasetVersion doomed = dataset.getLatestVersion();
×
346
            
347
            if (!doomed.isDraft()) {
×
348
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
×
349
            }
350
            
351
            // Gather the locations of the physical files that will need to be 
352
            // deleted once the destroy command execution has been finalized:
353
            
354
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
355
            
356
            execCommand( new DeleteDatasetVersionCommand(req, dataset));
×
357
            
358
            // If we have gotten this far, the delete command has succeeded - 
359
            // by either deleting the Draft version of a published dataset, 
360
            // or destroying an unpublished one. 
361
            // This means we can finalize permanently deleting the physical files:
362
            // (DataFileService will double-check that the datafiles no 
363
            // longer exist in the database, before attempting to delete 
364
            // the physical files)
365
            if (!deleteStorageLocations.isEmpty()) {
×
366
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
367
            }
368
            
369
            return ok("Draft version of dataset " + id + " deleted");
×
370
        }, getRequestUser(crc));
×
371
    }
372
        
373
    @DELETE
374
    @AuthRequired
375
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
376
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
377
                boolean index = true;
×
378
        return response(req -> {
×
379
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
380
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
381
        }, getRequestUser(crc));
×
382
    }
383
        
384
    @PUT
385
    @AuthRequired
386
    @Path("{id}/citationdate")
387
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
388
        return response( req -> {
×
389
            if ( dsfTypeName.trim().isEmpty() ){
×
390
                return badRequest("Please provide a dataset field type in the requst body.");
×
391
            }
392
            DatasetFieldType dsfType = null;
×
393
            if (!":publicationDate".equals(dsfTypeName)) {
×
394
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
395
                if (dsfType == null) {
×
396
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
397
                }
398
            }
399

400
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
401
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
402
        }, getRequestUser(crc));
×
403
    }
404
    
405
    @DELETE
406
    @AuthRequired
407
    @Path("{id}/citationdate")
408
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
409
        return response( req -> {
×
410
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
411
            return ok("Citation Date for dataset " + id + " set to default");
×
412
        }, getRequestUser(crc));
×
413
    }
414
    
415
    @GET
416
    @AuthRequired
417
    @Path("{id}/versions")
418
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {
419

420
        return response( req -> {
×
421
            Dataset dataset = findDatasetOrDie(id);
×
422
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;
×
423

424
            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
×
425
                                .stream()
×
426
                                .map( d -> json(d, deepLookup) )
×
427
                                .collect(toJsonArray()));
×
428
        }, getRequestUser(crc));
×
429
    }
430
    
431
    @GET
432
    @AuthRequired
433
    @Path("{id}/versions/{versionId}")
434
    public Response getVersion(@Context ContainerRequestContext crc,
435
                               @PathParam("id") String datasetId,
436
                               @PathParam("versionId") String versionId,
437
                               @QueryParam("excludeFiles") Boolean excludeFiles,
438
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
439
                               @QueryParam("returnOwners") boolean returnOwners,
440
                               @Context UriInfo uriInfo,
441
                               @Context HttpHeaders headers) {
442
        return response( req -> {
×
443
            
444
            //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. 
UNCOV
445
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;
×
446
            
NEW
447
            Dataset dataset = findDatasetOrDie(datasetId);
×
NEW
448
            DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, 
×
449
                                                                            versionId, 
450
                                                                            dataset, 
451
                                                                            uriInfo, 
452
                                                                            headers, 
453
                                                                            includeDeaccessioned,
454
                                                                            checkPerms);
455

NEW
456
            if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) {
×
UNCOV
457
                return notFound("Dataset version not found");
×
458
            }
459

460
            if (excludeFiles == null ? true : !excludeFiles) {
×
NEW
461
                requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId());
×
462
            }
463

NEW
464
            JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
×
465
                                                 null, 
NEW
466
                                                 excludeFiles == null ? true : !excludeFiles, 
×
467
                                                 returnOwners);
NEW
468
            return ok(jsonBuilder);
×
469

UNCOV
470
        }, getRequestUser(crc));
×
471
    }
472

473
    @GET
474
    @AuthRequired
475
    @Path("{id}/versions/{versionId}/files")
476
    public Response getVersionFiles(@Context ContainerRequestContext crc,
477
                                    @PathParam("id") String datasetId,
478
                                    @PathParam("versionId") String versionId,
479
                                    @QueryParam("limit") Integer limit,
480
                                    @QueryParam("offset") Integer offset,
481
                                    @QueryParam("contentType") String contentType,
482
                                    @QueryParam("accessStatus") String accessStatus,
483
                                    @QueryParam("categoryName") String categoryName,
484
                                    @QueryParam("tabularTagName") String tabularTagName,
485
                                    @QueryParam("searchText") String searchText,
486
                                    @QueryParam("orderCriteria") String orderCriteria,
487
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
488
                                    @Context UriInfo uriInfo,
489
                                    @Context HttpHeaders headers) {
490
        return response(req -> {
×
NEW
491
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned);
×
492
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
493
            try {
494
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
×
495
            } catch (IllegalArgumentException e) {
×
496
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
×
497
            }
×
498
            FileSearchCriteria fileSearchCriteria;
499
            try {
500
                fileSearchCriteria = new FileSearchCriteria(
×
501
                        contentType,
502
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
503
                        categoryName,
504
                        tabularTagName,
505
                        searchText
506
                );
507
            } catch (IllegalArgumentException e) {
×
508
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
509
            }
×
510
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
×
511
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
512
        }, getRequestUser(crc));
×
513
    }
514

515
    @GET
516
    @AuthRequired
517
    @Path("{id}/versions/{versionId}/files/counts")
518
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
519
                                         @PathParam("id") String datasetId,
520
                                         @PathParam("versionId") String versionId,
521
                                         @QueryParam("contentType") String contentType,
522
                                         @QueryParam("accessStatus") String accessStatus,
523
                                         @QueryParam("categoryName") String categoryName,
524
                                         @QueryParam("tabularTagName") String tabularTagName,
525
                                         @QueryParam("searchText") String searchText,
526
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
527
                                         @Context UriInfo uriInfo,
528
                                         @Context HttpHeaders headers) {
529
        return response(req -> {
×
530
            FileSearchCriteria fileSearchCriteria;
531
            try {
532
                fileSearchCriteria = new FileSearchCriteria(
×
533
                        contentType,
534
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
535
                        categoryName,
536
                        tabularTagName,
537
                        searchText
538
                );
539
            } catch (IllegalArgumentException e) {
×
540
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
541
            }
×
542
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
543
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
544
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
545
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
×
546
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
×
547
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
×
548
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
×
549
            return ok(jsonObjectBuilder);
×
550
        }, getRequestUser(crc));
×
551
    }
552

553
    @GET
554
    @AuthRequired
555
    @Path("{id}/dirindex")
556
    @Produces("text/html")
557
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
558

559
        folderName = folderName == null ? "" : folderName;
×
560
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
×
561
        
562
        DatasetVersion version;
563
        try {
564
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
565
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
566
        } catch (WrappedResponse wr) {
×
567
            return wr.getResponse();
×
568
        }
×
569
        
570
        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);
×
571
        
572
        // return "NOT FOUND" if there is no such folder in the dataset version:
573
        
574
        if ("".equals(output)) {
×
575
            return notFound("Folder " + folderName + " does not exist");
×
576
        }
577
        
578
        
579
        String indexFileName = folderName.equals("") ? ".index.html"
×
580
                : ".index-" + folderName.replace('/', '_') + ".html";
×
581
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");
×
582

583
        
584
        return Response.ok()
×
585
                .entity(output)
×
586
                //.type("application/html").
587
                .build();
×
588
    }
589
    
590
    @GET
591
    @AuthRequired
592
    @Path("{id}/versions/{versionId}/metadata")
593
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
594
        return response( req -> ok(
×
595
                    jsonByBlocks(
×
596
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
×
597
                                .getDatasetFields())), getRequestUser(crc));
×
598
    }
599
    
600
    @GET
601
    @AuthRequired
602
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
603
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
604
                                            @PathParam("id") String datasetId,
605
                                            @PathParam("versionNumber") String versionNumber,
606
                                            @PathParam("block") String blockName,
607
                                            @Context UriInfo uriInfo,
608
                                            @Context HttpHeaders headers) {
609
        
610
        return response( req -> {
×
611
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
×
612
            
613
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
614
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
×
615
                if ( p.getKey().getName().equals(blockName) ) {
×
616
                    return ok(json(p.getKey(), p.getValue()));
×
617
                }
618
            }
×
619
            return notFound("metadata block named " + blockName + " not found");
×
620
        }, getRequestUser(crc));
×
621
    }
622

623
    /**
624
     * Add Signposting
625
     * @param datasetId
626
     * @param versionId
627
     * @param uriInfo
628
     * @param headers
629
     * @return
630
     */
631
    @GET
632
    @AuthRequired
633
    @Path("{id}/versions/{versionId}/linkset")
634
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, 
635
           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
636
        if (DS_VERSION_DRAFT.equals(versionId)) {
×
637
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
×
638
        }
639
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
640
        try {
641
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
642
            return Response
×
643
                    .ok(Json.createObjectBuilder()
×
644
                            .add("linkset",
×
645
                                    new SignpostingResources(systemConfig, dsv,
646
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
×
647
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
×
648
                                                    .getJsonLinkset())
×
649
                            .build())
×
650
                    .type(MediaType.APPLICATION_JSON).build();
×
651
        } catch (WrappedResponse wr) {
×
652
            return wr.getResponse();
×
653
        }
654
    }
655

656
    @GET
657
    @AuthRequired
658
    @Path("{id}/modifyRegistration")
659
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
660
        return response( req -> {
×
661
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
662
            return ok("Dataset " + id + " target url updated");
×
663
        }, getRequestUser(crc));
×
664
    }
665
    
666
    @POST
667
    @AuthRequired
668
    @Path("/modifyRegistrationAll")
669
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
670
        return response( req -> {
×
671
            datasetService.findAll().forEach( ds -> {
×
672
                try {
673
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
674
                } catch (WrappedResponse ex) {
×
675
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
676
                }
×
677
            });
×
678
            return ok("Update All Dataset target url completed");
×
679
        }, getRequestUser(crc));
×
680
    }
681
    
682
    @POST
683
    @AuthRequired
684
    @Path("{id}/modifyRegistrationMetadata")
685
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {
686

687
        try {
688
            Dataset dataset = findDatasetOrDie(id);
×
689
            if (!dataset.isReleased()) {
×
690
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
×
691
            }
692
        } catch (WrappedResponse ex) {
×
693
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
694
        }
×
695

696
        return response(req -> {
×
NEW
697
            Dataset dataset = findDatasetOrDie(id);
×
NEW
698
            execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req));
×
NEW
699
            List<String> args = Arrays.asList(dataset.getIdentifier());
×
700
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
×
701
        }, getRequestUser(crc));
×
702
    }
703
    
704
    @GET
705
    @AuthRequired
706
    @Path("/modifyRegistrationPIDMetadataAll")
707
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
708
        return response( req -> {
×
709
            datasetService.findAll().forEach( ds -> {
×
710
                try {
NEW
711
                    logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier());
×
NEW
712
                    if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) {
×
NEW
713
                        if (ds.isReleased()) {
×
NEW
714
                            logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered");
×
715
                        }
716
                    } else {
717
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
×
718
                    }
719
                } catch (WrappedResponse ex) {
×
720
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
721
                }
×
722
            });
×
723
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
×
724
        }, getRequestUser(crc));
×
725
    }
726
  
727
    @PUT
728
    @AuthRequired
729
    @Path("{id}/versions/{versionId}")
730
    @Consumes(MediaType.APPLICATION_JSON)
731
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
732
        if (!DS_VERSION_DRAFT.equals(versionId)) {
×
733
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
×
734
        }
735
        
736
        try {
737
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
738
            Dataset ds = findDatasetOrDie(id);
×
739
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
740
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
×
741
            
742
            // clear possibly stale fields from the incoming dataset version.
743
            // creation and modification dates are updated by the commands.
744
            incomingVersion.setId(null);
×
745
            incomingVersion.setVersionNumber(null);
×
746
            incomingVersion.setMinorVersionNumber(null);
×
747
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
×
748
            incomingVersion.setDataset(ds);
×
749
            incomingVersion.setCreateTime(null);
×
750
            incomingVersion.setLastUpdateTime(null);
×
751
            
752
            if (!incomingVersion.getFileMetadatas().isEmpty()){
×
753
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
×
754
            }
755
            
756
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
757
            
758
            DatasetVersion managedVersion;
759
            if (updateDraft) {
×
760
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
×
761
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
×
762
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
×
763
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
×
764
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
×
765
                if (!hasValidTerms) {
×
766
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
767
                }
768
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
769
                managedVersion = managedDataset.getOrCreateEditVersion();
×
770
            } else {
×
771
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
×
772
                if (!hasValidTerms) {
×
773
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
774
                }
775
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
×
776
            }
777
            return ok( json(managedVersion, true) );
×
778
                    
779
        } catch (JsonParseException ex) {
×
780
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
×
781
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );
×
782
            
783
        } catch (WrappedResponse ex) {
×
784
            return ex.getResponse();
×
785
            
786
        }
787
    }
788

789
    @GET
790
    @AuthRequired
791
    @Path("{id}/versions/{versionId}/metadata")
792
    @Produces("application/ld+json, application/json-ld")
793
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
794
        try {
795
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
796
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
×
797
            OREMap ore = new OREMap(dsv,
×
798
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
×
799
            return ok(ore.getOREMapBuilder(true));
×
800

801
        } catch (WrappedResponse ex) {
×
802
            ex.printStackTrace();
×
803
            return ex.getResponse();
×
804
        } catch (Exception jpe) {
×
805
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
×
806
            jpe.printStackTrace();
×
807
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
×
808
        }
809
    }
810

811
    @GET
812
    @AuthRequired
813
    @Path("{id}/metadata")
814
    @Produces("application/ld+json, application/json-ld")
815
    public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
816
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
×
817
    }
818

819
    @PUT
820
    @AuthRequired
821
    @Path("{id}/metadata")
822
    @Consumes("application/ld+json, application/json-ld")
823
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
824

825
        try {
826
            Dataset ds = findDatasetOrDie(id);
×
827
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
828
            //Get draft state as of now
829

830
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
831
            //Get the current draft or create a new version to update
832
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
833
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
×
834
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
835
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
×
836
            if (!hasValidTerms) {
×
837
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
838
            }
839
            DatasetVersion managedVersion;
840
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
841
            managedVersion = managedDataset.getLatestVersion();
×
842
            String info = updateDraft ? "Version Updated" : "Version Created";
×
843
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
844

845
        } catch (WrappedResponse ex) {
×
846
            return ex.getResponse();
×
847
        } catch (JsonParsingException jpe) {
×
848
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
849
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
850
        }
851
    }
852

853
    @PUT
854
    @AuthRequired
855
    @Path("{id}/metadata/delete")
856
    @Consumes("application/ld+json, application/json-ld")
857
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
858
        try {
859
            Dataset ds = findDatasetOrDie(id);
×
860
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
861
            //Get draft state as of now
862

863
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
864
            //Get the current draft or create a new version to update
865
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
866
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
×
867
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
868
            DatasetVersion managedVersion;
869
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
870
            managedVersion = managedDataset.getLatestVersion();
×
871
            String info = updateDraft ? "Version Updated" : "Version Created";
×
872
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
873

874
        } catch (WrappedResponse ex) {
×
875
            ex.printStackTrace();
×
876
            return ex.getResponse();
×
877
        } catch (JsonParsingException jpe) {
×
878
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
879
            jpe.printStackTrace();
×
880
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
881
        }
882
    }
883

884
    @PUT
885
    @AuthRequired
886
    @Path("{id}/deleteMetadata")
887
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
888

889
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
890

891
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
892
    }
893

894
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
895
        try {
896

897
            Dataset ds = findDatasetOrDie(id);
×
898
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
899
            //Get the current draft or create a new version to update
900
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
901
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
902
            List<DatasetField> fields = new LinkedList<>();
×
903
            DatasetField singleField = null;
×
904

905
            JsonArray fieldsJson = json.getJsonArray("fields");
×
906
            if (fieldsJson == null) {
×
907
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
908
                fields.add(singleField);
×
909
            } else {
910
                fields = jsonParser().parseMultipleFields(json);
×
911
            }
912

913
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
914

915
            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
×
916
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
×
917
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();
×
918

919
            for (DatasetField updateField : fields) {
×
920
                boolean found = false;
×
921
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
922
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
923
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
924
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
925
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
926
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
927
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
×
928
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
×
929
                                                found = true;
×
930
                                                controlledVocabularyItemsToRemove.add(existing);
×
931
                                            }
932
                                        }
×
933
                                        if (!found) {
×
934
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
×
935
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
×
936
                                        }
937
                                    }
×
938
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
×
939
                                        dsf.getControlledVocabularyValues().remove(remove);
×
940
                                    }
×
941

942
                                } else {
943
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
×
944
                                        found = true;
×
945
                                        dsf.setSingleControlledVocabularyValue(null);
×
946
                                    }
947

948
                                }
949
                            } else {
950
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
951
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
952
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
953
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
×
954
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
×
955
                                                    found = true;
×
956
                                                    datasetFieldValueItemsToRemove.add(dfv);
×
957
                                                }
958
                                            }
×
959
                                            if (!found) {
×
960
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
×
961
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
×
962
                                            }
963
                                        }
×
964
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
×
965
                                            dsf.getDatasetFieldValues().remove(remove);
×
966
                                        });
×
967

968
                                    } else {
969
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
×
970
                                            found = true;
×
971
                                            dsf.setSingleValue(null);
×
972
                                        }
973

974
                                    }
975
                                } else {
976
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
977
                                        String deleteVal = getCompoundDisplayValue(dfcv);
×
978
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
×
979
                                            String existingString = getCompoundDisplayValue(existing);
×
980
                                            if (existingString.equals(deleteVal)) {
×
981
                                                found = true;
×
982
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
×
983
                                            }
984
                                        }
×
985
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
×
986
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
×
987
                                        });
×
988
                                        if (!found) {
×
989
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
×
990
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
×
991
                                        }
992
                                    }
×
993
                                }
994
                            }
995
                        } else {
996
                            found = true;
×
997
                            dsf.setSingleValue(null);
×
998
                            dsf.setSingleControlledVocabularyValue(null);
×
999
                        }
1000
                        break;
×
1001
                    }
1002
                }
×
1003
                if (!found){
×
1004
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
×
1005
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
×
1006
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
×
1007
                }
1008
            }
×
1009

1010

1011
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1012
            return ok(json(managedVersion, true));
×
1013

1014
        } catch (JsonParseException ex) {
×
1015
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1016
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());
×
1017

1018
        } catch (WrappedResponse ex) {
×
1019
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
×
1020
            return ex.getResponse();
×
1021

1022
        }
1023
    
1024
    }
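    /*
     * Illustrative note (not part of the shipped code): the delete logic above matches
     * values to remove by their display value. For example, sending a "keyword" field
     * whose value renders as "Economics" removes only an exact "Economics" entry from
     * the draft version; if no entry matches, the request fails with 400 and the
     * "... not found." message built above.
     */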
1025
    
1026
    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
1027
        String returnString = "";
×
1028
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
×
1029
            for (String value : dsf.getValues()) {
×
1030
                if (value != null) {
×
1031
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
×
1032
                }
1033
            }
×
1034
        }
×
1035
        return returnString;
×
1036
    }
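    /*
     * Illustrative example (hypothetical values): for an author compound value whose
     * child fields hold "Smith, Jane" and "Harvard University", the helper above
     * returns "Smith, Jane; Harvard University" - non-null child values joined with
     * "; " in child-field order.
     */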
1037
    
1038
    @PUT
1039
    @AuthRequired
1040
    @Path("{id}/editMetadata")
1041
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
1042

1043
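        // Note: as written, replaceData is true whenever the 'replace' query parameter
        // is present, regardless of its value (e.g. ?replace=false still enables replace mode).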
        Boolean replaceData = replace != null;
×
1044
        DataverseRequest req = null;
×
1045
        req = createDataverseRequest(getRequestUser(crc));
×
1046

1047
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
1048
    }
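    /*
     * Example request (illustrative; server URL, id and token are hypothetical, assuming
     * the standard /api/datasets root and X-Dataverse-key header used elsewhere in the API):
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X PUT \
     *     "$SERVER_URL/api/datasets/$ID/editMetadata?replace=true" \
     *     --upload-file metadata.json
     *
     * where metadata.json carries either a single field object or a "fields" array,
     * as parsed in processDatasetUpdate() below.
     */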
1049
    
1050
    
1051
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
1052
        try {
1053
           
1054
            Dataset ds = findDatasetOrDie(id);
×
1055
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1056
            //Get the current draft or create a new version to update
1057
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
1058
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
1059
            List<DatasetField> fields = new LinkedList<>();
×
1060
            DatasetField singleField = null;
×
1061
            
1062
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1063
            if (fieldsJson == null) {
×
1064
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
1065
                fields.add(singleField);
×
1066
            } else {
1067
                fields = jsonParser().parseMultipleFields(json);
×
1068
            }
1069
            
1070

1071
            String validationErrors = validateDatasetFieldValues(fields);
×
1072

1073
            if (!validationErrors.isEmpty()) {
×
1074
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + validationErrors, validationErrors);
×
1075
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + validationErrors);
×
1076
            }
1077

1078
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1079

1080
            //loop through the update fields     
1081
            // and compare to the version fields  
1082
            //if exist add/replace values
1083
            //if not add entire dsf
1084
            for (DatasetField updateField : fields) {
×
1085
                boolean found = false;
×
1086
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
1087
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
1088
                        found = true;
×
1089
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1090
                            List priorCVV = new ArrayList<>();
×
1091
                            String cvvDisplay = "";
×
1092

1093
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1094
                                cvvDisplay = dsf.getDisplayValue();
×
1095
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
×
1096
                                    priorCVV.add(cvvOld);
×
1097
                                }
×
1098
                            }
1099

1100
                            if (replaceData) {
×
1101
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1102
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
×
1103
                                    dsf.setDatasetFieldValues(new ArrayList<>());
×
1104
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
×
1105
                                    priorCVV.clear();
×
1106
                                    dsf.getControlledVocabularyValues().clear();
×
1107
                                } else {
1108
                                    dsf.setSingleValue("");
×
1109
                                    dsf.setSingleControlledVocabularyValue(null);
×
1110
                                }
1111
                              cvvDisplay="";
×
1112
                            }
1113
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1114
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1115
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1116
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
×
1117
                                            priorCVV.add(cvv);
×
1118
                                        }
1119
                                    }
×
1120
                                    dsf.setControlledVocabularyValues(priorCVV);
×
1121
                                } else {
1122
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1123
                                }
1124
                            } else {
1125
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
1126
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1127
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
1128
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
×
1129
                                                dfv.setDatasetField(dsf);
×
1130
                                                dsf.getDatasetFieldValues().add(dfv);
×
1131
                                            }
1132
                                        }
×
1133
                                    } else {
1134
                                        dsf.setSingleValue(updateField.getValue());
×
1135
                                    }
1136
                                } else {
1137
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
1138
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
×
1139
                                            dfcv.setParentDatasetField(dsf);
×
1140
                                            dsf.setDatasetVersion(dsv);
×
1141
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
×
1142
                                        }
1143
                                    }
×
1144
                                }
1145
                            }
1146
                        } else {
×
1147
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
×
1148
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
×
1149
                            }
1150
                        }
1151
                        break;
1152
                    }
1153
                }
×
1154
                if (!found) {
×
1155
                    updateField.setDatasetVersion(dsv);
×
1156
                    dsv.getDatasetFields().add(updateField);
×
1157
                }
1158
            }
×
1159
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1160

1161
            return ok(json(managedVersion, true));
×
1162

1163
        } catch (JsonParseException ex) {
×
1164
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1165
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1166

1167
        } catch (WrappedResponse ex) {
×
1168
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
1169
            return ex.getResponse();
×
1170

1171
        }
1172
    }
1173
    
1174
    private String validateDatasetFieldValues(List<DatasetField> fields) {
1175
        StringBuilder error = new StringBuilder();
×
1176

1177
        for (DatasetField dsf : fields) {
×
1178
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1179
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
×
1180
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1181
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) {
×
1182
                error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1183
            }
1184
        }
×
1185

1186
        if (!error.toString().isEmpty()) {
×
1187
            return (error.toString());
×
1188
        }
1189
        return "";
×
1190
    }
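    /*
     * Illustrative example: if a non-repeatable field such as "Title" arrives with an
     * empty value, the method above returns "Empty value for field: Title " and the
     * caller rejects the update with 400; an empty string means validation passed.
     */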
1191
    
1192
    /**
1193
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
1194
     */
1195
    @GET
1196
    @AuthRequired
1197
    @Path("{id}/actions/:publish")
1198
    @Deprecated
1199
    public Response publishDataseUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type ) {
1200
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
×
1201
        return publishDataset(crc, id, type, false);
×
1202
    }
1203

1204
    @POST
1205
    @AuthRequired
1206
    @Path("{id}/actions/:publish")
1207
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
1208
        try {
1209
            if (type == null) {
×
1210
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
×
1211
            }
1212
            boolean updateCurrent=false;
×
1213
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1214
            type = type.toLowerCase();
×
1215
            boolean isMinor=false;
×
1216
            switch (type) {
×
1217
                case "minor":
1218
                    isMinor = true;
×
1219
                    break;
×
1220
                case "major":
1221
                    isMinor = false;
×
1222
                    break;
×
1223
                case "updatecurrent":
1224
                    if (user.isSuperuser()) {
×
1225
                        updateCurrent = true;
×
1226
                    } else {
1227
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
×
1228
                    }
1229
                    break;
1230
                default:
1231
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
×
1232
            }
1233

1234
            Dataset ds = findDatasetOrDie(id);
×
1235
            
1236
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1237
            if (!hasValidTerms) {
×
1238
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1239
            }
1240
            
1241
            if (mustBeIndexed) {
×
1242
                logger.fine("IT: " + ds.getIndexTime());
×
1243
                logger.fine("MT: " + ds.getModificationTime());
×
1244
                logger.fine("PIT: " + ds.getPermissionIndexTime());
×
1245
                logger.fine("PMT: " + ds.getPermissionModificationTime());
×
1246
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
×
1247
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
×
1248
                }
1249
                /*
1250
                 * Some calls, such as the /datasets/actions/:import* commands do not set the
1251
                 * modification or permission modification times. The checks here are trying to
1252
                 * see if indexing or permissionindexing could be pending, so they check to see
1253
                 * if the relevant modification time is set and if so, whether the index is also
1254
                 * set and if so, if it after the modification time. If the modification time is
1255
                 * set and the index time is null or is before the mod time, the 409/conflict
1256
                 * error is returned.
1257
                 *
1258
                 */
1259
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
×
1260
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
×
1261
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
×
1262
                }
1263
            }
1264
            if (updateCurrent) {
×
1265
                /*
1266
                 * Note: The code here mirrors that in the
1267
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
1268
                 * to the core logic (i.e. beyond updating the messaging about results) should
1269
                 * be applied to the code there as well.
1270
                 */
1271
                String errorMsg = null;
×
1272
                String successMsg = null;
×
1273
                try {
1274
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
×
1275
                    ds = commandEngine.submit(cmd);
×
1276
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");
×
1277

1278
                    // If configured, update archive copy as well
1279
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
×
1280
                    DatasetVersion updateVersion = ds.getLatestVersion();
×
1281
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
×
1282
                    if (archiveCommand != null) {
×
1283
                        // Delete the record of any existing copy since it is now out of date/incorrect
1284
                        updateVersion.setArchivalCopyLocation(null);
×
1285
                        /*
1286
                         * Then try to generate and submit an archival copy. Note that running this
1287
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
1288
                         * "The attribute [id] of class
1289
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
1290
                         * key column in the database. Updates are not allowed." To avoid that, and to
1291
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
1292
                         * pulled this out as a separate submit().
1293
                         */
1294
                        try {
1295
                            updateVersion = commandEngine.submit(archiveCommand);
×
1296
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
×
1297
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
×
1298
                            } else {
1299
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
×
1300
                            }
1301
                        } catch (CommandException ex) {
×
1302
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
×
1303
                            logger.severe(ex.getMessage());
×
1304
                        }
×
1305
                    }
1306
                } catch (CommandException ex) {
×
1307
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
1308
                    logger.severe(ex.getMessage());
×
1309
                }
×
1310
                if (errorMsg != null) {
×
1311
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
×
1312
                } else {
1313
                    return Response.ok(Json.createObjectBuilder()
×
1314
                            .add("status", ApiConstants.STATUS_OK)
×
1315
                            .add("status_details", successMsg)
×
1316
                            .add("data", json(ds)).build())
×
1317
                            .type(MediaType.APPLICATION_JSON)
×
1318
                            .build();
×
1319
                }
1320
            } else {
1321
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
×
1322
                        createDataverseRequest(user),
×
1323
                        isMinor));
1324
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
×
1325
            }
1326
        } catch (WrappedResponse ex) {
×
1327
            return ex.getResponse();
×
1328
        }
1329
    }
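    /*
     * Example requests (illustrative; identifiers and token are hypothetical):
     *
     *   # regular major release
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/$ID/actions/:publish?type=major"
     *
     *   # superuser-only in-place update of the published version
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/$ID/actions/:publish?type=updatecurrent"
     *
     * A 200 response means the dataset was published; 202 means a publication
     * workflow was started and will finalize the release asynchronously.
     */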
1330

1331
    @POST
1332
    @AuthRequired
1333
    @Path("{id}/actions/:releasemigrated")
1334
    @Consumes("application/ld+json, application/json-ld")
1335
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
1336
        try {
1337
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1338
            if (!user.isSuperuser()) {
×
1339
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
×
1340
            }
1341

1342
            Dataset ds = findDatasetOrDie(id);
×
1343
            try {
1344
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
×
1345
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
×
1346
                logger.fine("Submitted date: " + pubDate);
×
1347
                LocalDateTime dateTime = null;
×
1348
                if(!StringUtils.isEmpty(pubDate)) {
×
1349
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
×
1350
                    final Timestamp time = Timestamp.valueOf(dateTime);
×
1351
                    //Set version release date
1352
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
×
1353
                }
1354
                // dataset.getPublicationDateFormattedYYYYMMDD())
1355
                // Assign a version number if not set
1356
                if (ds.getLatestVersion().getVersionNumber() == null) {
×
1357

1358
                    if (ds.getVersions().size() == 1) {
×
1359
                        // First Release
1360
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
×
1361
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
×
1362
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
×
1363
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
×
1364
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
×
1365
                    } else {
1366
                        // major, non-first release
1367
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
×
1368
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
×
1369
                    }
1370
                }
1371
                if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) {
×
1372
                    //Also set publication date if this is the first
1373
                    if(dateTime != null) {
×
1374
                      ds.setPublicationDate(Timestamp.valueOf(dateTime));
×
1375
                    }
1376
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
1377
                    // is null, so set it here.
1378
                    ds.setReleaseUser((AuthenticatedUser) user);
×
1379
                }
1380
            } catch (Exception e) {
×
1381
                logger.fine(e.getMessage());
×
1382
                throw new BadRequestException("Unable to set publication date ("
×
1383
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
×
1384
            }
×
1385
            /*
1386
             * Note: The code here mirrors that in the
1387
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
1388
             * to the core logic (i.e. beyond updating the messaging about results) should
1389
             * be applied to the code there as well.
1390
             */
1391
            String errorMsg = null;
×
1392
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);
×
1393

1394
            try {
1395
                // ToDo - should this be in onSuccess()? May relate to todo above
1396
                if (prePubWf.isPresent()) {
×
1397
                    // Start the workflow, the workflow will call FinalizeDatasetPublication later
1398
                    wfService.start(prePubWf.get(),
×
1399
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
×
1400
                            false);
1401
                } else {
1402
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
×
1403
                            createDataverseRequest(user), !contactPIDProvider);
×
1404
                    ds = commandEngine.submit(cmd);
×
1405
                }
1406
            } catch (CommandException ex) {
×
1407
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
1408
                logger.severe(ex.getMessage());
×
1409
            }
×
1410

1411
            if (errorMsg != null) {
×
1412
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
×
1413
            } else {
1414
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
×
1415
            }
1416

1417
        } catch (WrappedResponse ex) {
×
1418
            return ex.getResponse();
×
1419
        }
1420
    }
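    /*
     * Example request (illustrative; superuser token and values are hypothetical).
     * A minimal JSON-LD body might supply the original publication date via the
     * schema.org term read above:
     *
     *   curl -H "X-Dataverse-key: $SUPERUSER_TOKEN" -X POST \
     *     -H "Content-Type: application/ld+json" \
     *     "$SERVER_URL/api/datasets/$ID/actions/:releasemigrated?updatepidatprovider=true" \
     *     -d '{"http://schema.org/datePublished": "2015-09-30"}'
     */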
1421

1422
    @POST
1423
    @AuthRequired
1424
    @Path("{id}/move/{targetDataverseAlias}")
1425
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
1426
        try {
1427
            User u = getRequestUser(crc);
×
1428
            Dataset ds = findDatasetOrDie(id);
×
1429
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
×
1430
            if (target == null) {
×
1431
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
×
1432
            }
1433
            //Command requires Super user - it will be tested by the command
1434
            execCommand(new MoveDatasetCommand(
×
1435
                    createDataverseRequest(u), ds, target, force
×
1436
            ));
1437
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
×
1438
        } catch (WrappedResponse ex) {
×
1439
            if (ex.getCause() instanceof UnforcedCommandException) {
×
1440
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
×
1441
            } else {
1442
                return ex.getResponse();
×
1443
            }
1444
        }
1445
    }
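    /*
     * Example request (illustrative; superuser token, dataset id and target alias are
     * hypothetical). forceMove=true bypasses the checks that otherwise produce the
     * "suggestForce" refinement handled above:
     *
     *   curl -H "X-Dataverse-key: $SUPERUSER_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/$ID/move/$TARGET_ALIAS?forceMove=true"
     */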
1446

1447
    @POST
1448
    @AuthRequired
1449
    @Path("{id}/files/actions/:set-embargo")
1450
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1451

1452
        // user is authenticated
1453
        AuthenticatedUser authenticatedUser = null;
×
1454
        try {
1455
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1456
        } catch (WrappedResponse ex) {
×
1457
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1458
        }
×
1459

1460
        Dataset dataset;
1461
        try {
1462
            dataset = findDatasetOrDie(id);
×
1463
        } catch (WrappedResponse ex) {
×
1464
            return ex.getResponse();
×
1465
        }
×
1466
        
1467
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1468
        
1469
        if (!hasValidTerms){
×
1470
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1471
        }
1472

1473
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1474
        /*
1475
         * This is only a pre-test - if there's no draft version, there are clearly no
1476
         * files that a normal user can change. The converse is not true. A draft
1477
         * version could contain only files that have already been released. Further, we
1478
         * haven't checked the file list yet so the user could still be trying to change
1479
         * released files even if there are some unreleased/draft-only files. Doing this
1480
         * check here does avoid having to do further parsing for some error cases. It
1481
         * also checks the user can edit this dataset, so we don't have to make that
1482
         * check later.
1483
         */
1484

1485
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1486
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1487
        }
1488

1489
        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
1490
        long maxEmbargoDurationInMonths = 0;
×
1491
        try {
1492
            maxEmbargoDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1493
        } catch (NumberFormatException nfe){
×
1494
            if (nfe.getMessage().contains("null")) {
×
1495
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1496
            }
1497
        }
×
1498
        if (maxEmbargoDurationInMonths == 0){
×
1499
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1500
        }
1501

1502
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1503

1504
        Embargo embargo = new Embargo();
×
1505

1506

1507
        LocalDate currentDateTime = LocalDate.now();
×
1508
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));
×
1509

1510
        // check :MaxEmbargoDurationInMonths if -1
1511
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
×
1512
        // dateAvailable is not in the past
1513
        if (dateAvailable.isAfter(currentDateTime)){
×
1514
            embargo.setDateAvailable(dateAvailable);
×
1515
        } else {
1516
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
×
1517
        }
1518

1519
        // dateAvailable is within limits
1520
        if (maxEmbargoDateTime != null){
×
1521
            if (dateAvailable.isAfter(maxEmbargoDateTime)){
×
1522
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
×
1523
            }
1524
        }
1525

1526
        embargo.setReason(json.getString("reason"));
×
1527

1528
        List<DataFile> datasetFiles = dataset.getFiles();
×
1529
        List<DataFile> filesToEmbargo = new LinkedList<>();
×
1530

1531
        // extract fileIds from json, find datafiles and add to list
1532
        if (json.containsKey("fileIds")){
×
1533
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1534
            for (JsonValue jsv : fileIds) {
×
1535
                try {
1536
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1537
                    filesToEmbargo.add(dataFile);
×
1538
                } catch (WrappedResponse ex) {
×
1539
                    return ex.getResponse();
×
1540
                }
×
1541
            }
×
1542
        }
1543

1544
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1545
        // check if files belong to dataset
1546
        if (datasetFiles.containsAll(filesToEmbargo)) {
×
1547
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1548
            boolean badFiles = false;
×
1549
            for (DataFile datafile : filesToEmbargo) {
×
1550
                // superuser can overrule an existing embargo, even on released files
1551
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
1552
                    restrictedFiles.add(datafile.getId());
×
1553
                    badFiles = true;
×
1554
                }
1555
            }
×
1556
            if (badFiles) {
×
1557
                return Response.status(Status.FORBIDDEN)
×
1558
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1559
                                .add("message", "You do not have permission to embargo the following files")
×
1560
                                .add("files", restrictedFiles).build())
×
1561
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1562
            }
1563
            embargo=embargoService.merge(embargo);
×
1564
            // Good request, so add the embargo. Track any existing embargoes so we can
1565
            // delete them if there are no files left that reference them.
1566
            for (DataFile datafile : filesToEmbargo) {
×
1567
                Embargo emb = datafile.getEmbargo();
×
1568
                if (emb != null) {
×
1569
                    emb.getDataFiles().remove(datafile);
×
1570
                    if (emb.getDataFiles().isEmpty()) {
×
1571
                        orphanedEmbargoes.add(emb);
×
1572
                    }
1573
                }
1574
                // Save merges the datafile with an embargo into the context
1575
                datafile.setEmbargo(embargo);
×
1576
                fileService.save(datafile);
×
1577
            }
×
1578
            //Call service to get action logged
1579
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
×
1580
            if (orphanedEmbargoes.size() > 0) {
×
1581
                for (Embargo emb : orphanedEmbargoes) {
×
1582
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1583
                }
×
1584
            }
1585
            //If superuser, report changes to any released files
1586
            if (authenticatedUser.isSuperuser()) {
×
1587
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
×
1588
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
1589
                if (!releasedFiles.isBlank()) {
×
1590
                    actionLogSvc
×
1591
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
×
1592
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
×
1593
                                            + releasedFiles + ".")
1594
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1595
                }
1596
            }
1597
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
×
1598
        } else {
1599
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1600
        }
1601
    }
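    /*
     * Example request (illustrative; ids, date and reason are hypothetical). The
     * endpoint reads dateAvailable, reason and fileIds from the JSON body, as above:
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/$ID/files/actions/:set-embargo" \
     *     -H "Content-Type: application/json" \
     *     -d '{"dateAvailable":"2026-10-20","reason":"Embargo pending publication","fileIds":[300,301]}'
     *
     * The request fails with 400 if :MaxEmbargoDurationInMonths is 0/unset, if
     * dateAvailable is not in the future, or if it exceeds the configured maximum.
     */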
1602

1603
    @POST
1604
    @AuthRequired
1605
    @Path("{id}/files/actions/:unset-embargo")
1606
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1607

1608
        // user is authenticated
1609
        AuthenticatedUser authenticatedUser = null;
×
1610
        try {
1611
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1612
        } catch (WrappedResponse ex) {
×
1613
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1614
        }
×
1615

1616
        Dataset dataset;
1617
        try {
1618
            dataset = findDatasetOrDie(id);
×
1619
        } catch (WrappedResponse ex) {
×
1620
            return ex.getResponse();
×
1621
        }
×
1622

1623
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1624
        // check if files are unreleased(DRAFT?)
1625
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1626
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1627
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1628
        }
1629

1630
        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
1631
        //Todo - is 400 right for embargoes not enabled
1632
        //Todo - handle getting Long for duration in one place (settings getLong method? or is that only in wrapper (view scoped)?
1633
        int maxEmbargoDurationInMonths = 0;
×
1634
        try {
1635
            maxEmbargoDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1636
        } catch (NumberFormatException nfe){
×
1637
            if (nfe.getMessage().contains("null")) {
×
1638
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1639
            }
1640
        }
×
1641
        if (maxEmbargoDurationInMonths == 0){
×
1642
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1643
        }
1644

1645
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1646

1647
        List<DataFile> datasetFiles = dataset.getFiles();
×
1648
        List<DataFile> embargoFilesToUnset = new LinkedList<>();
×
1649

1650
        // extract fileIds from json, find datafiles and add to list
1651
        if (json.containsKey("fileIds")){
×
1652
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1653
            for (JsonValue jsv : fileIds) {
×
1654
                try {
1655
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1656
                    embargoFilesToUnset.add(dataFile);
×
1657
                } catch (WrappedResponse ex) {
×
1658
                    return ex.getResponse();
×
1659
                }
×
1660
            }
×
1661
        }
1662

1663
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1664
        // check if files belong to dataset
1665
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
×
1666
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1667
            boolean badFiles = false;
×
1668
            for (DataFile datafile : embargoFilesToUnset) {
×
1669
                // superuser can overrule an existing embargo, even on released files
1670
                if (datafile.getEmbargo()==null || ((datafile.isReleased() && datafile.getEmbargo() != null) && !authenticatedUser.isSuperuser())) {
×
1671
                    restrictedFiles.add(datafile.getId());
×
1672
                    badFiles = true;
×
1673
                }
1674
            }
×
1675
            if (badFiles) {
×
1676
                return Response.status(Status.FORBIDDEN)
×
1677
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1678
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
×
1679
                                .add("files", restrictedFiles).build())
×
1680
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1681
            }
1682
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
1683
            // delete them if there are no files left that reference them.
1684
            for (DataFile datafile : embargoFilesToUnset) {
×
1685
                Embargo emb = datafile.getEmbargo();
×
1686
                if (emb != null) {
×
1687
                    emb.getDataFiles().remove(datafile);
×
1688
                    if (emb.getDataFiles().isEmpty()) {
×
1689
                        orphanedEmbargoes.add(emb);
×
1690
                    }
1691
                }
1692
                // Save merges the datafile with an embargo into the context
1693
                datafile.setEmbargo(null);
×
1694
                fileService.save(datafile);
×
1695
            }
×
1696
            if (orphanedEmbargoes.size() > 0) {
×
1697
                for (Embargo emb : orphanedEmbargoes) {
×
1698
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1699
                }
×
1700
            }
1701
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
1702
            if(!releasedFiles.isBlank()) {
×
1703
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
×
1704
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
1705
                actionLogSvc.log(removeRecord);
×
1706
            }
1707
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
×
1708
        } else {
1709
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1710
        }
1711
    }
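    /*
     * Example request (illustrative; ids and token are hypothetical). Only fileIds is
     * read from the body here:
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/$ID/files/actions/:unset-embargo" \
     *     -H "Content-Type: application/json" \
     *     -d '{"fileIds":[300,301]}'
     */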
1712

1713
    @POST
1714
    @AuthRequired
1715
    @Path("{id}/files/actions/:set-retention")
1716
    public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1717

1718
        // user is authenticated
NEW
1719
        AuthenticatedUser authenticatedUser = null;
×
1720
        try {
NEW
1721
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
NEW
1722
        } catch (WrappedResponse ex) {
×
NEW
1723
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
NEW
1724
        }
×
1725

1726
        Dataset dataset;
1727
        try {
NEW
1728
            dataset = findDatasetOrDie(id);
×
NEW
1729
        } catch (WrappedResponse ex) {
×
NEW
1730
            return ex.getResponse();
×
NEW
1731
        }
×
1732

NEW
1733
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1734

NEW
1735
        if (!hasValidTerms){
×
NEW
1736
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1737
        }
1738

1739
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1740
        // check if files are unreleased(DRAFT?)
NEW
1741
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
NEW
1742
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1743
        }
1744

1745
        // Check whether retention periods are allowed: read the :MinRetentionDurationInMonths setting; if it is 0 or not set (null), return 400.
NEW
1746
        long minRetentionDurationInMonths = 0;
×
1747
        try {
NEW
1748
            minRetentionDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
×
NEW
1749
        } catch (NumberFormatException nfe){
×
NEW
1750
            if (nfe.getMessage().contains("null")) {
×
NEW
1751
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1752
            }
NEW
1753
        }
×
NEW
1754
        if (minRetentionDurationInMonths == 0){
×
NEW
1755
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1756
        }
1757

1758
        JsonObject json;
1759
        try {
NEW
1760
            json = JsonUtil.getJsonObject(jsonBody);
×
NEW
1761
        } catch (JsonException ex) {
×
NEW
1762
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
×
NEW
1763
        }
×
1764

NEW
1765
        Retention retention = new Retention();
×
1766

1767

NEW
1768
        LocalDate currentDateTime = LocalDate.now();
×
1769

1770
        // Extract the dateUnavailable - check if specified and valid
NEW
1771
        String dateUnavailableStr = "";
×
1772
        LocalDate dateUnavailable;
1773
        try {
NEW
1774
            dateUnavailableStr = json.getString("dateUnavailable");
×
NEW
1775
            dateUnavailable = LocalDate.parse(dateUnavailableStr);
×
NEW
1776
        } catch (NullPointerException npex) {
×
NEW
1777
            return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified");
×
NEW
1778
        } catch (ClassCastException ccex) {
×
NEW
1779
            return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string");
×
NEW
1780
        } catch (DateTimeParseException dtpex) {
×
NEW
1781
            return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr);
×
NEW
1782
        }
×
1783

1784
        // check :MinRetentionDurationInMonths if -1
NEW
1785
        LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? LocalDate.now().plusMonths(minRetentionDurationInMonths) : null;
×
1786
        // dateUnavailable is not in the past
NEW
1787
        if (dateUnavailable.isAfter(currentDateTime)){
×
NEW
1788
            retention.setDateUnavailable(dateUnavailable);
×
1789
        } else {
NEW
1790
            return error(Status.BAD_REQUEST, "Date unavailable can not be in the past");
×
1791
        }
1792

1793
        // dateUnavailable must be at least the minimum retention period from now
NEW
1794
        if (minRetentionDateTime != null){
×
NEW
1795
            if (dateUnavailable.isBefore(minRetentionDateTime)){
×
NEW
1796
                return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
×
1797
            }
1798
        }
1799
        
1800
        try {
NEW
1801
            String reason = json.getString("reason");
×
NEW
1802
            retention.setReason(reason);
×
NEW
1803
        } catch (NullPointerException npex) {
×
1804
            // ignoring; no reason specified is OK, it is optional
NEW
1805
        } catch (ClassCastException ccex) {
×
NEW
1806
            return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string");
×
NEW
1807
        }
×
1808

1809

NEW
1810
        List<DataFile> datasetFiles = dataset.getFiles();
×
NEW
1811
        List<DataFile> filesToRetention = new LinkedList<>();
×
1812

1813
        // extract fileIds from json, find datafiles and add to list
NEW
1814
        if (json.containsKey("fileIds")){
×
1815
            try {
NEW
1816
                JsonArray fileIds = json.getJsonArray("fileIds");
×
NEW
1817
                for (JsonValue jsv : fileIds) {
×
1818
                    try {
NEW
1819
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
×
NEW
1820
                        filesToRetention.add(dataFile);
×
NEW
1821
                    } catch (WrappedResponse ex) {
×
NEW
1822
                        return ex.getResponse();
×
NEW
1823
                    }
×
NEW
1824
                }
×
NEW
1825
            } catch (ClassCastException ccex) {
×
NEW
1826
                return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings");
×
NEW
1827
            } catch (NullPointerException npex) {
×
NEW
1828
                return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified");
×
NEW
1829
            }
×
1830
        } else {
NEW
1831
            return error(Status.BAD_REQUEST, "No fileIds specified");
×
1832
        }
1833

NEW
1834
        List<Retention> orphanedRetentions = new ArrayList<Retention>();
×
1835
        // check if files belong to dataset
NEW
1836
        if (datasetFiles.containsAll(filesToRetention)) {
×
NEW
1837
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
NEW
1838
            boolean badFiles = false;
×
NEW
1839
            for (DataFile datafile : filesToRetention) {
×
1840
                // superuser can overrule an existing retention, even on released files
NEW
1841
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
NEW
1842
                    restrictedFiles.add(datafile.getId());
×
NEW
1843
                    badFiles = true;
×
1844
                }
NEW
1845
            }
×
NEW
1846
            if (badFiles) {
×
NEW
1847
                return Response.status(Status.FORBIDDEN)
×
NEW
1848
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
NEW
1849
                                .add("message", "You do not have permission to set a retention period for the following files")
×
NEW
1850
                                .add("files", restrictedFiles).build())
×
NEW
1851
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1852
            }
NEW
1853
            retention=retentionService.merge(retention);
×
1854
            // Good request, so add the retention. Track any existing retentions so we can
1855
            // delete them if there are no files left that reference them.
NEW
1856
            for (DataFile datafile : filesToRetention) {
×
NEW
1857
                Retention ret = datafile.getRetention();
×
NEW
1858
                if (ret != null) {
×
NEW
1859
                    ret.getDataFiles().remove(datafile);
×
NEW
1860
                    if (ret.getDataFiles().isEmpty()) {
×
NEW
1861
                        orphanedRetentions.add(ret);
×
1862
                    }
1863
                }
1864
                // Save merges the datafile with a retention into the context
NEW
1865
                datafile.setRetention(retention);
×
NEW
1866
                fileService.save(datafile);
×
NEW
1867
            }
×
1868
            //Call service to get action logged
NEW
1869
            long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier());
×
NEW
1870
            if (orphanedRetentions.size() > 0) {
×
NEW
1871
                for (Retention ret : orphanedRetentions) {
×
NEW
1872
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
×
NEW
1873
                }
×
1874
            }
1875
            //If superuser, report changes to any released files
NEW
1876
            if (authenticatedUser.isSuperuser()) {
×
NEW
1877
                String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased())
×
NEW
1878
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
NEW
1879
                if (!releasedFiles.isBlank()) {
×
NEW
1880
                    actionLogSvc
×
NEW
1881
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo")
×
NEW
1882
                                    .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) "
×
1883
                                            + releasedFiles + ".")
NEW
1884
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1885
                }
1886
            }
NEW
1887
            return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated"));
×
1888
        } else {
NEW
1889
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1890
        }
1891
    }
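    /*
     * Example request (illustrative; ids, date and reason are hypothetical). The body
     * must include dateUnavailable and fileIds; reason is optional, as parsed above:
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *     "$SERVER_URL/api/datasets/$ID/files/actions/:set-retention" \
     *     -H "Content-Type: application/json" \
     *     -d '{"dateUnavailable":"2030-12-31","reason":"Funder requirement","fileIds":[300,301]}'
     *
     * A 400 is returned if :MinRetentionDurationInMonths is 0/unset, if dateUnavailable
     * is not in the future, or if it falls short of the configured minimum.
     */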
1892

1893
    @POST
1894
    @AuthRequired
1895
    @Path("{id}/files/actions/:unset-retention")
1896
    public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1897

1898
        // user is authenticated
NEW
1899
        AuthenticatedUser authenticatedUser = null;
×
1900
        try {
NEW
1901
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
NEW
1902
        } catch (WrappedResponse ex) {
×
NEW
1903
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
NEW
1904
        }
×
1905

1906
        Dataset dataset;
1907
        try {
NEW
1908
            dataset = findDatasetOrDie(id);
×
NEW
1909
        } catch (WrappedResponse ex) {
×
NEW
1910
            return ex.getResponse();
×
NEW
1911
        }
×
1912

1913
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1914
        // check if files are unreleased(DRAFT?)
1915
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
NEW
1916
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
NEW
1917
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1918
        }
1919

1920
        // Check whether retention periods are allowed: read the :MinRetentionDurationInMonths setting; if it is 0 or not set (null), return 400.
NEW
1921
        int minRetentionDurationInMonths = 0;
×
1922
        try {
NEW
1923
            minRetentionDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
×
NEW
1924
        } catch (NumberFormatException nfe){
×
NEW
1925
            if (nfe.getMessage().contains("null")) {
×
NEW
1926
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1927
            }
NEW
1928
        }
×
NEW
1929
        if (minRetentionDurationInMonths == 0){
×
NEW
1930
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1931
        }
1932

1933
        JsonObject json;
1934
        try {
NEW
1935
            json = JsonUtil.getJsonObject(jsonBody);
×
NEW
1936
        } catch (JsonException ex) {
×
NEW
1937
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
×
NEW
1938
        }
×
1939

NEW
1940
        List<DataFile> datasetFiles = dataset.getFiles();
×
NEW
1941
        List<DataFile> retentionFilesToUnset = new LinkedList<>();
×
1942

1943
        // extract fileIds from json, find datafiles and add to list
NEW
1944
        if (json.containsKey("fileIds")){
×
1945
            try {
NEW
1946
                JsonArray fileIds = json.getJsonArray("fileIds");
×
NEW
1947
                for (JsonValue jsv : fileIds) {
×
1948
                    try {
NEW
1949
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
×
NEW
1950
                        retentionFilesToUnset.add(dataFile);
×
NEW
1951
                    } catch (WrappedResponse ex) {
×
NEW
1952
                        return ex.getResponse();
×
NEW
1953
                    }
×
NEW
1954
                }
×
NEW
1955
            } catch (ClassCastException ccex) {
×
NEW
1956
                return error(Status.BAD_REQUEST, "fileIds must be an array of id strings");
×
NEW
1957
            } catch (NullPointerException npex) {
×
NEW
1958
                return error(Status.BAD_REQUEST, "No fileIds specified");
×
NEW
1959
            }
×
1960
        } else {
NEW
1961
            return error(Status.BAD_REQUEST, "No fileIds specified");
×
1962
        }
1963

NEW
1964
        List<Retention> orphanedRetentions = new ArrayList<Retention>();
×
1965
        // check if files belong to dataset
NEW
1966
        if (datasetFiles.containsAll(retentionFilesToUnset)) {
×
NEW
1967
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
NEW
1968
            boolean badFiles = false;
×
NEW
1969
            for (DataFile datafile : retentionFilesToUnset) {
×
1970
                // superuser can overrule an existing retention, even on released files
NEW
1971
                if (datafile.getRetention() == null || (datafile.isReleased() && !authenticatedUser.isSuperuser())) {
×
NEW
1972
                    restrictedFiles.add(datafile.getId());
×
NEW
1973
                    badFiles = true;
×
1974
                }
NEW
1975
            }
×
NEW
1976
            if (badFiles) {
×
NEW
1977
                return Response.status(Status.FORBIDDEN)
×
NEW
1978
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
NEW
1979
                                .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods")
×
NEW
1980
                                .add("files", restrictedFiles).build())
×
NEW
1981
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1982
            }
1983
            // Good request, so remove the retention from the files. Track any existing retentions so we can
1984
            // delete them if there are no files left that reference them.
NEW
1985
            for (DataFile datafile : retentionFilesToUnset) {
×
NEW
1986
                Retention ret = datafile.getRetention();
×
NEW
1987
                if (ret != null) {
×
NEW
1988
                    ret.getDataFiles().remove(datafile);
×
NEW
1989
                    if (ret.getDataFiles().isEmpty()) {
×
NEW
1990
                        orphanedRetentions.add(ret);
×
1991
                    }
1992
                }
1993
                // Save merges the datafile, with its retention now cleared, into the context
NEW
1994
                datafile.setRetention(null);
×
NEW
1995
                fileService.save(datafile);
×
NEW
1996
            }
×
NEW
1997
            if (orphanedRetentions.size() > 0) {
×
NEW
1998
                for (Retention ret : orphanedRetentions) {
×
NEW
1999
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
×
NEW
2000
                }
×
2001
            }
NEW
2002
            String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
NEW
2003
            if(!releasedFiles.isBlank()) {
×
NEW
2004
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + ".");
×
NEW
2005
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
NEW
2006
                actionLogSvc.log(removeRecord);
×
2007
            }
NEW
2008
            return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)"));
×
2009
        } else {
NEW
2010
            return error(BAD_REQUEST, "Not all files belong to the dataset");
×
2011
        }
2012
    }
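    // Note on the retention-removal method above: the request body must be a JSON object of
    // the form {"fileIds": [<file database ids>]}. All listed files must belong to the dataset
    // and currently have a retention period, and (unless the caller is a superuser) must not
    // yet be released. Retention records left with no referencing files are deleted.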
2013

2014
    @PUT
2015
    @AuthRequired
2016
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
2017
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
2018
        try {
2019
            User u = getRequestUser(crc);
×
2020
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
2021
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
2022
            if (linked == null){
×
2023
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
2024
            }
2025
            if (linking == null) {
×
2026
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
2027
            }
2028
            execCommand(new LinkDatasetCommand(
×
2029
                    createDataverseRequest(u), linking, linked
×
2030
            ));
2031
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
2032
        } catch (WrappedResponse ex) {
×
2033
            return ex.getResponse();
×
2034
        }
2035
    }
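    // Minimal usage sketch for linkDataset above, assuming the class-level /api/datasets base
    // path and the standard X-Dataverse-key header (both not shown in this excerpt); the caller
    // must be allowed to issue LinkDatasetCommand:
    //   curl -X PUT -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/link/$COLLECTION_ALIAS"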
2036

2037
    @GET
2038
    @Path("{id}/versions/{versionId}/customlicense")
2039
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
2040
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
2041
        User user = session.getUser();
×
2042
        String persistentId;
2043
        try {
2044
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
×
2045
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
×
2046
            }
2047
            persistentId = getRequestParameter(":persistentId".substring(1));
×
2048
            if (versionId.equals(DS_VERSION_DRAFT)) {
×
2049
                versionId = "DRAFT";
×
2050
            }
2051
        } catch (WrappedResponse wrappedResponse) {
×
2052
            return wrappedResponse.getResponse();
×
2053
        }
×
2054
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
×
2055
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
×
2056
    }
2057

2058

2059
    @GET
2060
    @AuthRequired
2061
    @Path("{id}/links")
2062
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
2063
        try {
2064
            User u = getRequestUser(crc);
×
2065
            if (!u.isSuperuser()) {
×
2066
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
2067
            }
2068
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2069

2070
            long datasetId = dataset.getId();
×
2071
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
2072
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
2073
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
2074
                dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
×
2075
            }
×
2076
            JsonObjectBuilder response = Json.createObjectBuilder();
×
2077
            response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
×
2078
            return ok(response);
×
2079
        } catch (WrappedResponse wr) {
×
2080
            return wr.getResponse();
×
2081
        }
2082
    }
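    // Usage sketch for getLinks above (superuser only), assuming the /api/datasets base path:
    //   curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$DATASET_ID/links"
    // The response lists the aliases and ids of the collections that link to the dataset.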
2083

2084
    /**
2085
     * Add a given assignment to a given user or group
2086
     * @param ra     role assignment DTO
2087
     * @param id     dataset id
2088
     * @param apiKey API key of the calling user
2089
     */
2090
    @POST
2091
    @AuthRequired
2092
    @Path("{identifier}/assignments")
2093
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
2094
        try {
2095
            Dataset dataset = findDatasetOrDie(id);
×
2096
            
2097
            RoleAssignee assignee = findAssignee(ra.getAssignee());
×
2098
            if (assignee == null) {
×
2099
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
×
2100
            }
2101
            
2102
            DataverseRole theRole;
2103
            Dataverse dv = dataset.getOwner();
×
2104
            theRole = null;
×
2105
            while ((theRole == null) && (dv != null)) {
×
2106
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
×
2107
                    if (aRole.getAlias().equals(ra.getRole())) {
×
2108
                        theRole = aRole;
×
2109
                        break;
×
2110
                    }
2111
                }
×
2112
                dv = dv.getOwner();
×
2113
            }
2114
            if (theRole == null) {
×
2115
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
×
2116
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
×
2117
            }
2118

2119
            String privateUrlToken = null;
×
2120
            return ok(
×
2121
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
×
2122
        } catch (WrappedResponse ex) {
×
2123
            List<String> args = Arrays.asList(ex.getMessage());
×
2124
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
×
2125
            return ex.getResponse();
×
2126
        }
2127

2128
    }
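    // Usage sketch for createAssignment above. The RoleAssignmentDTO body carries an assignee
    // identifier and a role alias (property names inferred from ra.getAssignee() and
    // ra.getRole(); the exact JSON field names are an assumption):
    //   curl -X POST -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type: application/json" \
    //        -d '{"assignee": "@someUser", "role": "contributor"}' \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/assignments"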
2129
    
2130
    @DELETE
2131
    @AuthRequired
2132
    @Path("{identifier}/assignments/{id}")
2133
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
2134
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
×
2135
        if (ra != null) {
×
2136
            try {
2137
                findDatasetOrDie(dsId);
×
2138
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
×
2139
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
×
2140
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
×
2141
            } catch (WrappedResponse ex) {
×
2142
                return ex.getResponse();
×
2143
            }
2144
        } else {
2145
            List<String> args = Arrays.asList(Long.toString(assignmentId));
×
2146
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
×
2147
        }
2148
    }
2149

2150
    @GET
2151
    @AuthRequired
2152
    @Path("{identifier}/assignments")
2153
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2154
        return response(req ->
×
2155
                ok(execCommand(
×
2156
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
×
2157
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
×
2158
    }
2159

2160
    @GET
2161
    @AuthRequired
2162
    @Path("{id}/privateUrl")
2163
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2164
        return response( req -> {
×
2165
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
2166
            return (privateUrl != null) ? ok(json(privateUrl))
×
2167
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
2168
        }, getRequestUser(crc));
×
2169
    }
2170

2171
    @POST
2172
    @AuthRequired
2173
    @Path("{id}/privateUrl")
2174
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
2175
        if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
×
2176
            throw new NotAcceptableException("Anonymized Access not enabled");
×
2177
        }
2178
        return response(req ->
×
2179
                ok(json(execCommand(
×
2180
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
×
2181
    }
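    // Usage sketch for createPrivateUrl above; anonymizedAccess defaults to false and is only
    // accepted when the :AnonymizedFieldTypeNames setting is configured:
    //   curl -X POST -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/privateUrl?anonymizedAccess=true"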
2182

2183
    @DELETE
2184
    @AuthRequired
2185
    @Path("{id}/privateUrl")
2186
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2187
        return response( req -> {
×
2188
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2189
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
2190
            if (privateUrl != null) {
×
2191
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
2192
                return ok("Private URL deleted.");
×
2193
            } else {
2194
                return notFound("No Private URL to delete.");
×
2195
            }
2196
        }, getRequestUser(crc));
×
2197
    }
2198

2199
    @GET
2200
    @AuthRequired
2201
    @Path("{id}/thumbnail/candidates")
2202
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2203
        try {
2204
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2205
            boolean canUpdateThumbnail = false;
×
2206
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
×
2207
            if (!canUpdateThumbnail) {
×
2208
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
×
2209
            }
2210
            JsonArrayBuilder data = Json.createArrayBuilder();
×
2211
            boolean considerDatasetLogoAsCandidate = true;
×
2212
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
×
2213
                JsonObjectBuilder candidate = Json.createObjectBuilder();
×
2214
                String base64image = datasetThumbnail.getBase64image();
×
2215
                if (base64image != null) {
×
2216
                    logger.fine("found a candidate!");
×
2217
                    candidate.add("base64image", base64image);
×
2218
                }
2219
                DataFile dataFile = datasetThumbnail.getDataFile();
×
2220
                if (dataFile != null) {
×
2221
                    candidate.add("dataFileId", dataFile.getId());
×
2222
                }
2223
                data.add(candidate);
×
2224
            }
×
2225
            return ok(data);
×
2226
        } catch (WrappedResponse ex) {
×
2227
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
×
2228
        }
2229
    }
2230

2231
    @GET
2232
    @Produces({"image/png"})
2233
    @Path("{id}/thumbnail")
2234
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
2235
        try {
2236
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2237
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
×
2238
            if(is == null) {
×
2239
                return notFound("Thumbnail not available");
×
2240
            }
2241
            return Response.ok(is).build();
×
2242
        } catch (WrappedResponse wr) {
×
2243
            return notFound("Thumbnail not available");
×
2244
        }
2245
    }
2246

2247
    @GET
2248
    @Produces({ "image/png" })
2249
    @Path("{id}/logo")
2250
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
2251
        try {
2252
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2253
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
×
2254
            if (is == null) {
×
2255
                return notFound("Logo not available");
×
2256
            }
2257
            return Response.ok(is).build();
×
2258
        } catch (WrappedResponse wr) {
×
2259
            return notFound("Logo not available");
×
2260
        }
2261
    }
2262

2263
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
2264
    @POST
2265
    @AuthRequired
2266
    @Path("{id}/thumbnail/{dataFileId}")
2267
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
2268
        try {
2269
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
2270
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
2271
        } catch (WrappedResponse wr) {
×
2272
            return wr.getResponse();
×
2273
        }
2274
    }
2275

2276
    @POST
2277
    @AuthRequired
2278
    @Path("{id}/thumbnail")
2279
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2280
    @Produces("application/json")
2281
    @Operation(summary = "Uploads a logo for a dataset", 
2282
               description = "Uploads a logo for a dataset")
2283
    @APIResponse(responseCode = "200",
2284
               description = "Dataset logo uploaded successfully")
2285
    @Tag(name = "uploadDatasetLogo", 
2286
         description = "Uploads a logo for a dataset")
2287
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))          
2288
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
2289
        try {
2290
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
×
2291
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
2292
        } catch (WrappedResponse wr) {
×
2293
            return wr.getResponse();
×
2294
        }
2295
    }
2296

2297
    @DELETE
2298
    @AuthRequired
2299
    @Path("{id}/thumbnail")
2300
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2301
        try {
2302
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
2303
            return ok("Dataset thumbnail removed.");
×
2304
        } catch (WrappedResponse wr) {
×
2305
            return wr.getResponse();
×
2306
        }
2307
    }
2308

2309
    @Deprecated(forRemoval = true, since = "2024-07-07")
2310
    @GET
2311
    @AuthRequired
2312
    @Path("{identifier}/dataCaptureModule/rsync")
2313
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2314
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
2315
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
×
2316
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
×
2317
        }
2318
        Dataset dataset = null;
×
2319
        try {
2320
            dataset = findDatasetOrDie(id);
×
2321
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
2322
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
×
2323
            
2324
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
×
2325
            if (lock == null) {
×
2326
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
2327
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
×
2328
            }
2329
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
×
2330
        } catch (WrappedResponse wr) {
×
2331
            return wr.getResponse();
×
2332
        } catch (EJBException ex) {
×
2333
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
×
2334
        }
2335
    }
2336
    
2337
    /**
2338
     * This API endpoint triggers the creation of a "package" file in a dataset
2339
     * after that package has been moved onto the same filesystem via the Data Capture Module.
2340
     * The package is simply the way Dataverse interprets a folder created by the DCM, treating it as a single file.
2341
     * The "package" can be downloaded over RSAL.
2342
     *
2343
     * This endpoint currently supports both POSIX file storage and AWS S3 storage in Dataverse, and acts according to whichever one is active.
2344
     *
2345
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
2346
     * But due to the possibly immense number of files (millions) the package approach was taken.
2347
     * This is relevant because the posix ("file") code contains many remnants of that development work.
2348
     * The S3 code was written later and only supports import as packages. It borrows heavily from FileRecordWriter.
2349
     * -MAD 4.9.1
2350
     */
2351
    @POST
2352
    @AuthRequired
2353
    @Path("{identifier}/dataCaptureModule/checksumValidation")
2354
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
2355
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
2356
        AuthenticatedUser authenticatedUser = null;
×
2357
        try {
2358
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2359
        } catch (WrappedResponse ex) {
×
2360
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
2361
        }
×
2362
        if (!authenticatedUser.isSuperuser()) {
×
2363
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2364
        }
2365
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
2366
        try {
2367
            Dataset dataset = findDatasetOrDie(id);
×
2368
            if ("validation passed".equals(statusMessageFromDcm)) {
×
2369
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
2370

2371
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
×
2372
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
2373
                int totalSize = jsonFromDcm.getInt("totalSize");
×
2374
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
×
2375
                
2376
                if (storageDriverType.equals("file")) {
×
2377
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
2378

2379
                    ImportMode importMode = ImportMode.MERGE;
×
2380
                    try {
2381
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, Long.valueOf(totalSize), importMode));
×
2382
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
2383
                        String message = jsonFromImportJobKickoff.getString("message");
×
2384
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2385
                        job.add("jobId", jobId);
×
2386
                        job.add("message", message);
×
2387
                        return ok(job);
×
2388
                    } catch (WrappedResponse wr) {
×
2389
                        String message = wr.getMessage();
×
2390
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
×
2391
                    }
2392
                } else if(storageDriverType.equals(DataAccess.S3)) {
×
2393
                    
2394
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
2395
                    try {
2396
                        
2397
                        // Where the heavy lifting is actually done: moving the S3 files over and making Dataverse aware of the existence of the package
2398
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
2399
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, Long.valueOf(totalSize));
×
2400
                        
2401
                        if (packageFile == null) {
×
2402
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
2403
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
2404
                        }
2405
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
2406
                        if (dcmLock == null) {
×
2407
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
2408
                        } else {
2409
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
2410
                            dataset.removeLock(dcmLock);
×
2411
                        }
2412
                        
2413
                        // update version using the command engine to enforce user permissions and constraints
2414
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
2415
                            try {
2416
                                Command<Dataset> cmd;
2417
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
×
2418
                                commandEngine.submit(cmd);
×
2419
                            } catch (CommandException ex) {
×
2420
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
2421
                            }
×
2422
                        } else {
2423
                            String constraintError = "ConstraintException updating DatasetVersion from batch job: dataset must be a "
×
2424
                                    + "single version in draft mode.";
2425
                            logger.log(Level.SEVERE, constraintError);
×
2426
                        }
2427

2428
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2429
                        return ok(job);
×
2430
                        
2431
                    } catch (IOException e) {
×
2432
                        String message = e.getMessage();
×
2433
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
×
2434
                    }
2435
                } else {
2436
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with DCM");
×
2437
                }
2438
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
2439
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
×
2440
                distinctAuthors.values().forEach((value) -> {
×
2441
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2442
                });
×
2443
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
2444
                if (superUsers != null && !superUsers.isEmpty()) {
×
2445
                    superUsers.forEach((au) -> {
×
2446
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2447
                    });
×
2448
                }
2449
                return ok("Users notified about checksum validation failure.");
×
2450
            } else {
2451
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
×
2452
            }
2453
        } catch (WrappedResponse ex) {
×
2454
            return ex.getResponse();
×
2455
        }
2456
    }
2457
    
2458

2459
    @POST
2460
    @AuthRequired
2461
    @Path("{id}/submitForReview")
2462
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2463
        try {
2464
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
2465
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2466
            
2467
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2468
            
2469
            result.add("inReview", inReview);
×
2470
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2471
            return ok(result);
×
2472
        } catch (WrappedResponse wr) {
×
2473
            return wr.getResponse();
×
2474
        }
2475
    }
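    // Usage sketch for submitForReview above (assuming the /api/datasets base path):
    //   curl -X POST -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/submitForReview"
    // The response reports whether the dataset is now locked with reason InReview.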
2476

2477
    @POST
2478
    @AuthRequired
2479
    @Path("{id}/returnToAuthor")
2480
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2481

2482
        if (jsonBody == null || jsonBody.isEmpty()) {
×
2483
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2484
        }
2485
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2486
        try {
2487
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2488
            String reasonForReturn = null;
×
2489
            reasonForReturn = json.getString("reasonForReturn");
×
NEW
2490
            if ((reasonForReturn == null || reasonForReturn.isEmpty())
×
NEW
2491
                    && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) {
×
UNCOV
2492
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2493
            }
2494
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2495
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2496

2497
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2498
            result.add("inReview", false);
×
2499
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2500
            return ok(result);
×
2501
        } catch (WrappedResponse wr) {
×
2502
            return wr.getResponse();
×
2503
        }
2504
    }
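    // Usage sketch for returnToAuthor above; unless the disable-return-to-author-reason feature
    // flag is enabled, the JSON body must contain a non-empty reasonForReturn:
    //   curl -X POST -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type: application/json" \
    //        -d '{"reasonForReturn": "Please add a README file."}' \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/returnToAuthor"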
2505

2506
    @GET
2507
    @AuthRequired
2508
    @Path("{id}/curationStatus")
2509
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2510
        try {
2511
            Dataset ds = findDatasetOrDie(idSupplied);
×
2512
            DatasetVersion dsv = ds.getLatestVersion();
×
2513
            User user = getRequestUser(crc);
×
2514
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
2515
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2516
            } else {
2517
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2518
            }
2519
        } catch (WrappedResponse wr) {
×
2520
            return wr.getResponse();
×
2521
        }
2522
    }
2523

2524
    @PUT
2525
    @AuthRequired
2526
    @Path("{id}/curationStatus")
2527
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2528
        Dataset ds = null;
×
2529
        User u = null;
×
2530
        try {
2531
            ds = findDatasetOrDie(idSupplied);
×
2532
            u = getRequestUser(crc);
×
2533
        } catch (WrappedResponse wr) {
×
2534
            return wr.getResponse();
×
2535
        }
×
2536
        try {
2537
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2538
            return ok("Curation Status updated");
×
2539
        } catch (WrappedResponse wr) {
×
2540
            // Just change to Bad Request and send
2541
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2542
        }
2543
    }
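    // Usage sketch for setCurationStatus above; the new external status label is passed as a
    // query parameter (and typically must match a label the installation allows):
    //   curl -X PUT -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/curationStatus?label=Author%20contacted"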
2544

2545
    @DELETE
2546
    @AuthRequired
2547
    @Path("{id}/curationStatus")
2548
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2549
        Dataset ds = null;
×
2550
        User u = null;
×
2551
        try {
2552
            ds = findDatasetOrDie(idSupplied);
×
2553
            u = getRequestUser(crc);
×
2554
        } catch (WrappedResponse wr) {
×
2555
            return wr.getResponse();
×
2556
        }
×
2557
        try {
2558
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2559
            return ok("Curation Status deleted");
×
2560
        } catch (WrappedResponse wr) {
×
2561
            //Just change to Bad Request and send
2562
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2563
        }
2564
    }
2565

2566
    @GET
2567
    @AuthRequired
2568
    @Path("{id}/uploadurls")
2569
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2570
        try {
2571
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2572

2573
            boolean canUpdateDataset = false;
×
2574
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2575
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2576
            if (!canUpdateDataset) {
×
2577
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2578
            }
2579
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2580
            if (s3io == null) {
×
2581
                return error(Response.Status.NOT_FOUND,
×
2582
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2583
            }
2584
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
2585
            if (maxSize != null) {
×
2586
                if(fileSize > maxSize) {
×
2587
                    return error(Response.Status.BAD_REQUEST,
×
2588
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2589
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2590
                }
2591
            }
2592
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
2593
            if (limit != null) {
×
2594
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
2595
                    return error(Response.Status.BAD_REQUEST,
×
2596
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2597
                                    "The remaining file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2598
                }
2599
            }
2600
            JsonObjectBuilder response = null;
×
2601
            String storageIdentifier = null;
×
2602
            try {
2603
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
2604
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2605

2606
            } catch (IOException io) {
×
2607
                logger.warning(io.getMessage());
×
2608
                throw new WrappedResponse(io,
×
2609
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not process direct upload request"));
×
2610
            }
×
2611

2612
            response.add("storageIdentifier", storageIdentifier);
×
2613
            return ok(response);
×
2614
        } catch (WrappedResponse wr) {
×
2615
            return wr.getResponse();
×
2616
        }
2617
    }
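    // Usage sketch for getMPUploadUrls above; "size" is the planned upload size in bytes and is
    // checked against the store's maximum file size and any remaining upload-session quota:
    //   curl -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/uploadurls?size=1000000000"
    // The response includes a storageIdentifier plus temporary S3 upload URL(s).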
2618

2619
    @DELETE
2620
    @AuthRequired
2621
    @Path("mpupload")
2622
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2623
        try {
2624
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2625
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2626
            User user = session.getUser();
×
2627
            if (!user.isAuthenticated()) {
×
2628
                try {
2629
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2630
                } catch (WrappedResponse ex) {
×
2631
                    logger.info(
×
2632
                            "Exception thrown while trying to figure out permissions while aborting upload for dataset id "
2633
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2634
                    throw ex;
×
2635
                }
×
2636
            }
2637
            boolean allowed = false;
×
2638
            if (dataset != null) {
×
2639
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2640
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2641
            } else {
2642
                /*
2643
                 * The only legitimate case where a global id won't correspond to a dataset is
2644
                 * for uploads during creation. Given that this call will still fail unless all
2645
                 * three parameters correspond to an active multipart upload, it should be safe
2646
                 * to allow the attempt for an authenticated user. If there are concerns about
2647
                 * permissions, one could check with the current design that the user is allowed
2648
                 * to create datasets in some dataverse that is configured to use the storage
2649
                 * provider specified in the storageidentifier, but testing for the ability to
2650
                 * create a dataset in a specific dataverse would require changing the design
2651
                 * somehow (e.g. adding the ownerId to this call).
2652
                 */
2653
                allowed = true;
×
2654
            }
2655
            if (!allowed) {
×
2656
                return error(Response.Status.FORBIDDEN,
×
2657
                        "You are not permitted to abort file uploads with the supplied parameters.");
2658
            }
2659
            try {
2660
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2661
            } catch (IOException io) {
×
2662
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2663
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2664
                logger.warning(io.getMessage());
×
2665
                throw new WrappedResponse(io,
×
2666
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2667
            }
×
2668
            return Response.noContent().build();
×
2669
        } catch (WrappedResponse wr) {
×
2670
            return wr.getResponse();
×
2671
        }
2672
    }
2673

2674
    @PUT
2675
    @AuthRequired
2676
    @Path("mpupload")
2677
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2678
        try {
2679
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2680
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2681
            User user = session.getUser();
×
2682
            if (!user.isAuthenticated()) {
×
2683
                try {
2684
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2685
                } catch (WrappedResponse ex) {
×
2686
                    logger.info(
×
2687
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2688
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2689
                    throw ex;
×
2690
                }
×
2691
            }
2692
            boolean allowed = false;
×
2693
            if (dataset != null) {
×
2694
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2695
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2696
            } else {
2697
                /*
2698
                 * The only legitimate case where a global id won't correspond to a dataset is
2699
                 * for uploads during creation. Given that this call will still fail unless all
2700
                 * three parameters correspond to an active multipart upload, it should be safe
2701
                 * to allow the attempt for an authenticated user. If there are concerns about
2702
                 * permissions, one could check with the current design that the user is allowed
2703
                 * to create datasets in some dataverse that is configured to use the storage
2704
                 * provider specified in the storageidentifier, but testing for the ability to
2705
                 * create a dataset in a specific dataverse would require changing the design
2706
                 * somehow (e.g. adding the ownerId to this call).
2707
                 */
2708
                allowed = true;
×
2709
            }
2710
            if (!allowed) {
×
2711
                return error(Response.Status.FORBIDDEN,
×
2712
                        "You are not permitted to complete file uploads with the supplied parameters.");
2713
            }
2714
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
2715
            logger.info("Etags: " + partETagBody);
×
2716
            try {
2717
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2718
                for (String partNo : object.keySet()) {
×
2719
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2720
                }
×
2721
                for (PartETag et : eTagList) {
×
2722
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2723
                }
×
2724
            } catch (JsonException je) {
×
2725
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2726
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2727
            }
×
2728
            try {
2729
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2730
            } catch (IOException io) {
×
2731
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2732
                logger.warning(io.getMessage());
×
2733
                try {
2734
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2735
                } catch (IOException e) {
×
2736
                    logger.severe("Also unable to abort the upload (and release the space on S3) for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2737
                    logger.severe(e.getMessage());
×
2738
                }
×
2739

2740
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2741
            }
×
2742
            return ok("Multipart Upload completed");
×
2743
        } catch (WrappedResponse wr) {
×
2744
            return wr.getResponse();
×
2745
        }
2746
    }
2747

2748
    /**
2749
     * Add a File to an existing Dataset
2750
     *
2751
     * @param idSupplied
2752
     * @param jsonData
2753
     * @param fileInputStream
2754
     * @param contentDispositionHeader
2755
     * @param formDataBodyPart
2756
     * @return
2757
     */
2758
    @POST
2759
    @AuthRequired
2760
    @Path("{id}/add")
2761
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2762
    @Produces("application/json")
2763
    @Operation(summary = "Uploads a file for a dataset", 
2764
               description = "Uploads a file for a dataset")
2765
    @APIResponse(responseCode = "200",
2766
               description = "File uploaded successfully to dataset")
2767
    @Tag(name = "addFileToDataset", 
2768
         description = "Uploads a file for a dataset")
2769
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
2770
    public Response addFileToDataset(@Context ContainerRequestContext crc,
2771
                    @PathParam("id") String idSupplied,
2772
                    @FormDataParam("jsonData") String jsonData,
2773
                    @FormDataParam("file") InputStream fileInputStream,
2774
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
2775
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
2776
                    ){
2777

2778
        if (!systemConfig.isHTTPUpload()) {
×
2779
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
2780
        }
2781

2782
        // -------------------------------------
2783
        // (1) Get the user from the ContainerRequestContext
2784
        // -------------------------------------
2785
        User authUser;
2786
        authUser = getRequestUser(crc);
×
2787

2788
        // -------------------------------------
2789
        // (2) Get the Dataset Id
2790
        //  
2791
        // -------------------------------------
2792
        Dataset dataset;
2793
        
2794
        try {
2795
            dataset = findDatasetOrDie(idSupplied);
×
2796
        } catch (WrappedResponse wr) {
×
2797
            return wr.getResponse();
×
2798
        }
×
2799
        
2800
        //------------------------------------
2801
        // (2a) Make sure dataset does not have package file
2802
        //
2803
        // --------------------------------------
2804
        
2805
        for (DatasetVersion dv : dataset.getVersions()) {
×
2806
            if (dv.isHasPackageFile()) {
×
2807
                return error(Response.Status.FORBIDDEN,
×
2808
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
2809
                );
2810
            }
2811
        }
×
2812

2813
        // (2b) Load up optional params via JSON
2814
        //---------------------------------------
2815
        OptionalFileParams optionalFileParams = null;
×
2816
        msgt("(api) jsonData: " + jsonData);
×
2817

2818
        try {
2819
            optionalFileParams = new OptionalFileParams(jsonData);
×
2820
        } catch (DataFileTagException ex) {
×
2821
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
2822
        }
2823
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
×
2824
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
×
2825
        }
×
2826
        
2827
        // -------------------------------------
2828
        // (3) Get the file name and content type
2829
        // -------------------------------------
2830
        String newFilename = null;
×
2831
        String newFileContentType = null;
×
2832
        String newStorageIdentifier = null;
×
2833
        if (null == contentDispositionHeader) {
×
2834
            if (optionalFileParams.hasStorageIdentifier()) {
×
2835
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
×
2836
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
×
2837
                
2838
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
×
2839
                    return error(BAD_REQUEST,
×
2840
                            "Dataset store configuration does not allow provided storageIdentifier.");
2841
                }
2842
                if (optionalFileParams.hasFileName()) {
×
2843
                    newFilename = optionalFileParams.getFileName();
×
2844
                    if (optionalFileParams.hasMimetype()) {
×
2845
                        newFileContentType = optionalFileParams.getMimeType();
×
2846
                    }
2847
                }
2848
            } else {
2849
                return error(BAD_REQUEST,
×
2850
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
2851
            }
2852
        } else {
2853
            newFilename = contentDispositionHeader.getFileName();
×
2854
            // Let's see if the form data part has the mime (content) type specified.
2855
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
2856
            // because that defaults to "text/plain" when no "Content-Type:" header is
2857
            // present. Instead we'll go through the headers, and see if "Content-Type:"
2858
            // is there. If not, we'll default to "application/octet-stream" - the generic
2859
            // unknown type. This will prompt the application to run type detection and
2860
            // potentially find something more accurate.
2861
            // newFileContentType = formDataBodyPart.getMediaType().toString();
2862

2863
            for (String header : formDataBodyPart.getHeaders().keySet()) {
×
2864
                if (header.equalsIgnoreCase("Content-Type")) {
×
2865
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
×
2866
                }
2867
            }
×
2868
            if (newFileContentType == null) {
×
2869
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
×
2870
            }
2871
        }
2872

2873

2874
        //-------------------
2875
        // (4) Create the AddReplaceFileHelper object
2876
        //-------------------
2877
        msg("ADD!");
×
2878

2879
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
2880
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
×
2881
                ingestService,
2882
                datasetService,
2883
                fileService,
2884
                permissionSvc,
2885
                commandEngine,
2886
                systemConfig);
2887

2888

2889
        //-------------------
2890
        // (5) Run "runAddFileByDataset"
2891
        //-------------------
2892
        addFileHelper.runAddFileByDataset(dataset,
×
2893
                newFilename,
2894
                newFileContentType,
2895
                newStorageIdentifier,
2896
                fileInputStream,
2897
                optionalFileParams);
2898

2899

2900
        if (addFileHelper.hasError()){
×
2901
            //conflict response status added for 8859
2902
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
×
2903
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
×
2904
            }
2905
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
2906
        } else {
2907
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
2908
            try {
2909
                //msgt("as String: " + addFileHelper.getSuccessResult());
2910
                /**
2911
                 * @todo We need a consistent, sane way to communicate a human
2912
                 * readable message to an API client suitable for human
2913
                 * consumption. Imagine if the UI were built in Angular or React
2914
                 * and we want to return a message from the API as-is to the
2915
                 * user. Human readable.
2916
                 */
2917
                logger.fine("successMsg: " + successMsg);
×
2918
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
×
2919
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
×
2920
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2921
                } else {
2922
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2923
                }
2924

2925
                //"Look at that!  You added a file! (hey hey, it may have worked)");
2926
            } catch (NoFilesException ex) {
×
2927
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
2928
                return error(Response.Status.BAD_REQUEST, "NoFilesException! Serious error! See administrator!");
×
2929

2930
            }
2931
        }
2932
        
2933
    } // end: addFileToDataset
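    // Usage sketch for addFileToDataset above: a multipart/form-data POST with a "file" part
    // and an optional "jsonData" part (OptionalFileParams). The jsonData fields shown here
    // (description, categories, restrict) are a plausible example, not an exhaustive list:
    //   curl -X POST -H "X-Dataverse-key: $API_TOKEN" \
    //        -F "file=@data.tsv" \
    //        -F 'jsonData={"description": "My data", "categories": ["Data"], "restrict": "false"}' \
    //        "$SERVER_URL/api/datasets/$DATASET_ID/add"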
2934

2935

2936
    /**
2937
     * Clean storage of a Dataset
2938
     *
2939
     * @param idSupplied
2940
     * @return
2941
     */
2942
    @GET
2943
    @AuthRequired
2944
    @Path("{id}/cleanStorage")
2945
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2946
        // get user and dataset
2947
        User authUser = getRequestUser(crc);
×
2948

2949
        Dataset dataset;
2950
        try {
2951
            dataset = findDatasetOrDie(idSupplied);
×
2952
        } catch (WrappedResponse wr) {
×
2953
            return wr.getResponse();
×
2954
        }
×
2955
        
2956
        // check permissions
2957
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
2958
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
2959
        }
2960

2961
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
2962

2963
        // check if no legacy files are present
2964
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
2965
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
2966
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
2967
        }
2968

2969
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
2970
        List<String> deleted;
2971
        try {
2972
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
2973
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
2974
        } catch (IOException ex) {
×
2975
            logger.log(Level.SEVERE, null, ex);
×
2976
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
2977
        }
×
2978

2979
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
2980
        
2981
    }
2982
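    // Illustrative usage sketch for the cleanStorage endpoint above, assuming the usual
    // $SERVER_URL, $API_TOKEN and $ID placeholders from the Dataverse API guides. With
    // dryrun=true the candidate files are only listed; nothing is deleted.
    //
    //   curl -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$ID/cleanStorage?dryrun=true"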

2983
    private static Set<String> getDatasetFilenames(Dataset dataset) {
2984
        Set<String> files = new HashSet<>();
×
2985
        for (DataFile dataFile: dataset.getFiles()) {
×
2986
            String storageIdentifier = dataFile.getStorageIdentifier();
×
2987
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
2988
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
2989
            files.add(locationParts[locationParts.length-1]);
×
2990
        }
×
2991
        return files;
×
2992
    }
2993

2994
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
2995
        return f -> {
1✔
2996
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
2997
        };
2998
    }
2999
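    // Hypothetical example of the filter above (storage identifiers are made up): if
    // getDatasetFilenames() returns {"18b39722140-50eb7d3c5ece"} and the physical store
    // also contains "18b39722140-50eb7d3c5ece.orig" and "1893cb51e80-30a2270780e4"
    // (both assumed to match dataFilePattern), only the latter passes the predicate,
    // because it does not start with any filename belonging to the dataset, and is
    // therefore eligible for cleanup.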

3000
    private void msg(String m) {
3001
        //System.out.println(m);
3002
        logger.fine(m);
×
3003
    }
×
3004

3005
    private void dashes() {
3006
        msg("----------------");
×
3007
    }
×
3008

3009
    private void msgt(String m) {
3010
        dashes();
×
3011
        msg(m);
×
3012
        dashes();
×
3013
    }
×
3014

3015

3016
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
3017
            throws WrappedResponse {
3018
        switch (versionId) {
×
3019
            case DS_VERSION_LATEST:
3020
                return hdl.handleLatest();
×
3021
            case DS_VERSION_DRAFT:
3022
                return hdl.handleDraft();
×
3023
            case DS_VERSION_LATEST_PUBLISHED:
3024
                return hdl.handleLatestPublished();
×
3025
            default:
3026
                try {
3027
                    String[] versions = versionId.split("\\.");
×
3028
                    switch (versions.length) {
×
3029
                        case 1:
3030
                            return hdl.handleSpecific(Long.parseLong(versions[0]), 0L);
×
3031
                        case 2:
3032
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
3033
                        default:
3034
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3035
                    }
3036
                } catch (NumberFormatException nfe) {
×
3037
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3038
                }
3039
        }
3040
    }
3041
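    // How version identifiers are resolved by handleVersion() (illustrative): the
    // DS_VERSION_LATEST / DS_VERSION_DRAFT / DS_VERSION_LATEST_PUBLISHED constants map to
    // their dedicated handlers; a single number such as "3" is treated as major version 3
    // with minor version 0; "3.1" is split into major 3 / minor 1; anything else
    // (e.g. "3.1.2" or "abc") is rejected with a 400 Bad Request.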

3042
    /*
3043
     * includeDeaccessioned defaults to false and checkPermsWhenDeaccessioned to false. Use this overload only when you are sure that you don't need to work with
3044
     * a deaccessioned dataset.
3045
     */
3046
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, 
3047
                                                  String versionNumber, 
3048
                                                  final Dataset ds,
3049
                                                  UriInfo uriInfo, 
3050
                                                  HttpHeaders headers) throws WrappedResponse {
3051
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
NEW
3052
        boolean checkFilePerms = false;
×
NEW
3053
        boolean includeDeaccessioned = false;
×
NEW
3054
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
×
3055
    }
3056
    
3057
    /*
3058
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
3059
     */
3060
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3061
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
NEW
3062
        boolean checkPermsWhenDeaccessioned = true;
×
NEW
3063
        boolean bypassAccessCheck = false;
×
NEW
3064
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3065
    }
3066

3067
    /*
3068
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
3069
     */
3070
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3071
                                                  UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
NEW
3072
        boolean bypassAccessCheck = false;
×
NEW
3073
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3074
    }
3075

3076
    /*
3077
     * Allows the caller to define whether the permissions should be checked when a deaccessioned dataset is requested. If the user doesn't have edit permissions, this will result in an error.
3078
     */
3079
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3080
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
3081
            boolean bypassAccessCheck)
3082
            throws WrappedResponse {
3083

3084
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
×
3085

3086
        if (dsv == null || dsv.getId() == null) {
×
NEW
3087
            throw new WrappedResponse(
×
NEW
3088
                    notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
×
3089
        }
3090
        if (dsv.isReleased() && uriInfo != null) {
×
3091
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
×
3092
            mdcLogService.logEntry(entry);
×
3093
        }
3094
        return dsv;
×
3095
    }
3096
 
3097
    @GET
3098
    @Path("{identifier}/locks")
3099
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3100

3101
        Dataset dataset = null;
×
3102
        try {
3103
            dataset = findDatasetOrDie(id);
×
3104
            Set<DatasetLock> locks;
3105
            if (lockType == null) {
×
3106
                locks = dataset.getLocks();
×
3107
            } else {
3108
                // request for a specific type lock:
3109
                DatasetLock lock = dataset.getLockFor(lockType);
×
3110

3111
                locks = new HashSet<>();
×
3112
                if (lock != null) {
×
3113
                    locks.add(lock);
×
3114
                }
3115
            }
3116
            
3117
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3118

3119
        } catch (WrappedResponse wr) {
×
3120
            return wr.getResponse();
×
3121
        }
3122
    }
3123

3124
    @DELETE
3125
    @AuthRequired
3126
    @Path("{identifier}/locks")
3127
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3128

3129
        return response(req -> {
×
3130
            try {
3131
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3132
                if (!user.isSuperuser()) {
×
3133
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3134
                }
3135
                Dataset dataset = findDatasetOrDie(id);
×
3136
                
3137
                if (lockType == null) {
×
3138
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
3139
                    for (DatasetLock lock : dataset.getLocks()) {
×
3140
                        locks.add(lock.getReason());
×
3141
                    }
×
3142
                    if (!locks.isEmpty()) {
×
3143
                        for (DatasetLock.Reason locktype : locks) {
×
3144
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
3145
                            // refresh the dataset:
3146
                            dataset = findDatasetOrDie(id);
×
3147
                        }
×
3148
                        // kick off dataset reindexing, in case the locks removed
3149
                        // affected the search card:
3150
                        indexService.asyncIndexDataset(dataset, true);
×
3151
                        return ok("locks removed");
×
3152
                    }
3153
                    return ok("dataset not locked");
×
3154
                }
3155
                // request for a specific type lock:
3156
                DatasetLock lock = dataset.getLockFor(lockType);
×
3157
                if (lock != null) {
×
3158
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
3159
                    // refresh the dataset:
3160
                    dataset = findDatasetOrDie(id);
×
3161
                    // ... and kick off dataset reindexing, in case the lock removed
3162
                    // affected the search card:
3163
                    indexService.asyncIndexDataset(dataset, true);
×
3164
                    return ok("lock type " + lock.getReason() + " removed");
×
3165
                }
3166
                return ok("no lock type " + lockType + " on the dataset");
×
3167
            } catch (WrappedResponse wr) {
×
3168
                return wr.getResponse();
×
3169
            }
3170

3171
        }, getRequestUser(crc));
×
3172

3173
    }
3174
    
3175
    @POST
3176
    @AuthRequired
3177
    @Path("{identifier}/lock/{type}")
3178
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
3179
        return response(req -> {
×
3180
            try {
3181
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3182
                if (!user.isSuperuser()) {
×
3183
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3184
                }
3185
                Dataset dataset = findDatasetOrDie(id);
×
3186
                DatasetLock lock = dataset.getLockFor(lockType);
×
3187
                if (lock != null) {
×
3188
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
3189
                }
3190
                lock = new DatasetLock(lockType, user);
×
3191
                execCommand(new AddLockCommand(req, dataset, lock));
×
3192
                // refresh the dataset:
3193
                dataset = findDatasetOrDie(id);
×
3194
                // ... and kick off dataset reindexing:
3195
                indexService.asyncIndexDataset(dataset, true);
×
3196

3197
                return ok("dataset locked with lock type " + lockType);
×
3198
            } catch (WrappedResponse wr) {
×
3199
                return wr.getResponse();
×
3200
            }
3201

3202
        }, getRequestUser(crc));
×
3203
    }
3204
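    // Illustrative superuser calls for the two lock-management endpoints above (a sketch;
    // "Ingest" stands in for any DatasetLock.Reason value):
    //
    //   # remove all locks, or only those of one type
    //   curl -X DELETE -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/locks"
    //   curl -X DELETE -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/locks?type=Ingest"
    //
    //   # add a lock of a given type
    //   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/lock/Ingest"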
    
3205
    @GET
3206
    @AuthRequired
3207
    @Path("locks")
3208
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
3209
        // This API is here, under /datasets, and not under /admin, because we
3210
        // likely want it to be accessible to admin users who may not necessarily 
3211
        // have localhost access, which would be required to get to /api/admin in
3212
        // most installations. It is still reasonable, however, to limit access to
3213
        // this api to admin users only.
3214
        AuthenticatedUser apiUser;
3215
        try {
3216
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
3217
        } catch (WrappedResponse ex) {
×
3218
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3219
        }
×
3220
        if (!apiUser.isSuperuser()) {
×
3221
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3222
        }
3223
        
3224
        // Locks can be optionally filtered by type, user, or both.
3225
        DatasetLock.Reason lockTypeValue = null;
×
3226
        AuthenticatedUser user = null; 
×
3227
        
3228
        // For the lock type, we use a QueryParam of type String, instead of 
3229
        // DatasetLock.Reason; that would be less code to write, but this way 
3230
        // we can check if the value passed matches a valid lock type ("reason") 
3231
        // and provide a helpful error message if it doesn't. If you use a 
3232
        // QueryParam of an Enum type, trying to pass an invalid value to it 
3233
        // results in a potentially confusing "404/NOT FOUND - requested 
3234
        // resource is not available".
3235
        if (lockType != null && !lockType.isEmpty()) {
×
3236
            try {
3237
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
3238
            } catch (IllegalArgumentException iax) {
×
3239
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
3240
                for (Reason r: Reason.values()) {
×
3241
                    reasonJoiner.add(r.name());
×
3242
                }
3243
                String errorMessage = "Invalid lock type value: " + lockType + 
×
3244
                        "; valid lock types: " + reasonJoiner.toString();
×
3245
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
3246
            }
×
3247
        }
3248
        
3249
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
3250
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
3251
            if (user == null) {
×
3252
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
3253
            }
3254
        }
3255
        
3256
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
3257
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
3258
                            
3259
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3260
    }   
3261
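    // Illustrative superuser call (a sketch; the lock type and user identifier are
    // hypothetical):
    //
    //   curl -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/locks?type=Ingest&userIdentifier=someUser"
    //
    // An invalid type value returns 400 along with the list of valid DatasetLock.Reason names.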
    
3262
    
3263
    @GET
3264
    @Path("{id}/makeDataCount/citations")
3265
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
3266
        
3267
        try {
3268
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3269
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
3270
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
3271
            for (DatasetExternalCitations citation : externalCitations) {
×
3272
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
3273
                /**
3274
                 * In the future we can imagine storing and presenting more
3275
                 * information about the citation such as the title of the paper
3276
                 * and the names of the authors. For now, we'll at least give
3277
                 * the URL of the citation so people can click and find out more
3278
                 * about the citation.
3279
                 */
3280
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
3281
                datasetsCitations.add(candidateObj);
×
3282
            }
×
3283
            return ok(datasetsCitations);
×
3284

3285
        } catch (WrappedResponse wr) {
×
3286
            return wr.getResponse();
×
3287
        }
3288

3289
    }
3290

3291
    @GET
3292
    @Path("{id}/makeDataCount/{metric}")
3293
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
3294
        String nullCurrentMonth = null;
×
3295
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
3296
    }
3297

3298
    @GET
3299
    @AuthRequired
3300
    @Path("{identifier}/storagesize")
3301
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
3302
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
3303
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
3304
    }
3305

3306
    @GET
3307
    @AuthRequired
3308
    @Path("{identifier}/versions/{versionId}/downloadsize")
3309
    public Response getDownloadSize(@Context ContainerRequestContext crc,
3310
                                    @PathParam("identifier") String dvIdtf,
3311
                                    @PathParam("versionId") String version,
3312
                                    @QueryParam("contentType") String contentType,
3313
                                    @QueryParam("accessStatus") String accessStatus,
3314
                                    @QueryParam("categoryName") String categoryName,
3315
                                    @QueryParam("tabularTagName") String tabularTagName,
3316
                                    @QueryParam("searchText") String searchText,
3317
                                    @QueryParam("mode") String mode,
3318
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
3319
                                    @Context UriInfo uriInfo,
3320
                                    @Context HttpHeaders headers) {
3321

3322
        return response(req -> {
×
3323
            FileSearchCriteria fileSearchCriteria;
3324
            try {
3325
                fileSearchCriteria = new FileSearchCriteria(
×
3326
                        contentType,
3327
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
3328
                        categoryName,
3329
                        tabularTagName,
3330
                        searchText
3331
                );
3332
            } catch (IllegalArgumentException e) {
×
3333
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
3334
            }
×
3335
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
3336
            try {
3337
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
×
3338
            } catch (IllegalArgumentException e) {
×
3339
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
×
3340
            }
×
3341
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
×
3342
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
×
3343
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
×
3344
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
3345
            jsonObjectBuilder.add("message", message);
×
3346
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
×
3347
            return ok(jsonObjectBuilder);
×
3348
        }, getRequestUser(crc));
×
3349
    }
3350
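    // Illustrative call (a sketch; the criteria values are hypothetical, and "All" is the
    // default mode used above when no mode is supplied):
    //
    //   curl -H "X-Dataverse-key: $API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$ID/versions/:latest/downloadsize?mode=All&contentType=text/tab-separated-values"
    //
    // The response carries both a human-readable "message" and the numeric "storageSize".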

3351
    @GET
3352
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
3353
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
3354
        try {
3355
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3356
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
3357
            MakeDataCountUtil.MetricType metricType = null;
×
3358
            try {
3359
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
3360
            } catch (IllegalArgumentException ex) {
×
3361
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
3362
            }
×
3363
            String monthYear = null;
×
3364
            if (yyyymm != null) {
×
3365
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
3366
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
3367
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
3368
            }
3369
            if (country != null) {
×
3370
                country = country.toLowerCase();
×
3371
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
3372
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3373
                }
3374
            }
3375
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
3376
            if (datasetMetrics == null) {
×
3377
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3378
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
3379
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3380
            }
3381
            Long viewsTotalRegular = null;
×
3382
            Long viewsUniqueRegular = null;
×
3383
            Long downloadsTotalRegular = null;
×
3384
            Long downloadsUniqueRegular = null;
×
3385
            Long viewsTotalMachine = null;
×
3386
            Long viewsUniqueMachine = null;
×
3387
            Long downloadsTotalMachine = null;
×
3388
            Long downloadsUniqueMachine = null;
×
3389
            Long viewsTotal = null;
×
3390
            Long viewsUnique = null;
×
3391
            Long downloadsTotal = null;
×
3392
            Long downloadsUnique = null;
×
3393
            switch (metricSupplied) {
×
3394
                case "viewsTotal":
3395
                    viewsTotal = datasetMetrics.getViewsTotal();
×
3396
                    break;
×
3397
                case "viewsTotalRegular":
3398
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
3399
                    break;
×
3400
                case "viewsTotalMachine":
3401
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
3402
                    break;
×
3403
                case "viewsUnique":
3404
                    viewsUnique = datasetMetrics.getViewsUnique();
×
3405
                    break;
×
3406
                case "viewsUniqueRegular":
3407
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
3408
                    break;
×
3409
                case "viewsUniqueMachine":
3410
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
3411
                    break;
×
3412
                case "downloadsTotal":
3413
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
3414
                    break;
×
3415
                case "downloadsTotalRegular":
3416
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
3417
                    break;
×
3418
                case "downloadsTotalMachine":
3419
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
3420
                    break;
×
3421
                case "downloadsUnique":
3422
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
3423
                    break;
×
3424
                case "downloadsUniqueRegular":
3425
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
3426
                    break;
×
3427
                case "downloadsUniqueMachine":
3428
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
3429
                    break;
×
3430
                default:
3431
                    break;
3432
            }
3433
            /**
3434
             * TODO: Think more about the JSON output and the API design.
3435
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3436
             * now, by country. We could return multiple metrics (viewsTotal,
3437
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3438
             */
3439
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
3440
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
3441
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3442
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3443
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3444
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3445
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3446
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3447
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3448
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3449
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3450
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3451
            return ok(jsonObjectBuilder);
×
3452
        } catch (WrappedResponse wr) {
×
3453
            return wr.getResponse();
×
3454
        } catch (Exception e) {
×
3455
            //bad date - caught in sanitize call
3456
            return error(BAD_REQUEST, e.getMessage());
×
3457
        }
3458
    }
3459
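    // Illustrative calls (a sketch; the metric name is one of the switch cases above and
    // the month/country values are hypothetical):
    //
    //   curl "$SERVER_URL/api/datasets/$ID/makeDataCount/viewsTotal"
    //   curl "$SERVER_URL/api/datasets/$ID/makeDataCount/downloadsUnique/2019-02?country=us"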
    
3460
    @GET
3461
    @AuthRequired
3462
    @Path("{identifier}/storageDriver")
3463
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3464
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3465
        
3466
        Dataset dataset; 
3467
        
3468
        try {
3469
            dataset = findDatasetOrDie(dvIdtf);
×
3470
        } catch (WrappedResponse ex) {
×
3471
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3472
        }
×
3473
        
3474
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3475
    }
3476
    
3477
    @PUT
3478
    @AuthRequired
3479
    @Path("{identifier}/storageDriver")
3480
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3481
            String storageDriverLabel,
3482
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3483
        
3484
        // Superuser-only:
3485
        AuthenticatedUser user;
3486
        try {
3487
            user = getRequestAuthenticatedUserOrDie(crc);
×
3488
        } catch (WrappedResponse ex) {
×
3489
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3490
        }
×
3491
        if (!user.isSuperuser()) {
×
3492
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3493
        }
3494

3495
        Dataset dataset;
3496

3497
        try {
3498
            dataset = findDatasetOrDie(dvIdtf);
×
3499
        } catch (WrappedResponse ex) {
×
3500
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3501
        }
×
3502
        
3503
        // We don't want to allow setting this to a store id that does not exist: 
3504
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
3505
            if (store.getKey().equals(storageDriverLabel)) {
×
3506
                dataset.setStorageDriverId(store.getValue());
×
3507
                datasetService.merge(dataset);
×
3508
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3509
            }
3510
        }
×
3511
        return error(Response.Status.BAD_REQUEST,
×
3512
                "No Storage Driver found for : " + storageDriverLabel);
3513
    }
3514
    
3515
    @DELETE
3516
    @AuthRequired
3517
    @Path("{identifier}/storageDriver")
3518
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3519
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3520
        
3521
        // Superuser-only:
3522
        AuthenticatedUser user;
3523
        try {
3524
            user = getRequestAuthenticatedUserOrDie(crc);
×
3525
        } catch (WrappedResponse ex) {
×
3526
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3527
        }
×
3528
        if (!user.isSuperuser()) {
×
3529
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3530
        }
3531

3532
        Dataset dataset;
3533

3534
        try {
3535
            dataset = findDatasetOrDie(dvIdtf);
×
3536
        } catch (WrappedResponse ex) {
×
3537
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3538
        }
×
3539
        
3540
        dataset.setStorageDriverId(null);
×
3541
        datasetService.merge(dataset);
×
3542
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3543
    }
3544
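    // Illustrative superuser calls for the storageDriver endpoints above (a sketch; the
    // label "LocalStack" is hypothetical and must match a configured store label):
    //
    //   curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/storageDriver"
    //   curl -X PUT -H "X-Dataverse-key: $API_TOKEN" -d "LocalStack" "$SERVER_URL/api/datasets/$ID/storageDriver"
    //   curl -X DELETE -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/storageDriver"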

3545
    @GET
3546
    @AuthRequired
3547
    @Path("{identifier}/curationLabelSet")
3548
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3549
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3550

3551
        try {
3552
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3553
            if (!user.isSuperuser()) {
×
3554
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3555
            }
3556
        } catch (WrappedResponse wr) {
×
3557
            return wr.getResponse();
×
3558
        }
×
3559

3560
        Dataset dataset;
3561

3562
        try {
3563
            dataset = findDatasetOrDie(dvIdtf);
×
3564
        } catch (WrappedResponse ex) {
×
3565
            return ex.getResponse();
×
3566
        }
×
3567

3568
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3569
    }
3570

3571
    @PUT
3572
    @AuthRequired
3573
    @Path("{identifier}/curationLabelSet")
3574
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3575
                                        @PathParam("identifier") String dvIdtf,
3576
                                        @QueryParam("name") String curationLabelSet,
3577
                                        @Context UriInfo uriInfo,
3578
                                        @Context HttpHeaders headers) throws WrappedResponse {
3579

3580
        // Superuser-only:
3581
        AuthenticatedUser user;
3582
        try {
3583
            user = getRequestAuthenticatedUserOrDie(crc);
×
3584
        } catch (WrappedResponse ex) {
×
3585
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3586
        }
×
3587
        if (!user.isSuperuser()) {
×
3588
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3589
        }
3590

3591
        Dataset dataset;
3592

3593
        try {
3594
            dataset = findDatasetOrDie(dvIdtf);
×
3595
        } catch (WrappedResponse ex) {
×
3596
            return ex.getResponse();
×
3597
        }
×
3598
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3599
            dataset.setCurationLabelSetName(curationLabelSet);
×
3600
            datasetService.merge(dataset);
×
3601
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3602
        } else {
3603
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3604
                if (setName.equals(curationLabelSet)) {
×
3605
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3606
                    datasetService.merge(dataset);
×
3607
                    return ok("Curation Label Set Name set to: " + setName);
×
3608
                }
3609
            }
×
3610
        }
3611
        return error(Response.Status.BAD_REQUEST,
×
3612
            "No Such Curation Label Set");
3613
    }
3614

3615
    @DELETE
3616
    @AuthRequired
3617
    @Path("{identifier}/curationLabelSet")
3618
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3619
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3620

3621
        // Superuser-only:
3622
        AuthenticatedUser user;
3623
        try {
3624
            user = getRequestAuthenticatedUserOrDie(crc);
×
3625
        } catch (WrappedResponse ex) {
×
3626
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3627
        }
×
3628
        if (!user.isSuperuser()) {
×
3629
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3630
        }
3631

3632
        Dataset dataset;
3633

3634
        try {
3635
            dataset = findDatasetOrDie(dvIdtf);
×
3636
        } catch (WrappedResponse ex) {
×
3637
            return ex.getResponse();
×
3638
        }
×
3639

3640
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3641
        datasetService.merge(dataset);
×
3642
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3643
    }
3644

3645
    @GET
3646
    @AuthRequired
3647
    @Path("{identifier}/allowedCurationLabels")
3648
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3649
                                             @PathParam("identifier") String dvIdtf,
3650
                                             @Context UriInfo uriInfo,
3651
                                             @Context HttpHeaders headers) throws WrappedResponse {
3652
        AuthenticatedUser user = null;
×
3653
        try {
3654
            user = getRequestAuthenticatedUserOrDie(crc);
×
3655
        } catch (WrappedResponse wr) {
×
3656
            return wr.getResponse();
×
3657
        }
×
3658

3659
        Dataset dataset;
3660

3661
        try {
3662
            dataset = findDatasetOrDie(dvIdtf);
×
3663
        } catch (WrappedResponse ex) {
×
3664
            return ex.getResponse();
×
3665
        }
×
3666
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3667
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
3668
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3669
        } else {
3670
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3671
        }
3672
    }
3673

3674
    @GET
3675
    @AuthRequired
3676
    @Path("{identifier}/timestamps")
3677
    @Produces(MediaType.APPLICATION_JSON)
3678
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3679

3680
        Dataset dataset = null;
×
3681
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3682
        try {
3683
            dataset = findDatasetOrDie(id);
×
3684
            User u = getRequestUser(crc);
×
3685
            Set<Permission> perms = new HashSet<Permission>();
×
3686
            perms.add(Permission.ViewUnpublishedDataset);
×
3687
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3688
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3689
            logger.fine("CSD: " + canSeeDraft);
×
3690
            logger.fine("IT: " + dataset.getIndexTime());
×
3691
            logger.fine("MT: " + dataset.getModificationTime());
×
3692
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
3693
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3694
            // Basic info if it's released
3695
            if (dataset.isReleased() || canSeeDraft) {
×
3696
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
3697
                if (dataset.getPublicationDate() != null) {
×
3698
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3699
                }
3700

3701
                if (dataset.getLastExportTime() != null) {
×
3702
                    timestamps.add("lastMetadataExportTime",
×
3703
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3704

3705
                }
3706

3707
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
3708
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
3709
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3710
                }
3711
                // If the modification/permissionmodification time is
3712
                // set and the index time is null or is before the mod time, the relevant index is stale
3713
                timestamps.add("hasStaleIndex",
×
3714
                        (dataset.getModificationTime() != null && (dataset.getIndexTime() == null
×
3715
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3716
                                : false);
×
3717
                timestamps.add("hasStalePermissionIndex",
×
3718
                        (dataset.getPermissionModificationTime() != null && (dataset.getPermissionIndexTime() == null
×
3719
                                || (dataset.getPermissionIndexTime().compareTo(dataset.getPermissionModificationTime()) <= 0))) ? true
×
3720
                                : false);
×
3721
            }
3722
            // More detail if you can see a draft
3723
            if (canSeeDraft) {
×
3724
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
3725
                if (dataset.getIndexTime() != null) {
×
3726
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3727
                }
3728
                if (dataset.getPermissionModificationTime() != null) {
×
3729
                    timestamps.add("lastPermissionUpdateTime",
×
3730
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3731
                }
3732
                if (dataset.getPermissionIndexTime() != null) {
×
3733
                    timestamps.add("lastPermissionIndexTime",
×
3734
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3735
                }
3736
                if (dataset.getGlobalIdCreateTime() != null) {
×
3737
                    timestamps.add("globalIdCreateTime", formatter
×
3738
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3739
                }
3740

3741
            }
3742
            return ok(timestamps);
×
3743
        } catch (WrappedResponse wr) {
×
3744
            return wr.getResponse();
×
3745
        }
3746
    }
3747
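    // Sketch of the JSON returned by getTimestamps() (keys only, wrapped in the usual
    // status/data envelope; values are ISO local date-times, and the draft-only keys are
    // included just for users who can see the draft):
    //
    //   { "createTime": "...", "publicationTime": "...", "lastMetadataExportTime": "...",
    //     "lastMajorVersionReleaseTime": "...", "hasStaleIndex": false,
    //     "hasStalePermissionIndex": false, "lastUpdateTime": "...", "lastIndexTime": "...",
    //     "lastPermissionUpdateTime": "...", "lastPermissionIndexTime": "...",
    //     "globalIdCreateTime": "..." }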

3748

3749
/****************************
3750
 * Globus Support Section:
3751
 * 
3752
 * Globus transfer in (upload) and out (download) involve three basic steps: The
3753
 * app is launched and makes a callback to the
3754
 * globusUploadParameters/globusDownloadParameters method to get all of the info
3755
 * needed to set up its display.
3756
 * 
3757
 * At some point after that, the user will make a selection as to which files to
3758
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
3759
 * to indicate a transfer is about to start. In addition to providing the
3760
 * details of where to transfer the files to/from, Dataverse also grants the
3761
 * Globus principal involved the relevant rw or r permission for the dataset.
3762
 * 
3763
 * Once the transfer is started, the app records the task id and sends it to
3764
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
3765
 * monitors the transfer task and, when it ultimately succeeds or fails, it
3766
 * revokes the principal's permission and, in the transfer-in case, adds the
3767
 * files to the dataset. (The dataset is locked until the transfer completes.)
3768
 * 
3769
 * (If no transfer is started within a specified timeout, permissions will
3770
 * automatically be revoked - see the GlobusServiceBean for details.)
3771
 *
3772
 * The option to reference a file at a remote endpoint (rather than transfer it)
3773
 * follows the first two steps of the process above but completes with a call to
3774
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
3775
 * files can be added to the dataset immediately.)
3776
 */
3777
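// Upload sequence sketch, summarizing the three steps described above (endpoint names as
// implemented in this class; the app-side behavior is assumed):
//   1. app callback:  GET  /api/datasets/{id}/globusUploadParameters?locale=...
//   2. before start:  POST /api/datasets/{id}/requestGlobusUploadPaths   (principal + numberOfFiles)
//   3. after start:   POST /api/datasets/{id}/addGlobusFiles             (jsonData incl. taskIdentifier)
// For the remote-reference variant, step 3 is replaced by the regular /addFiles call.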

3778
    /**
3779
     * Retrieve the parameters and signed URLs required to perform a globus
3780
     * transfer. This api endpoint is expected to be called as a signed callback
3781
     * after the globus-dataverse app/other app is launched, but it will accept
3782
     * other forms of authentication.
3783
     * 
3784
     * @param crc
3785
     * @param datasetId
3786
     */
3787
    @GET
3788
    @AuthRequired
3789
    @Path("{id}/globusUploadParameters")
3790
    @Produces(MediaType.APPLICATION_JSON)
3791
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3792
            @QueryParam(value = "locale") String locale) {
3793
        // -------------------------------------
3794
        // (1) Get the user from the ContainerRequestContext
3795
        // -------------------------------------
3796
        AuthenticatedUser authUser;
3797
        try {
3798
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3799
        } catch (WrappedResponse e) {
×
3800
            return e.getResponse();
×
3801
        }
×
3802
        // -------------------------------------
3803
        // (2) Get the Dataset Id
3804
        // -------------------------------------
3805
        Dataset dataset;
3806

3807
        try {
3808
            dataset = findDatasetOrDie(datasetId);
×
3809
        } catch (WrappedResponse wr) {
×
3810
            return wr.getResponse();
×
3811
        }
×
3812
        String storeId = dataset.getEffectiveStorageDriverId();
×
3813
        // acceptsGlobusTransfers should only be true for an S3 or globus store
3814
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
3815
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
3816
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
3817
        }
3818

3819
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
3820

3821
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
3822
        String transferEndpoint = null;
×
3823
        JsonArray referenceEndpointsWithPaths = null;
×
3824
        if (managed) {
×
3825
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
3826
        } else {
3827
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
×
3828
        }
3829

3830
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
3831
        queryParams.add("queryParameters",
×
3832
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
3833
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
3834
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
3835
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
3836
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
3837
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
3838
        JsonObjectBuilder params = Json.createObjectBuilder();
×
3839
        substitutedParams.keySet().forEach((key) -> {
×
3840
            params.add(key, substitutedParams.get(key));
×
3841
        });
×
3842
        params.add("managed", Boolean.toString(managed));
×
3843
        if (managed) {
×
3844
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
×
3845
            if (maxSize != null) {
×
3846
                params.add("fileSizeLimit", maxSize);
×
3847
            }
3848
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
3849
            if (limit != null) {
×
3850
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
×
3851
            }
3852
        }
3853
        if (transferEndpoint != null) {
×
3854
            params.add("endpoint", transferEndpoint);
×
3855
        } else {
3856
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
×
3857
        }
3858
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
3859
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
3860
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
×
3861
        allowedApiCalls.add(
×
3862
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
×
3863
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
×
3864
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3865
        if (managed) {
×
3866
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
×
3867
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3868
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
×
3869
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3870
        } else {
3871
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
×
3872
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3873
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
×
3874
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3875
        }
3876
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
3877
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
3878
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
3879
                .add(URLTokenUtil.TIMEOUT, 5));
×
3880
        allowedApiCalls.add(
×
3881
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
3882
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
3883
                        .add(URLTokenUtil.TIMEOUT, 5));
×
3884

3885
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
3886
    }
3887

3888
    /**
3889
     * Provides specific storageIdentifiers to use for each file and requests permissions for a given globus user to upload to the dataset
3890
     * 
3891
     * @param crc
3892
     * @param datasetId
3893
     * @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
3894
     * @return
3895
     * @throws IOException
3896
     * @throws ExecutionException
3897
     * @throws InterruptedException
3898
     */
3899
    @POST
3900
    @AuthRequired
3901
    @Path("{id}/requestGlobusUploadPaths")
3902
    @Consumes(MediaType.APPLICATION_JSON)
3903
    @Produces(MediaType.APPLICATION_JSON)
3904
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3905
            String jsonBody) throws IOException, ExecutionException, InterruptedException {
3906

3907
        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
×
3908

3909
        if (!systemConfig.isGlobusUpload()) {
×
3910
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
3911
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
3912
        }
3913

3914
        // -------------------------------------
3915
        // (1) Get the user from the ContainerRequestContext
3916
        // -------------------------------------
3917
        AuthenticatedUser authUser;
3918
        try {
3919
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3920
        } catch (WrappedResponse e) {
×
3921
            return e.getResponse();
×
3922
        }
×
3923

3924
        // -------------------------------------
3925
        // (2) Get the Dataset Id
3926
        // -------------------------------------
3927
        Dataset dataset;
3928

3929
        try {
3930
            dataset = findDatasetOrDie(datasetId);
×
3931
        } catch (WrappedResponse wr) {
×
3932
            return wr.getResponse();
×
3933
        }
×
3934
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
×
3935
                .canIssue(UpdateDatasetVersionCommand.class)) {
×
3936

3937
            JsonObject params = JsonUtil.getJsonObject(jsonBody);
×
3938
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3939
                try {
3940
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
×
3941
                    if (referencedFiles == null || referencedFiles.size() == 0) {
×
3942
                        return badRequest("No referencedFiles specified");
×
3943
                    }
3944
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
×
3945
                    return (ok(fileMap));
×
3946
                } catch (Exception e) {
×
3947
                    return badRequest(e.getLocalizedMessage());
×
3948
                }
3949
            } else {
3950
                try {
3951
                    String principal = params.getString("principal");
×
3952
                    int numberOfPaths = params.getInt("numberOfFiles");
×
3953
                    if (numberOfPaths <= 0) {
×
3954
                        return badRequest("numberOfFiles must be positive");
×
3955
                    }
3956

3957
                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
×
3958
                    switch (response.getInt("status")) {
×
3959
                    case 201:
3960
                        return ok(response.getJsonObject("paths"));
×
3961
                    case 400:
3962
                        return badRequest("Unable to grant permission");
×
3963
                    case 409:
3964
                        return conflict("Permission already exists");
×
3965
                    default:
3966
                        return error(null, "Unexpected error when granting permission");
×
3967
                    }
3968

3969
                } catch (NullPointerException | ClassCastException e) {
×
3970
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
×
3971

3972
                }
3973
            }
3974
        } else {
3975
            return forbidden("User doesn't have permission to upload to this dataset");
×
3976
        }
3977

3978
    }
3979
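    // Illustrative request bodies for requestGlobusUpload() above (a sketch; the principal
    // id and file references are hypothetical placeholders):
    //
    //   managed store:    { "principal": "d15d4244-fc10-47f3-a790-85bdb6db9a75", "numberOfFiles": 2 }
    //   remote references: { "referencedFiles": [ "<file reference>", ... ] }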

3980
    /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor
3981
     * 
3982
     * @param crc
3983
     * @param datasetId
3984
     * @param jsonData - see /addFiles documentation; an additional "taskIdentifier" key in the main object is required.
3985
     * @param uriInfo
3986
     * @return
3987
     * @throws IOException
3988
     * @throws ExecutionException
3989
     * @throws InterruptedException
3990
     */
3991
    @POST
3992
    @AuthRequired
3993
    @Path("{id}/addGlobusFiles")
3994
    @Consumes(MediaType.MULTIPART_FORM_DATA)
3995
    @Produces("application/json")
3996
    @Operation(summary = "Uploads a Globus file for a dataset", 
3997
               description = "Uploads a Globus file for a dataset")
3998
    @APIResponse(responseCode = "200",
3999
               description = "Globus file uploaded successfully to dataset")
4000
    @Tag(name = "addGlobusFilesToDataset", 
4001
         description = "Uploads a Globus file for a dataset")
4002
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4003
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
4004
                                            @PathParam("id") String datasetId,
4005
                                            @FormDataParam("jsonData") String jsonData,
4006
                                            @Context UriInfo uriInfo
4007
    ) throws IOException, ExecutionException, InterruptedException {
4008

4009
        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);
×
4010

4011
        if (!systemConfig.isHTTPUpload()) {
×
4012
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4013
        }
4014

4015
        // -------------------------------------
4016
        // (1) Get the user from the API key
4017
        // -------------------------------------
4018
        AuthenticatedUser authUser;
4019
        try {
4020
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
4021
        } catch (WrappedResponse ex) {
×
4022
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
4023
            );
4024
        }
×
4025

4026
        // -------------------------------------
4027
        // (2) Get the Dataset Id
4028
        // -------------------------------------
4029
        Dataset dataset;
4030

4031
        try {
4032
            dataset = findDatasetOrDie(datasetId);
×
4033
        } catch (WrappedResponse wr) {
×
4034
            return wr.getResponse();
×
4035
        }
×
4036
        
4037
        JsonObject jsonObject = null;
×
4038
        try {
4039
            jsonObject = JsonUtil.getJsonObject(jsonData);
×
4040
        } catch (Exception ex) {
×
4041
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
×
4042
            return badRequest("Error parsing json body");
×
4043

4044
        }
×
4045

4046
        //------------------------------------
4047
        // (2b) Make sure dataset does not have package file
4048
        // --------------------------------------
4049

4050
        for (DatasetVersion dv : dataset.getVersions()) {
×
4051
            if (dv.isHasPackageFile()) {
×
4052
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4053
                );
4054
            }
4055
        }
×
4056

4057

4058
        String lockInfoMessage = "Globus Upload API started ";
×
4059
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
×
4060
                (authUser).getId(), lockInfoMessage);
×
4061
        if (lock != null) {
×
4062
            dataset.addLock(lock);
×
4063
        } else {
4064
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
4065
        }
4066

4067

4068
        ApiToken token = authSvc.findApiTokenByUser(authUser);
×
4069

4070
        if (uriInfo != null) {
×
4071
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
×
4072
        }
4073

4074

4075
        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
×
4076
        
4077
        // Async Call
4078
        globusService.globusUpload(jsonObject, token, dataset, requestUrl, authUser);
×
4079

4080
        return ok("Async call to Globus Upload started ");
×
4081

4082
    }
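
    // Illustrative only (a sketch added for this edit, not part of the upstream source): the
    // "jsonData" form field for /addGlobusFiles is assumed to follow the /addFiles conventions
    // plus the extra "taskIdentifier" key described in the javadoc above. All values below are
    // placeholders.
    //
    //   {
    //     "taskIdentifier": "3f530302-6c48-11ee-8428-378be0d9c521",
    //     "files": [
    //       {
    //         "description": "My description.",
    //         "directoryLabel": "data/subdir1",
    //         "restrict": "false",
    //         "storageIdentifier": "globusm://18b49d3688c-62137d3ebeb5",
    //         "fileName": "file1.txt",
    //         "mimeType": "text/plain",
    //         "checksum": { "@type": "MD5", "@value": "1234" }
    //       }
    //     ]
    //   }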
4083
    
4084
    /**
     * Retrieve the parameters and signed URLs required to perform a Globus
     * transfer/download. This API endpoint is expected to be called as a signed
     * callback after the dataverse-globus app (or another app) is launched, but it
     * will accept other forms of authentication.
     *
     * @param crc
     * @param datasetId
     * @param locale
     * @param downloadId - an id for a cached object listing the files involved. This is generated by Dataverse and provided to the dataverse-globus app in a signed URL.
     * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
     */
4096
    @GET
4097
    @AuthRequired
4098
    @Path("{id}/globusDownloadParameters")
4099
    @Produces(MediaType.APPLICATION_JSON)
4100
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4101
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
4102
        // -------------------------------------
4103
        // (1) Get the user from the ContainerRequestContext
4104
        // -------------------------------------
NEW
4105
        AuthenticatedUser authUser = null;
×
4106
        try {
4107
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
4108
        } catch (WrappedResponse e) {
×
NEW
4109
            logger.fine("guest user globus download");
×
4110
        }
×
4111
        // -------------------------------------
4112
        // (2) Get the Dataset Id
4113
        // -------------------------------------
4114
        Dataset dataset;
4115

4116
        try {
4117
            dataset = findDatasetOrDie(datasetId);
×
4118
        } catch (WrappedResponse wr) {
×
4119
            return wr.getResponse();
×
4120
        }
×
4121
        String storeId = dataset.getEffectiveStorageDriverId();
×
4122
        // acceptsGlobusTransfers should only be true for an S3 or globus store
4123
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
4124
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
×
4125
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4126
        }
4127

4128
        JsonObject files = globusService.getFilesForDownload(downloadId);
×
4129
        if (files == null) {
×
4130
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
4131
        }
4132

4133
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
4134

4135
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
4136
        String transferEndpoint = null;
×
4137

4138
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
4139
        queryParams.add("queryParameters",
×
4140
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
4141
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
4142
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
4143
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
4144
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
4145
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
4146
        JsonObjectBuilder params = Json.createObjectBuilder();
×
4147
        substitutedParams.keySet().forEach((key) -> {
×
4148
            params.add(key, substitutedParams.get(key));
×
4149
        });
×
4150
        params.add("managed", Boolean.toString(managed));
×
4151
        if (managed) {
×
4152
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
4153
            params.add("endpoint", transferEndpoint);
×
4154
        }
4155
        params.add("files", files);
×
4156
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
4157
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
4158
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
×
4159
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
4160
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
×
4161
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
4162
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
×
4163
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
4164
                .add(URLTokenUtil.URL_TEMPLATE,
×
4165
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
4166
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
4167
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
4168
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
4169
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
4170
                .add(URLTokenUtil.TIMEOUT, 5));
×
4171
        allowedApiCalls.add(
×
4172
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
4173
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
4174
                        .add(URLTokenUtil.TIMEOUT, 5));
×
4175

4176
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
4177
    }
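
    // Illustrative only (a sketch added for this edit, not part of the upstream source): an
    // approximate shape of the response assembled above. The top-level structure and the exact
    // key names of the signed-call section come from URLTokenUtil.createPostBody and are an
    // assumption here; the "files" map comes from the cached download object.
    //
    //   {
    //     "queryParameters": {
    //       "datasetId": 42,
    //       "siteUrl": "https://demo.dataverse.org",
    //       "datasetVersion": "1.0",
    //       "datasetPid": "doi:10.5072/FK2/ABCDEF",
    //       "managed": "true",
    //       "endpoint": "d8c42580-6528-4605-9ad8-116a61982644",
    //       "files": { ... }
    //     },
    //     "signedUrls": [
    //       { "name": "monitorGlobusDownload", "httpMethod": "POST", "signedUrl": "...", "timeOut": 600 },
    //       { "name": "requestGlobusDownload", "httpMethod": "POST", "signedUrl": "...", "timeOut": 600 },
    //       ...
    //     ]
    //   }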
4178

4179
    /**
4180
     * Requests permissions for a given Globus user to download the specified files
     * in the dataset and returns information about the paths to transfer from.
4182
     * 
4183
     * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
4184
     * 
4185
     * @param crc
4186
     * @param datasetId
4187
     * @param jsonData - a JSON object that must include the id of the  Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
4188
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
4189
     * @throws IOException
4190
     * @throws ExecutionException
4191
     * @throws InterruptedException
4192
     */
4193
    @POST
4194
    @AuthRequired
4195
    @Path("{id}/requestGlobusDownload")
4196
    @Consumes(MediaType.APPLICATION_JSON)
4197
    @Produces(MediaType.APPLICATION_JSON)
4198
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4199
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
4200
            throws IOException, ExecutionException, InterruptedException {
4201

4202
        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);
×
4203

4204
        if (!systemConfig.isGlobusDownload()) {
×
4205
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
4206
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4207
        }
4208

4209
        // -------------------------------------
4210
        // (1) Get the user from the ContainerRequestContext
4211
        // -------------------------------------
4212
        User user = getRequestUser(crc);
×
4213

4214
        // -------------------------------------
4215
        // (2) Get the Dataset Id
4216
        // -------------------------------------
4217
        Dataset dataset;
4218

4219
        try {
4220
            dataset = findDatasetOrDie(datasetId);
×
4221
        } catch (WrappedResponse wr) {
×
4222
            return wr.getResponse();
×
4223
        }
×
4224
        JsonObject body = null;
×
4225
        if (jsonBody != null) {
×
4226
            body = JsonUtil.getJsonObject(jsonBody);
×
4227
        }
4228
        Set<String> fileIds = null;
×
4229
        if (downloadId != null) {
×
4230
            JsonObject files = globusService.getFilesForDownload(downloadId);
×
4231
            if (files != null) {
×
4232
                fileIds = files.keySet();
×
4233
            }
4234
        } else {
×
4235
            if ((body!=null) && body.containsKey("fileIds")) {
×
4236
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
×
4237
                fileIds = new HashSet<String>(fileVals.size());
×
4238
                for (JsonValue fileVal : fileVals) {
×
4239
                    String id = null;
×
4240
                    switch (fileVal.getValueType()) {
×
4241
                    case STRING:
4242
                        id = ((JsonString) fileVal).getString();
×
4243
                        break;
×
4244
                    case NUMBER:
4245
                        id = ((JsonNumber) fileVal).toString();
×
4246
                        break;
×
4247
                    default:
4248
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
×
4249
                    }
                    fileIds.add(id);
×
4252
                }
×
4253
            } else {
×
4254
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
×
4255
            }
4256
        }
4257

4258
        if (fileIds.isEmpty()) {
×
4259
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
4260
        }
4261
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
×
4262
        for (String id : fileIds) {
×
4263
            boolean published = false;
×
4264
            logger.info("File id: " + id);
×
4265

4266
            DataFile df = null;
×
4267
            try {
4268
                df = findDataFileOrDie(id);
×
4269
            } catch (WrappedResponse wr) {
×
4270
                return wr.getResponse();
×
4271
            }
×
4272
            if (!df.getOwner().equals(dataset)) {
×
4273
                return badRequest("All files must be in the dataset");
×
4274
            }
4275
            dataFiles.add(df);
×
4276

4277
            for (FileMetadata fm : df.getFileMetadatas()) {
×
4278
                if (fm.getDatasetVersion().isPublished()) {
×
4279
                    published = true;
×
4280
                    break;
×
4281
                }
4282
            }
×
4283

4284
            if (!published) {
×
4285
                // If the file is not published, they can still download the file, if the user
4286
                // has the permission to view unpublished versions:
4287

4288
                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
×
4289
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
×
4290
                    return forbidden("User doesn't have permission to download file: " + id);
×
4291
                }
4292
            } else {
                // The file is published; check whether it is restricted and/or actively embargoed.
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df)) {
                    // This check also handles all three cases: authenticated session user,
                    // token user, and guest.
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
                        return forbidden("User doesn't have permission to download file: " + id);
                    }
                }
            }
4301
        }
×
4302
        // Allowed to download all requested files
4303
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
×
4304
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
4305
            // If managed, give the principal read permissions
4306
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
×
4307
            switch (status) {
×
4308
            case 201:
4309
                return ok(files);
×
4310
            case 400:
4311
                return badRequest("Unable to grant permission");
×
4312
            case 409:
4313
                return conflict("Permission already exists");
×
4314
            default:
4315
                return error(null, "Unexpected error when granting permission");
×
4316
            }
4317

4318
        }
4319

4320
        return ok(files);
×
4321
    }
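
    // Illustrative only (a sketch added for this edit, not part of the upstream source): a
    // minimal request body for this endpoint when it is called directly (no downloadId query
    // parameter). "principal" is the Globus identity granted read access on managed stores;
    // "fileIds" lists the files to transfer. Values are placeholders.
    //
    //   {
    //     "principal": "d15d4244-fc10-47f3-a790-85bdb6db9a75",
    //     "fileIds": [ 1234, 1235 ]
    //   }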
4322

4323
    /**
4324
     * Monitors a Globus download and removes permissions on the dir/dataset when
4325
     * the specified transfer task is completed.
4326
     * 
4327
     * @param crc
4328
     * @param datasetId
4329
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
4330
     *                  Globus task to monitor.
4331
     * @return
4332
     * @throws IOException
4333
     * @throws ExecutionException
4334
     * @throws InterruptedException
4335
     */
4336
    @POST
4337
    @AuthRequired
4338
    @Path("{id}/monitorGlobusDownload")
4339
    @Consumes(MediaType.APPLICATION_JSON)
4340
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4341
            String jsonData) throws IOException, ExecutionException, InterruptedException {
4342

4343
        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
×
4344

4345
        if (!systemConfig.isGlobusDownload()) {
×
4346
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
4347
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4348
        }
4349

4350
        // -------------------------------------
4351
        // (1) Get the user from the ContainerRequestContext
4352
        // -------------------------------------
4353
        User authUser;
4354
        authUser = getRequestUser(crc);
×
4355

4356
        // -------------------------------------
4357
        // (2) Get the Dataset Id
4358
        // -------------------------------------
4359
        Dataset dataset;
4360

4361
        try {
4362
            dataset = findDatasetOrDie(datasetId);
×
4363
        } catch (WrappedResponse wr) {
×
4364
            return wr.getResponse();
×
4365
        }
×
4366

4367
        // Async Call
4368
        globusService.globusDownload(jsonData, dataset, authUser);
×
4369

4370
        return ok("Async call to Globus Download started");
×
4371

4372
    }
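
    // Illustrative only (a sketch added for this edit, not part of the upstream source): per
    // the javadoc above, the body is expected to carry the Globus task to monitor, e.g.:
    //
    //   { "taskIdentifier": "3f530302-6c48-11ee-8428-378be0d9c521" }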
4373

4374
    /**
4375
     * Add multiple files to an existing Dataset
4376
     *
4377
     * @param idSupplied
4378
     * @param jsonData
4379
     * @return
4380
     */
4381
    @POST
4382
    @AuthRequired
4383
    @Path("{id}/addFiles")
4384
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4385
    @Produces("application/json")
4386
    @Operation(summary = "Uploads a set of files to a dataset", 
4387
               description = "Uploads a set of files to a dataset")
4388
    @APIResponse(responseCode = "200",
4389
               description = "Files uploaded successfully to dataset")
4390
    @Tag(name = "addFilesToDataset", 
4391
         description = "Uploads a set of files to a dataset")
4392
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4393
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
4394
            @FormDataParam("jsonData") String jsonData) {
4395

4396
        if (!systemConfig.isHTTPUpload()) {
×
4397
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4398
        }
4399

4400
        // -------------------------------------
4401
        // (1) Get the user from the ContainerRequestContext
4402
        // -------------------------------------
4403
        User authUser;
4404
        authUser = getRequestUser(crc);
×
4405

4406
        // -------------------------------------
4407
        // (2) Get the Dataset Id
4408
        // -------------------------------------
4409
        Dataset dataset;
4410

4411
        try {
4412
            dataset = findDatasetOrDie(idSupplied);
×
4413
        } catch (WrappedResponse wr) {
×
4414
            return wr.getResponse();
×
4415
        }
×
4416

4417
        dataset.getLocks().forEach(dl -> {
×
4418
            logger.info(dl.toString());
×
4419
        });
×
4420

4421
        //------------------------------------
4422
        // (2a) Make sure dataset does not have package file
4423
        // --------------------------------------
4424

4425
        for (DatasetVersion dv : dataset.getVersions()) {
×
4426
            if (dv.isHasPackageFile()) {
×
4427
                return error(Response.Status.FORBIDDEN,
×
4428
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4429
                );
4430
            }
4431
        }
×
4432

4433
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4434

4435
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4436
                dvRequest,
4437
                this.ingestService,
4438
                this.datasetService,
4439
                this.fileService,
4440
                this.permissionSvc,
4441
                this.commandEngine,
4442
                this.systemConfig
4443
        );
4444

4445
        return addFileHelper.addFiles(jsonData, dataset, authUser);
×
4446

4447
    }
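
    // Illustrative only (a sketch added for this edit, not part of the upstream source): a
    // minimal "jsonData" form field for /addFiles, assuming the direct-upload conventions
    // (the files have already been staged under the given storageIdentifier). Field names and
    // values are placeholders and may vary by installation.
    //
    //   [
    //     {
    //       "description": "First file",
    //       "directoryLabel": "data",
    //       "categories": ["Data"],
    //       "restrict": "false",
    //       "storageIdentifier": "s3://demo-bucket:18b49d3688c-62137d3ebeb5",
    //       "fileName": "file1.txt",
    //       "mimeType": "text/plain",
    //       "checksum": { "@type": "SHA-1", "@value": "123456" }
    //     }
    //   ]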
4448

4449
    /**
4450
     * Replace multiple files in an existing Dataset
4451
     *
4452
     * @param idSupplied
4453
     * @param jsonData
4454
     * @return
4455
     */
4456
    @POST
4457
    @AuthRequired
4458
    @Path("{id}/replaceFiles")
4459
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4460
    @Produces("application/json")
4461
    @Operation(summary = "Replace a set of files to a dataset", 
4462
               description = "Replace a set of files to a dataset")
4463
    @APIResponse(responseCode = "200",
4464
               description = "Files replaced successfully to dataset")
4465
    @Tag(name = "replaceFilesInDataset", 
4466
         description = "Replace a set of files to a dataset")
4467
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) 
4468
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
4469
                                          @PathParam("id") String idSupplied,
4470
                                          @FormDataParam("jsonData") String jsonData) {
4471

4472
        if (!systemConfig.isHTTPUpload()) {
×
4473
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4474
        }
4475

4476
        // -------------------------------------
4477
        // (1) Get the user from the ContainerRequestContext
4478
        // -------------------------------------
4479
        User authUser;
4480
        authUser = getRequestUser(crc);
×
4481

4482
        // -------------------------------------
4483
        // (2) Get the Dataset Id
4484
        // -------------------------------------
4485
        Dataset dataset;
4486

4487
        try {
4488
            dataset = findDatasetOrDie(idSupplied);
×
4489
        } catch (WrappedResponse wr) {
×
4490
            return wr.getResponse();
×
4491
        }
×
4492

4493
        dataset.getLocks().forEach(dl -> {
×
4494
            logger.info(dl.toString());
×
4495
        });
×
4496

4497
        //------------------------------------
4498
        // (2a) Make sure dataset does not have package file
4499
        // --------------------------------------
4500

4501
        for (DatasetVersion dv : dataset.getVersions()) {
×
4502
            if (dv.isHasPackageFile()) {
×
4503
                return error(Response.Status.FORBIDDEN,
×
4504
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4505
                );
4506
            }
4507
        }
×
4508

4509
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4510

4511
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4512
                dvRequest,
4513
                this.ingestService,
4514
                this.datasetService,
4515
                this.fileService,
4516
                this.permissionSvc,
4517
                this.commandEngine,
4518
                this.systemConfig
4519
        );
4520

4521
        return addFileHelper.replaceFiles(jsonData, dataset, authUser);
×
4522

4523
    }
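
    // Illustrative only (a sketch added for this edit, not part of the upstream source): a
    // minimal "jsonData" form field for /replaceFiles, assuming the same conventions as
    // /addFiles plus a "fileToReplaceId" per entry naming the file being replaced. Values are
    // placeholders.
    //
    //   [
    //     {
    //       "fileToReplaceId": 42,
    //       "forceReplace": true,
    //       "storageIdentifier": "s3://demo-bucket:18b49d3688c-62137d3ebeb5",
    //       "fileName": "file1_v2.txt",
    //       "mimeType": "text/plain",
    //       "checksum": { "@type": "SHA-1", "@value": "123456" }
    //     }
    //   ]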
4524

4525
    /**
4526
     * API to find curation assignments and statuses
4527
     *
4528
     * @return
4529
     * @throws WrappedResponse
4530
     */
4531
    @GET
4532
    @AuthRequired
4533
    @Path("/listCurationStates")
4534
    @Produces("text/csv")
4535
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {
4536

4537
        try {
4538
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
4539
            if (!user.isSuperuser()) {
×
4540
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4541
            }
4542
        } catch (WrappedResponse wr) {
×
4543
            return wr.getResponse();
×
4544
        }
×
4545

4546
        List<DataverseRole> allRoles = dataverseRoleService.findAll();
×
4547
        List<DataverseRole> curationRoles = new ArrayList<DataverseRole>();
×
4548
        allRoles.forEach(r -> {
×
4549
            if (r.permissions().contains(Permission.PublishDataset))
×
4550
                curationRoles.add(r);
×
4551
        });
×
4552
        HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>();
×
4553
        curationRoles.forEach(r -> {
×
4554
            assignees.put(r.getAlias(), null);
×
4555
        });
×
4556

4557
        StringBuilder csvSB = new StringBuilder(String.join(",",
×
4558
                BundleUtil.getStringFromBundle("dataset"),
×
4559
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
×
4560
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
×
4561
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
×
4562
                String.join(",", assignees.keySet())));
×
4563
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
×
4564
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
×
4565
            curationRoles.forEach(r -> {
×
4566
                assignees.put(r.getAlias(), new HashSet<String>());
×
4567
            });
×
4568
            for (RoleAssignment ra : ras) {
×
4569
                if (curationRoles.contains(ra.getRole())) {
×
4570
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
×
4571
                }
4572
            }
×
4573
            DatasetVersion dsv = dataset.getLatestVersion();
×
4574
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
×
4575
            String status = dsv.getExternalStatusLabel();
×
4576
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
×
4577
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
×
4578
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
×
4579
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
×
4580
            List<String> sList = new ArrayList<String>();
×
4581
            assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
×
4582
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
×
4583
        }
×
4584
        csvSB.append("\n");
×
4585
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
×
4586
    }
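
    // Illustrative only (a sketch added for this edit, not part of the upstream source): the
    // CSV built above has a fixed column set (dataset hyperlink, creation date, modification
    // date, curation status) followed by one column per role carrying the PublishDataset
    // permission. Assuming a single such role aliased "curator" and bundle labels close to the
    // key names, a row might look like:
    //
    //   Dataset,Creation Date,Modification Date,Curation Status,curator
    //   "=HYPERLINK(""https://demo.dataverse.org/dataset.xhtml?persistentId=doi:10.5072/FK2/ABCDEF"",""My Dataset"")",2024-05-01,2024-06-15,Author contacted,@jsmith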
4587

4588
    // APIs to manage archival status
4589

4590
    @GET
4591
    @AuthRequired
4592
    @Produces(MediaType.APPLICATION_JSON)
4593
    @Path("/{id}/{version}/archivalStatus")
4594
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4595
                                                    @PathParam("id") String datasetId,
4596
                                                    @PathParam("version") String versionNumber,
4597
                                                    @Context UriInfo uriInfo,
4598
                                                    @Context HttpHeaders headers) {
4599

4600
        try {
4601
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4602
            if (!au.isSuperuser()) {
×
4603
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4604
            }
4605
            DataverseRequest req = createDataverseRequest(au);
×
4606
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4607
                    headers);
4608

4609
            if (dsv.getArchivalCopyLocation() == null) {
×
4610
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
×
4611
            } else {
4612
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
×
4613
                return ok(status);
×
4614
            }
4615
        } catch (WrappedResponse wr) {
×
4616
            return wr.getResponse();
×
4617
        }
4618
    }
4619

4620
    @PUT
4621
    @AuthRequired
4622
    @Consumes(MediaType.APPLICATION_JSON)
4623
    @Path("/{id}/{version}/archivalStatus")
4624
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4625
                                                    @PathParam("id") String datasetId,
4626
                                                    @PathParam("version") String versionNumber,
4627
                                                    String newStatus,
4628
                                                    @Context UriInfo uriInfo,
4629
                                                    @Context HttpHeaders headers) {
4630

4631
        logger.fine(newStatus);
×
4632
        try {
4633
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4634

4635
            if (!au.isSuperuser()) {
×
4636
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4637
            }
4638
            
4639
            //Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
4640
            JsonObject update = JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));
×
4641
            
4642
            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
×
4643
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
×
4644
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
×
4645
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {
×
4646

4647
                    DataverseRequest req = createDataverseRequest(au);
×
4648
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
×
4649
                            uriInfo, headers);
4650

4651
                    if (dsv == null) {
×
4652
                        return error(Status.NOT_FOUND, "Dataset version not found");
×
4653
                    }
4654
                    if (isSingleVersionArchiving()) {
×
4655
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
×
4656
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
×
4657
                                return error(Status.CONFLICT, "Dataset already archived.");
×
4658
                            }
4659
                        }
×
4660
                    }
4661

4662
                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
×
4663
                    dsv = datasetversionService.merge(dsv);
×
4664
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
×
4665
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());
×
4666

4667
                    return ok("Status updated");
×
4668
                }
4669
            }
4670
        } catch (WrappedResponse wr) {
×
4671
            return wr.getResponse();
×
4672
        } catch (JsonException | IllegalStateException ex) {
×
4673
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
×
4674
        }
×
4675
        return error(Status.BAD_REQUEST, "Unacceptable status format");
×
4676
    }
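
    // Illustrative only (a sketch added for this edit, not part of the upstream source): a
    // minimal body for the PUT above, assuming DatasetVersion.ARCHIVAL_STATUS and
    // ARCHIVAL_STATUS_MESSAGE resolve to the keys "status" and "message", with "status" being
    // one of the pending/success/failure constants checked above.
    //
    //   { "status": "success", "message": "Bag archived at https://archive.example.org/bags/123" }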
4677
    
4678
    @DELETE
4679
    @AuthRequired
4680
    @Produces(MediaType.APPLICATION_JSON)
4681
    @Path("/{id}/{version}/archivalStatus")
4682
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4683
                                                       @PathParam("id") String datasetId,
4684
                                                       @PathParam("version") String versionNumber,
4685
                                                       @Context UriInfo uriInfo,
4686
                                                       @Context HttpHeaders headers) {
4687

4688
        try {
4689
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4690
            if (!au.isSuperuser()) {
×
4691
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4692
            }
4693

4694
            DataverseRequest req = createDataverseRequest(au);
×
4695
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4696
                    headers);
4697
            if (dsv == null) {
×
4698
                return error(Status.NOT_FOUND, "Dataset version not found");
×
4699
            }
4700
            dsv.setArchivalCopyLocation(null);
×
4701
            dsv = datasetversionService.merge(dsv);
×
4702

4703
            return ok("Status deleted");
×
4704

4705
        } catch (WrappedResponse wr) {
×
4706
            return wr.getResponse();
×
4707
        }
4708
    }
4709
    
4710
    private boolean isSingleVersionArchiving() {
4711
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
×
4712
        if (className != null) {
×
4713
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
4714
            try {
4715
                clazz =  Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
×
4716
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
×
4717
            } catch (ClassNotFoundException e) {
×
4718
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
×
4719
            } catch (ClassCastException cce) {
×
4720
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
×
4721
            }
×
4722
        }
4723
        return false;
×
4724
    }
4725
    
4726
    // This method provides a callback for an external tool to retrieve its
    // parameters/API URLs. If the request is authenticated, e.g. by being
    // signed, the API URLs will be signed. If a guest request is made, the URLs
    // will be plain/unsigned.
    // This supports both the case where a tool is accessing a restricted resource
    // (e.g. a draft dataset) and the public case.
4732
    @GET
4733
    @AuthRequired
4734
    @Path("{id}/versions/{version}/toolparams/{tid}")
4735
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
4736
                                            @PathParam("tid") long externalToolId,
4737
                                            @PathParam("id") String datasetId,
4738
                                            @PathParam("version") String version,
4739
                                            @QueryParam(value = "locale") String locale) {
4740
        try {
4741
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
4742
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
×
4743
            if (target == null) {
×
4744
                return error(BAD_REQUEST, "DatasetVersion not found.");
×
4745
            }
4746
            
4747
            ExternalTool externalTool = externalToolService.findById(externalToolId);
×
4748
            if (externalTool == null) {
×
4749
                return error(BAD_REQUEST, "External tool not found.");
×
4750
            }
4751
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
×
4752
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
×
4753
            }
4754
            ApiToken apiToken = null;
×
4755
            User u = getRequestUser(crc);
×
4756
            apiToken = authSvc.getValidApiTokenForUser(u);
×
4757

4758
            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
×
4759
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
×
4760
        } catch (WrappedResponse wr) {
×
4761
            return wr.getResponse();
×
4762
        }
4763
    }
4764

4765
    @GET
4766
    @Path("summaryFieldNames")
4767
    public Response getDatasetSummaryFieldNames() {
4768
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
×
4769
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
×
4770
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
×
4771
        for (String fieldName : fieldNames) {
×
4772
            fieldNamesArrayBuilder.add(fieldName);
×
4773
        }
4774
        return ok(fieldNamesArrayBuilder);
×
4775
    }
4776

4777
    @GET
4778
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
4779
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
4780
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
4781
        if (privateUrlUser == null) {
×
4782
            return notFound("Private URL user not found");
×
4783
        }
4784
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
×
4785
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
×
4786
        if (isAnonymizedAccess && anonymizedFieldTypeNames == null) {
×
4787
            throw new NotAcceptableException("Anonymized Access not enabled");
×
4788
        }
4789
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4790
        if (dsv == null || dsv.getId() == null) {
×
4791
            return notFound("Dataset version not found");
×
4792
        }
4793
        JsonObjectBuilder responseJson;
4794
        if (isAnonymizedAccess) {
×
4795
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
×
4796
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
×
4797
        } else {
×
4798
            responseJson = json(dsv, null, true, returnOwners);
×
4799
        }
4800
        return ok(responseJson);
×
4801
    }
4802

4803
    @GET
4804
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
4805
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
4806
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
4807
        if (privateUrlUser == null) {
×
4808
            return notFound("Private URL user not found");
×
4809
        }
4810
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4811
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
×
4812
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
×
4813
    }
4814

4815
    @GET
4816
    @AuthRequired
4817
    @Path("{id}/versions/{versionId}/citation")
4818
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
4819
                                              @PathParam("id") String datasetId,
4820
                                              @PathParam("versionId") String versionId,
4821
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
4822
                                              @Context UriInfo uriInfo,
4823
                                              @Context HttpHeaders headers) {
NEW
4824
        boolean checkFilePerms = false;
×
4825
        return response(req -> ok(
×
NEW
4826
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers,
×
NEW
4827
                        includeDeaccessioned, checkFilePerms).getCitation(true, false)),
×
NEW
4828
                getRequestUser(crc));
×
4829
    }
4830

4831
    @POST
4832
    @AuthRequired
4833
    @Path("{id}/versions/{versionId}/deaccession")
4834
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
4835
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
×
4836
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
×
4837
        }
4838
        return response(req -> {
×
4839
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
4840
            try {
4841
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
×
4842
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
×
4843
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
×
4844
                if (deaccessionForwardURL != null) {
×
4845
                    try {
4846
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
×
4847
                    } catch (IllegalArgumentException iae) {
×
4848
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
×
4849
                    }
×
4850
                }
4851
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
×
4852
                
NEW
4853
                return ok("Dataset " + 
×
NEW
4854
                        (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) + 
×
4855
                        " deaccessioned for version " + versionId);
4856
            } catch (JsonParsingException jpe) {
×
4857
                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
4858
            }
4859
        }, getRequestUser(crc));
×
4860
    }
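
    // Illustrative only (a sketch added for this edit, not part of the upstream source): a
    // minimal deaccession body handled above; "deaccessionForwardURL" is optional.
    //
    //   {
    //     "deaccessionReason": "Superseded by a corrected version.",
    //     "deaccessionForwardURL": "https://example.org/new-location"
    //   }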
4861

4862
    @GET
4863
    @AuthRequired
4864
    @Path("{identifier}/guestbookEntryAtRequest")
4865
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4866
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4867

4868
        Dataset dataset;
4869

4870
        try {
4871
            dataset = findDatasetOrDie(dvIdtf);
×
4872
        } catch (WrappedResponse ex) {
×
4873
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4874
        }
×
4875
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
×
4876
        if (gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
×
4877
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4878
        }
4879
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
×
4880
    }
4881

4882
    @PUT
4883
    @AuthRequired
4884
    @Path("{identifier}/guestbookEntryAtRequest")
4885
    public Response setGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4886
                                               boolean gbAtRequest,
4887
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4888

4889
        // Superuser-only:
4890
        AuthenticatedUser user;
4891
        try {
4892
            user = getRequestAuthenticatedUserOrDie(crc);
×
4893
        } catch (WrappedResponse ex) {
×
4894
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4895
        }
×
4896
        if (!user.isSuperuser()) {
×
4897
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4898
        }
4899

4900
        Dataset dataset;
4901

4902
        try {
4903
            dataset = findDatasetOrDie(dvIdtf);
×
4904
        } catch (WrappedResponse ex) {
×
4905
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4906
        }
×
4907
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
×
4908
        if (!gbAtRequestOpt.isPresent()) {
×
4909
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
×
4910
        }
4911
        String choice = Boolean.valueOf(gbAtRequest).toString();
×
4912
        dataset.setGuestbookEntryAtRequest(choice);
×
4913
        datasetService.merge(dataset);
×
4914
        return ok("Guestbook Entry At Request set to: " + choice);
×
4915
    }
4916

4917
    @DELETE
4918
    @AuthRequired
4919
    @Path("{identifier}/guestbookEntryAtRequest")
4920
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4921
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4922

4923
        // Superuser-only:
4924
        AuthenticatedUser user;
4925
        try {
4926
            user = getRequestAuthenticatedUserOrDie(crc);
×
4927
        } catch (WrappedResponse ex) {
×
4928
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4929
        }
×
4930
        if (!user.isSuperuser()) {
×
4931
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4932
        }
4933

4934
        Dataset dataset;
4935

4936
        try {
4937
            dataset = findDatasetOrDie(dvIdtf);
×
4938
        } catch (WrappedResponse ex) {
×
4939
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4940
        }
×
4941

4942
        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
×
4943
        datasetService.merge(dataset);
×
4944
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4945
    }
4946

4947
    @GET
4948
    @AuthRequired
4949
    @Path("{id}/userPermissions")
4950
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
4951
        Dataset dataset;
4952
        try {
4953
            dataset = findDatasetOrDie(datasetId);
×
4954
        } catch (WrappedResponse wr) {
×
4955
            return wr.getResponse();
×
4956
        }
×
4957
        User requestUser = getRequestUser(crc);
×
4958
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
4959
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
×
4960
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
×
4961
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
×
4962
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
×
4963
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
×
4964
        return ok(jsonObjectBuilder);
×
4965
    }
4966

4967
    @GET
4968
    @AuthRequired
4969
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
4970
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
4971
                                                 @PathParam("id") String datasetId,
4972
                                                 @PathParam("versionId") String versionId,
4973
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
4974
                                                 @Context UriInfo uriInfo,
4975
                                                 @Context HttpHeaders headers) {
4976
        return response(req -> {
×
4977
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
4978
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
×
4979
        }, getRequestUser(crc));
×
4980
    }
4981
    
4982
    /**
4983
     * Get the PidProvider that will be used for generating new DOIs in this dataset
4984
     *
4985
     * @return - the id of the effective PID generator for the given dataset
4986
     * @throws WrappedResponse
4987
     */
4988
    @GET
4989
    @AuthRequired
4990
    @Path("{identifier}/pidGenerator")
4991
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4992
            @Context HttpHeaders headers) throws WrappedResponse {
4993

4994
        Dataset dataset;
4995

4996
        try {
4997
            dataset = findDatasetOrDie(dvIdtf);
×
4998
        } catch (WrappedResponse ex) {
×
4999
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5000
        }
×
5001
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
×
5002
        if (pidProvider == null) {
×
5003
            //This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it
5004
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
×
5005
        }
5006
        String pidGeneratorId = pidProvider.getId();
×
5007
        return ok(pidGeneratorId);
×
5008
    }
5009

5010
    @PUT
5011
    @AuthRequired
5012
    @Path("{identifier}/pidGenerator")
5013
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
5014
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {
5015

5016
        // Superuser-only:
5017
        AuthenticatedUser user;
5018
        try {
5019
            user = getRequestAuthenticatedUserOrDie(crc);
×
5020
        } catch (WrappedResponse ex) {
×
5021
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
5022
        }
×
5023
        if (!user.isSuperuser()) {
×
5024
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5025
        }
5026

5027
        Dataset dataset;
5028

5029
        try {
5030
            dataset = findDatasetOrDie(datasetId);
×
5031
        } catch (WrappedResponse ex) {
×
5032
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5033
        }
×
5034
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
×
5035
            dataset.setPidGeneratorId(generatorId);
×
5036
            datasetService.merge(dataset);
×
5037
            return ok("PID Generator set to: " + generatorId);
×
5038
        } else {
5039
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the give id");
×
5040
        }
5041

5042
    }
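
    // Illustrative only (a sketch added for this edit, not part of the upstream source): setting
    // the generator via curl, assuming a superuser API token and that "perma1" is the id of a
    // PID provider configured on this installation.
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d perma1 \
    //        "$SERVER_URL/api/datasets/:persistentId/pidGenerator?persistentId=doi:10.5072/FK2/ABCDEF"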
5043

5044
    @DELETE
5045
    @AuthRequired
5046
    @Path("{identifier}/pidGenerator")
5047
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5048
            @Context HttpHeaders headers) throws WrappedResponse {
5049

5050
        // Superuser-only:
5051
        AuthenticatedUser user;
5052
        try {
5053
            user = getRequestAuthenticatedUserOrDie(crc);
×
5054
        } catch (WrappedResponse ex) {
×
5055
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5056
        }
×
5057
        if (!user.isSuperuser()) {
×
5058
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5059
        }
5060

5061
        Dataset dataset;
5062

5063
        try {
5064
            dataset = findDatasetOrDie(dvIdtf);
×
5065
        } catch (WrappedResponse ex) {
×
5066
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5067
        }
×
5068

5069
        dataset.setPidGenerator(null);
×
5070
        datasetService.merge(dataset);
×
5071
        return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
×
5072
    }
5073

5074
}