IQSS / dataverse, build #22002 (push via github / web-flow)
01 Apr 2024 07:56PM CUT. Coverage: 20.716% (+0.5%) from 20.173%.
Merge pull request #10453 from IQSS/develop ("Merge 6.2 into master")

704 of 2679 new or added lines in 152 files covered (26.28%).
81 existing lines in 49 files are now uncovered.
17160 of 82836 relevant lines covered (20.72%), at 0.21 hits per line.

Source File

/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java

package edu.harvard.iq.dataverse.api;

import com.amazonaws.services.s3.model.PartETag;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.DatasetLock.Reason;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
import edu.harvard.iq.dataverse.dataaccess.*;
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
import edu.harvard.iq.dataverse.engine.command.impl.*;
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
import edu.harvard.iq.dataverse.globus.GlobusUtil;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.makedatacount.*;
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
import edu.harvard.iq.dataverse.util.*;
import edu.harvard.iq.dataverse.util.bagit.OREMap;
import edu.harvard.iq.dataverse.util.json.*;
import edu.harvard.iq.dataverse.workflow.Workflow;
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
import jakarta.ejb.EJB;
import jakarta.ejb.EJBException;
import jakarta.inject.Inject;
import jakarta.json.*;
import jakarta.json.stream.JsonParsingException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.ws.rs.*;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.core.*;
import jakarta.ws.rs.core.Response.Status;
import org.apache.commons.lang3.StringUtils;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static edu.harvard.iq.dataverse.api.ApiConstants.*;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;

@Path("datasets")
public class Datasets extends AbstractApiBean {

    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");

    @Inject DataverseSession session;

    @EJB
    DatasetServiceBean datasetService;

    @EJB
    DataverseServiceBean dataverseService;

    @EJB
    GlobusServiceBean globusService;

    @EJB
    UserNotificationServiceBean userNotificationService;

    @EJB
    PermissionServiceBean permissionService;

    @EJB
    AuthenticationServiceBean authenticationServiceBean;

    @EJB
    DDIExportServiceBean ddiExportService;

    @EJB
    MetadataBlockServiceBean metadataBlockService;

    @EJB
    DataFileServiceBean fileService;

    @EJB
    IngestServiceBean ingestService;

    @EJB
    EjbDataverseEngine commandEngine;

    @EJB
    IndexServiceBean indexService;

    @EJB
    S3PackageImporter s3PackageImporter;

    @EJB
    SettingsServiceBean settingsService;

    // TODO: Move to AbstractApiBean
    @EJB
    DatasetMetricsServiceBean datasetMetricsSvc;

    @EJB
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;

    @EJB
    EmbargoServiceBean embargoService;

    @Inject
    MakeDataCountLoggingServiceBean mdcLogService;

    @Inject
    DataverseRequestServiceBean dvRequestService;

    @Inject
    WorkflowServiceBean wfService;

    @Inject
    DataverseRoleServiceBean dataverseRoleService;

    @EJB
    DatasetVersionServiceBean datasetversionService;

    @Inject
    PrivateUrlServiceBean privateUrlService;

    @Inject
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;

    /**
     * Used to consolidate the way we parse and handle dataset versions.
     * @param <T>
     */
    public interface DsVersionHandler<T> {
        T handleLatest();
        T handleDraft();
        T handleSpecific( long major, long minor );
        T handleLatestPublished();
    }
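
    /*
     * Illustrative sketch (not part of the original file): a caller could supply an
     * anonymous DsVersionHandler to map a version-id string such as ":latest",
     * ":draft", ":latest-published", or "1.2" onto whatever result type it needs, e.g.:
     *
     *   DsVersionHandler<String> handler = new DsVersionHandler<>() {
     *       public String handleLatest()          { return "latest"; }
     *       public String handleDraft()           { return "draft"; }
     *       public String handleSpecific(long major, long minor) { return major + "." + minor; }
     *       public String handleLatestPublished() { return "latest-published"; }
     *   };
     */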

    @GET
    @AuthRequired
    @Path("{id}")
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, @QueryParam("returnOwners") boolean returnOwners) {
        return response( req -> {
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id)));
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
            //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
            if ((latest != null) && latest.isReleased()) {
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
                mdcLogService.logEntry(entry);
            }
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
        }, getRequestUser(crc));
    }
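
    /*
     * Example (sketch, not from the original source): assuming the application is
     * deployed under /api and $API_TOKEN holds a valid key, the endpoint above can
     * be exercised with:
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID?returnOwners=true"
     */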

    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
    // to obtain the dataset that we are trying to export - which would handle
    // Auth in the process... For now, Auth isn't necessary - since export ONLY
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
    @GET
    @Path("/export")
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json" })
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {

        try {
            Dataset dataset = datasetService.findByGlobalId(persistentId);
            if (dataset == null) {
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
            }

            ExportService instance = ExportService.getInstance();

            InputStream is = instance.getExport(dataset, exporter);

            String mediaType = instance.getMediaType(exporter);
            //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
            mdcLogService.logEntry(entry);

            return Response.ok()
                    .entity(is)
                    .type(mediaType)
                    .build();
        } catch (Exception wr) {
            logger.warning(wr.getMessage());
            return error(Response.Status.FORBIDDEN, "Export Failed");
        }
    }
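
    /*
     * Example (sketch): export needs no API token because it only works on
     * published datasets; exporter names such as "dataverse_json" or "ddi" are
     * whatever exporters are registered with ExportService on a given installation.
     *
     *   curl "$SERVER_URL/api/datasets/export?exporter=dataverse_json&persistentId=$PERSISTENT_ID"
     */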

    @DELETE
    @AuthRequired
    @Path("{id}")
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
        // (and there's a comment that says "TODO: remove this command")
        // do we need an exposed API call for it?
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand,
        // if the dataset only has 1 version... In other words, the functionality
        // currently provided by this API is covered between the "deleteDraftVersion" and
        // "destroyDataset" API calls.
        // (The logic below follows the current implementation of the underlying
        // commands!)

        User u = getRequestUser(crc);
        return response( req -> {
            Dataset doomed = findDatasetOrDie(id);
            DatasetVersion doomedVersion = doomed.getLatestVersion();
            boolean destroy = false;

            if (doomed.getVersions().size() == 1) {
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
                }
                destroy = true;
            } else {
                if (!doomedVersion.isDraft()) {
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
                }
            }

            // Gather the locations of the physical files that will need to be
            // deleted once the destroy command execution has been finalized:
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);

            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));

            // If we have gotten this far, the destroy command has succeeded,
            // so we can finalize it by permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no
            // longer exist in the database, before attempting to delete
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Dataset " + id + " deleted");
        }, u);
    }
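
    /*
     * Example (sketch): deleting via the endpoint above; for a published dataset
     * this only removes the latest DRAFT version unless the caller is a superuser.
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID"
     */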

    @DELETE
    @AuthRequired
    @Path("{id}/destroy")
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {

        User u = getRequestUser(crc);
        return response(req -> {
            // first check if dataset is released, and if so, if user is a superuser
            Dataset doomed = findDatasetOrDie(id);

            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
            }

            // Gather the locations of the physical files that will need to be
            // deleted once the destroy command execution has been finalized:
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

            execCommand(new DestroyDatasetCommand(doomed, req));

            // If we have gotten this far, the destroy command has succeeded,
            // so we can finalize permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no
            // longer exist in the database, before attempting to delete
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Dataset " + id + " destroyed");
        }, u);
    }
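
    /*
     * Example (sketch): destroy is irreversible and restricted to superusers.
     *
     *   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/destroy"
     */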

    @DELETE
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId ){
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
        }

        return response( req -> {
            Dataset dataset = findDatasetOrDie(id);
            DatasetVersion doomed = dataset.getLatestVersion();

            if (!doomed.isDraft()) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
            }

            // Gather the locations of the physical files that will need to be
            // deleted once the destroy command execution has been finalized:

            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

            execCommand( new DeleteDatasetVersionCommand(req, dataset));

            // If we have gotten this far, the delete command has succeeded -
            // by either deleting the Draft version of a published dataset,
            // or destroying an unpublished one.
            // This means we can finalize permanently deleting the physical files:
            // (DataFileService will double-check that the datafiles no
            // longer exist in the database, before attempting to delete
            // the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Draft version of dataset " + id + " deleted");
        }, getRequestUser(crc));
    }

    @DELETE
    @AuthRequired
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
        boolean index = true;
        return response(req -> {
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
        }, getRequestUser(crc));
    }

    @PUT
    @AuthRequired
    @Path("{id}/citationdate")
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
        return response( req -> {
            if ( dsfTypeName.trim().isEmpty() ){
                return badRequest("Please provide a dataset field type in the request body.");
            }
            DatasetFieldType dsfType = null;
            if (!":publicationDate".equals(dsfTypeName)) {
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
                if (dsfType == null) {
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
                }
            }

            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
        }, getRequestUser(crc));
    }

    @DELETE
    @AuthRequired
    @Path("{id}/citationdate")
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
        return response( req -> {
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
            return ok("Citation Date for dataset " + id + " set to default");
        }, getRequestUser(crc));
    }
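
    /*
     * Example (sketch): setting and resetting the citation date field type; the
     * request body is the name of a date field, e.g. "dateOfDeposit", or
     * ":publicationDate" for the default.
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/citationdate" --data "dateOfDeposit"
     *   curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/citationdate"
     */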

    @GET
    @AuthRequired
    @Path("{id}/versions")
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {

        return response( req -> {
            Dataset dataset = findDatasetOrDie(id);
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;

            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
                                .stream()
                                .map( d -> json(d, deepLookup) )
                                .collect(toJsonArray()));
        }, getRequestUser(crc));
    }
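
    /*
     * Example (sketch): paging through versions without their file lists, which is
     * considerably cheaper for datasets with many files.
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/versions?excludeFiles=true&limit=10&offset=0"
     */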

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    public Response getVersion(@Context ContainerRequestContext crc,
                               @PathParam("id") String datasetId,
                               @PathParam("versionId") String versionId,
                               @QueryParam("excludeFiles") Boolean excludeFiles,
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                               @QueryParam("returnOwners") boolean returnOwners,
                               @Context UriInfo uriInfo,
                               @Context HttpHeaders headers) {
        return response( req -> {

            // If excludeFiles is null, the default is to provide the files, and because of this we need to check permissions.
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;

            Dataset dst = findDatasetOrDie(datasetId);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, dst, uriInfo, headers, includeDeaccessioned, checkPerms);

            if (dsv == null || dsv.getId() == null) {
                return notFound("Dataset version not found");
            }

            if (excludeFiles == null ? true : !excludeFiles) {
                dsv = datasetversionService.findDeep(dsv.getId());
            }
            return ok(json(dsv, null, excludeFiles == null ? true : !excludeFiles, returnOwners));
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/files")
    public Response getVersionFiles(@Context ContainerRequestContext crc,
                                    @PathParam("id") String datasetId,
                                    @PathParam("versionId") String versionId,
                                    @QueryParam("limit") Integer limit,
                                    @QueryParam("offset") Integer offset,
                                    @QueryParam("contentType") String contentType,
                                    @QueryParam("accessStatus") String accessStatus,
                                    @QueryParam("categoryName") String categoryName,
                                    @QueryParam("tabularTagName") String tabularTagName,
                                    @QueryParam("searchText") String searchText,
                                    @QueryParam("orderCriteria") String orderCriteria,
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                    @Context UriInfo uriInfo,
                                    @Context HttpHeaders headers) {
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
            try {
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
            }
            FileSearchCriteria fileSearchCriteria;
            try {
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
        }, getRequestUser(crc));
    }
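
    /*
     * Example (sketch): listing the files of a version with pagination, filtering,
     * and ordering; orderCriteria corresponds to
     * DatasetVersionFilesServiceBean.FileOrderCriteria (NameAZ by default).
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files?limit=10&offset=0&orderCriteria=Newest"
     */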

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/files/counts")
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
                                         @PathParam("id") String datasetId,
                                         @PathParam("versionId") String versionId,
                                         @QueryParam("contentType") String contentType,
                                         @QueryParam("accessStatus") String accessStatus,
                                         @QueryParam("categoryName") String categoryName,
                                         @QueryParam("tabularTagName") String tabularTagName,
                                         @QueryParam("searchText") String searchText,
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                         @Context UriInfo uriInfo,
                                         @Context HttpHeaders headers) {
        return response(req -> {
            FileSearchCriteria fileSearchCriteria;
            try {
                fileSearchCriteria = new FileSearchCriteria(
                        contentType,
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
                        categoryName,
                        tabularTagName,
                        searchText
                );
            } catch (IllegalArgumentException e) {
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
            }
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
            return ok(jsonObjectBuilder);
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/dirindex")
    @Produces("text/html")
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {

        folderName = folderName == null ? "" : folderName;
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;

        DatasetVersion version;
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);

        // return "NOT FOUND" if there is no such folder in the dataset version:

        if ("".equals(output)) {
            return notFound("Folder " + folderName + " does not exist");
        }

        String indexFileName = folderName.equals("") ? ".index.html"
                : ".index-" + folderName.replace('/', '_') + ".html";
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");

        return Response.ok()
                .entity(output)
                //.type("application/html").
                .build();
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/metadata")
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return response( req -> ok(
                    jsonByBlocks(
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
                                .getDatasetFields())), getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
                                            @PathParam("id") String datasetId,
                                            @PathParam("versionNumber") String versionNumber,
                                            @PathParam("block") String blockName,
                                            @Context UriInfo uriInfo,
                                            @Context HttpHeaders headers) {

        return response( req -> {
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );

            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
                if ( p.getKey().getName().equals(blockName) ) {
                    return ok(json(p.getKey(), p.getValue()));
                }
            }
            return notFound("metadata block named " + blockName + " not found");
        }, getRequestUser(crc));
    }

    /**
     * Add Signposting
     * @param datasetId
     * @param versionId
     * @param uriInfo
     * @param headers
     * @return
     */
    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/linkset")
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId,
           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        if (DS_VERSION_DRAFT.equals(versionId)) {
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
        }
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
        try {
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            return Response
                    .ok(Json.createObjectBuilder()
                            .add("linkset",
                                    new SignpostingResources(systemConfig, dsv,
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
                                                    .getJsonLinkset())
                            .build())
                    .type(MediaType.APPLICATION_JSON).build();
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }
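
    /*
     * Example (sketch): retrieving the Signposting linkset for a published version
     * (drafts are rejected above).
     *
     *   curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/linkset"
     */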

    @GET
    @AuthRequired
    @Path("{id}/modifyRegistration")
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
        return response( req -> {
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
            return ok("Dataset " + id + " target url updated");
        }, getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("/modifyRegistrationAll")
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
        return response( req -> {
            datasetService.findAll().forEach( ds -> {
                try {
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
                } catch (WrappedResponse ex) {
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
                }
            });
            return ok("Update All Dataset target url completed");
        }, getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("{id}/modifyRegistrationMetadata")
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {

        try {
            Dataset dataset = findDatasetOrDie(id);
            if (!dataset.isReleased()) {
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
            }
        } catch (WrappedResponse ex) {
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
        }

        return response(req -> {
            execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
            List<String> args = Arrays.asList(id);
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("/modifyRegistrationPIDMetadataAll")
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
        return response( req -> {
            datasetService.findAll().forEach( ds -> {
                try {
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
                } catch (WrappedResponse ex) {
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
                }
            });
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
        }, getRequestUser(crc));
    }

    @PUT
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
        }

        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);

            // clear possibly stale fields from the incoming dataset version.
            // creation and modification dates are updated by the commands.
            incomingVersion.setId(null);
            incomingVersion.setVersionNumber(null);
            incomingVersion.setMinorVersionNumber(null);
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
            incomingVersion.setDataset(ds);
            incomingVersion.setCreateTime(null);
            incomingVersion.setLastUpdateTime(null);

            if (!incomingVersion.getFileMetadatas().isEmpty()){
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
            }

            boolean updateDraft = ds.getLatestVersion().isDraft();

            DatasetVersion managedVersion;
            if (updateDraft) {
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
                managedVersion = managedDataset.getOrCreateEditVersion();
            } else {
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
            }
            return ok( json(managedVersion, true) );

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );

        } catch (WrappedResponse ex) {
            return ex.getResponse();

        }
    }
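
    /*
     * Example (sketch): replacing the metadata of the draft version from a JSON
     * file (the file name is illustrative); adding file metadata is rejected by
     * the check above.
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$ID/versions/:draft" --upload-file dataset-version.json
     */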

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
            OREMap ore = new OREMap(dsv,
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
            return ok(ore.getOREMapBuilder(true));

        } catch (WrappedResponse ex) {
            ex.printStackTrace();
            return ex.getResponse();
        } catch (Exception jpe) {
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
            jpe.printStackTrace();
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
        }
    }

    @GET
    @AuthRequired
    @Path("{id}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
    }

    @PUT
    @AuthRequired
    @Path("{id}/metadata")
    @Consumes("application/ld+json, application/json-ld")
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {

        try {
            Dataset ds = findDatasetOrDie(id);
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            //Get draft state as of now

            boolean updateDraft = ds.getLatestVersion().isDraft();
            //Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
            if (!hasValidTerms) {
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
            }
            DatasetVersion managedVersion;
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getLatestVersion();
            String info = updateDraft ? "Version Updated" : "Version Created";
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        } catch (JsonParsingException jpe) {
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
        }
    }

    @PUT
    @AuthRequired
    @Path("{id}/metadata/delete")
    @Consumes("application/ld+json, application/json-ld")
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
        try {
            Dataset ds = findDatasetOrDie(id);
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            //Get draft state as of now

            boolean updateDraft = ds.getLatestVersion().isDraft();
            //Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            DatasetVersion managedVersion;
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getLatestVersion();
            String info = updateDraft ? "Version Updated" : "Version Created";
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));

        } catch (WrappedResponse ex) {
            ex.printStackTrace();
            return ex.getResponse();
        } catch (JsonParsingException jpe) {
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
            jpe.printStackTrace();
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
        }
    }
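
    /*
     * Example (sketch): reading and updating metadata through the JSON-LD
     * (semantic) endpoints defined above.
     *
     *   curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/ld+json" "$SERVER_URL/api/datasets/$ID/metadata"
     *   curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/ld+json" -X PUT "$SERVER_URL/api/datasets/$ID/metadata" --upload-file metadata.jsonld
     */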

    @PUT
    @AuthRequired
    @Path("{id}/deleteMetadata")
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {

        DataverseRequest req = createDataverseRequest(getRequestUser(crc));

        return processDatasetFieldDataDelete(jsonBody, id, req);
    }

    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
        try {

            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            //Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            List<DatasetField> fields = new LinkedList<>();
            DatasetField singleField = null;

            JsonArray fieldsJson = json.getJsonArray("fields");
            if (fieldsJson == null) {
                singleField = jsonParser().parseField(json, Boolean.FALSE);
                fields.add(singleField);
            } else {
                fields = jsonParser().parseMultipleFields(json);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();

            for (DatasetField updateField : fields) {
                boolean found = false;
                for (DatasetField dsf : dsv.getDatasetFields()) {
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
                                                found = true;
                                                controlledVocabularyItemsToRemove.add(existing);
                                            }
                                        }
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                        }
                                    }
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
                                        dsf.getControlledVocabularyValues().remove(remove);
                                    }

                                } else {
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
                                        found = true;
                                        dsf.setSingleControlledVocabularyValue(null);
                                    }

                                }
                            } else {
                                if (!updateField.getDatasetFieldType().isCompound()) {
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
                                                    found = true;
                                                    datasetFieldValueItemsToRemove.add(dfv);
                                                }
                                            }
                                            if (!found) {
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                            }
                                        }
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldValues().remove(remove);
                                        });

                                    } else {
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
                                            found = true;
                                            dsf.setSingleValue(null);
                                        }

                                    }
                                } else {
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                        String deleteVal = getCompoundDisplayValue(dfcv);
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
                                            String existingString = getCompoundDisplayValue(existing);
                                            if (existingString.equals(deleteVal)) {
                                                found = true;
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
                                            }
                                        }
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
                                        });
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                        }
                                    }
                                }
                            }
                        } else {
                            found = true;
                            dsf.setSingleValue(null);
                            dsf.setSingleControlledVocabularyValue(null);
                        }
                        break;
                    }
                }
                if (!found){
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                }
            }

            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
            return ok(json(managedVersion, true));

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());

        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
            return ex.getResponse();

        }

    }

    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
        String returnString = "";
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
            for (String value : dsf.getValues()) {
                if (!(value == null)) {
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
                }
            }
        }
        return returnString;
    }

    @PUT
    @AuthRequired
    @Path("{id}/editMetadata")
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {

        Boolean replaceData = replace != null;
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));

        return processDatasetUpdate(jsonBody, id, req, replaceData);
    }
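    /*
     * Illustrative request for the editMetadata endpoint above (a sketch, not from this file):
     * assuming this class is mounted at /api/datasets as in the Dataverse native API, and that
     * $API_TOKEN, $SERVER_URL, $ID and edit-metadata.json are placeholders supplied by the caller:
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X PUT \
     *        "$SERVER_URL/api/datasets/$ID/editMetadata?replace=true" \
     *        --upload-file edit-metadata.json
     *
     * Note that the mere presence of the "replace" query parameter is what enables replace mode
     * here, since replaceData is simply "replace != null".
     */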

    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData) {
        try {

            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            // Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            List<DatasetField> fields = new LinkedList<>();
            DatasetField singleField = null;

            JsonArray fieldsJson = json.getJsonArray("fields");
            if (fieldsJson == null) {
                singleField = jsonParser().parseField(json, Boolean.FALSE);
                fields.add(singleField);
            } else {
                fields = jsonParser().parseMultipleFields(json);
            }

            String validationErrors = validateDatasetFieldValues(fields);

            if (!validationErrors.isEmpty()) {
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + validationErrors);
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + validationErrors);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            // Loop through the update fields and compare them to the fields already in the version:
            // if a field type already exists, add to or replace its values; if not, add the entire DatasetField.
            for (DatasetField updateField : fields) {
                boolean found = false;
                for (DatasetField dsf : dsv.getDatasetFields()) {
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                        found = true;
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
                            List<ControlledVocabularyValue> priorCVV = new ArrayList<>();
                            String cvvDisplay = "";

                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                cvvDisplay = dsf.getDisplayValue();
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
                                    priorCVV.add(cvvOld);
                                }
                            }

                            if (replaceData) {
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
                                    dsf.setDatasetFieldValues(new ArrayList<>());
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
                                    priorCVV.clear();
                                    dsf.getControlledVocabularyValues().clear();
                                } else {
                                    dsf.setSingleValue("");
                                    dsf.setSingleControlledVocabularyValue(null);
                                }
                                cvvDisplay = "";
                            }
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
                                            priorCVV.add(cvv);
                                        }
                                    }
                                    dsf.setControlledVocabularyValues(priorCVV);
                                } else {
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
                                }
                            } else {
                                if (!updateField.getDatasetFieldType().isCompound()) {
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
                                                dfv.setDatasetField(dsf);
                                                dsf.getDatasetFieldValues().add(dfv);
                                            }
                                        }
                                    } else {
                                        dsf.setSingleValue(updateField.getValue());
                                    }
                                } else {
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
                                            dfcv.setParentDatasetField(dsf);
                                            dsf.setDatasetVersion(dsv);
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
                                        }
                                    }
                                }
                            }
                        } else {
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
                            }
                        }
                        break;
                    }
                }
                if (!found) {
                    updateField.setDatasetVersion(dsv);
                    dsv.getDatasetFields().add(updateField);
                }
            }
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();

            return ok(json(managedVersion, true));

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());

        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Update metadata error: " + ex.getMessage(), ex);
            return ex.getResponse();

        }
    }
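    /*
     * Illustrative shape of the JSON consumed above (a sketch; the field names below are
     * hypothetical examples, not taken from this file). A body may be a single field object, or a
     * "fields" array as read via json.getJsonArray("fields"):
     *
     *   { "fields": [
     *       { "typeName": "subject", "value": ["Medicine, Health and Life Sciences"] },
     *       { "typeName": "depositor", "value": "Doe, Jane" }
     *   ] }
     *
     * Whether values are merged with or overwrite existing ones depends on the replaceData flag
     * and on whether the field type allows multiple values, as implemented in the loop above.
     */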

    private String validateDatasetFieldValues(List<DatasetField> fields) {
        StringBuilder error = new StringBuilder();

        for (DatasetField dsf : fields) {
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) {
                error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
            }
        }

        if (!error.toString().isEmpty()) {
            return error.toString();
        }
        return "";
    }

    /**
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
     */
    @GET
    @AuthRequired
    @Path("{id}/actions/:publish")
    @Deprecated
    public Response publishDataseUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type) {
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
        return publishDataset(crc, id, type, false);
    }

    @POST
    @AuthRequired
    @Path("{id}/actions/:publish")
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
        try {
            if (type == null) {
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
            }
            boolean updateCurrent = false;
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            type = type.toLowerCase();
            boolean isMinor = false;
            switch (type) {
                case "minor":
                    isMinor = true;
                    break;
                case "major":
                    isMinor = false;
                    break;
                case "updatecurrent":
                    if (user.isSuperuser()) {
                        updateCurrent = true;
                    } else {
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
                    }
                    break;
                default:
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
            }

            Dataset ds = findDatasetOrDie(id);

            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
            if (!hasValidTerms) {
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
            }

            if (mustBeIndexed) {
                logger.fine("IT: " + ds.getIndexTime());
                logger.fine("MT: " + ds.getModificationTime());
                logger.fine("PIT: " + ds.getPermissionIndexTime());
                logger.fine("PMT: " + ds.getPermissionModificationTime());
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
                }
                /*
                 * Some calls, such as the /datasets/actions/:import* commands, do not set the
                 * modification or permission modification times. The checks here try to detect
                 * whether indexing or permission indexing could be pending: they check whether
                 * the relevant modification time is set and, if so, whether the index time is
                 * also set and, if so, whether it is after the modification time. If the
                 * modification time is set and the index time is null or is before the mod time,
                 * the 409/conflict error is returned.
                 */
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
                }
            }
            if (updateCurrent) {
                /*
                 * Note: The code here mirrors that in the
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
                 * to the core logic (i.e. beyond updating the messaging about results) should
                 * be applied to the code there as well.
                 */
                String errorMsg = null;
                String successMsg = null;
                try {
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
                    ds = commandEngine.submit(cmd);
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");

                    // If configured, update the archive copy as well
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
                    DatasetVersion updateVersion = ds.getLatestVersion();
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
                    if (archiveCommand != null) {
                        // Delete the record of any existing copy since it is now out of date/incorrect
                        updateVersion.setArchivalCopyLocation(null);
                        /*
                         * Then try to generate and submit an archival copy. Note that running this
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
                         * "The attribute [id] of class
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
                         * key column in the database. Updates are not allowed." To avoid that, and to
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
                         * pulled this out as a separate submit().
                         */
                        try {
                            updateVersion = commandEngine.submit(archiveCommand);
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
                            } else {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
                            }
                        } catch (CommandException ex) {
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
                            logger.severe(ex.getMessage());
                        }
                    }
                } catch (CommandException ex) {
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                    logger.severe(ex.getMessage());
                }
                if (errorMsg != null) {
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
                } else {
                    return Response.ok(Json.createObjectBuilder()
                            .add("status", ApiConstants.STATUS_OK)
                            .add("status_details", successMsg)
                            .add("data", json(ds)).build())
                            .type(MediaType.APPLICATION_JSON)
                            .build();
                }
            } else {
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
                        createDataverseRequest(user),
                        isMinor));
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
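    /*
     * Illustrative request for the publish endpoint above (a sketch; the /api/datasets mount
     * point and the placeholders are assumptions, not taken from this file):
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *        "$SERVER_URL/api/datasets/$ID/actions/:publish?type=major"
     *
     * "type" accepts "major", "minor", or "updatecurrent" (superusers only); adding
     * "&assureIsIndexed=true" makes the call return 409 while indexing is still pending, as
     * implemented above.
     */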

    @POST
    @AuthRequired
    @Path("{id}/actions/:releasemigrated")
    @Consumes("application/ld+json, application/json-ld")
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
            }

            Dataset ds = findDatasetOrDie(id);
            try {
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
                logger.fine("Submitted date: " + pubDate);
                LocalDateTime dateTime = null;
                if (!StringUtils.isEmpty(pubDate)) {
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
                    final Timestamp time = Timestamp.valueOf(dateTime);
                    // Set the version release date
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
                }
                // dataset.getPublicationDateFormattedYYYYMMDD())
                // Assign a version number if not set
                if (ds.getLatestVersion().getVersionNumber() == null) {

                    if (ds.getVersions().size() == 1) {
                        // First release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
                    } else {
                        // Major, non-first release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    }
                }
                if (ds.getLatestVersion().getVersionNumber() == 1 && ds.getLatestVersion().getMinorVersionNumber() == 0) {
                    // Also set the publication date if this is the first release
                    if (dateTime != null) {
                        ds.setPublicationDate(Timestamp.valueOf(dateTime));
                    }
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
                    // is null, so set it here.
                    ds.setReleaseUser((AuthenticatedUser) user);
                }
            } catch (Exception e) {
                logger.fine(e.getMessage());
                throw new BadRequestException("Unable to set publication date ("
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
            }
            /*
             * Note: The code here mirrors that in the
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
             * to the core logic (i.e. beyond updating the messaging about results) should
             * be applied to the code there as well.
             */
            String errorMsg = null;
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);

            try {
                // ToDo - should this be in onSuccess()? May relate to the todo above
                if (prePubWf.isPresent()) {
                    // Start the workflow; the workflow will call FinalizeDatasetPublication later
                    wfService.start(prePubWf.get(),
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
                            false);
                } else {
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
                            createDataverseRequest(user), !contactPIDProvider);
                    ds = commandEngine.submit(cmd);
                }
            } catch (CommandException ex) {
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                logger.severe(ex.getMessage());
            }

            if (errorMsg != null) {
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
            } else {
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
            }

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
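    /*
     * Illustrative JSON-LD body for :releasemigrated above (a sketch; the date is a hypothetical
     * value and the exact term URI is whatever JsonLDTerm.schemaOrg("datePublished") expands to):
     *
     *   { "@context": { "schema": "http://schema.org/" },
     *     "schema:datePublished": "2020-10-26" }
     *
     * The body is sent with Content-Type: application/ld+json; adding ?updatepidatprovider=true
     * also pushes the publication to the PID provider (the contactPIDProvider flag above).
     */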

    @POST
    @AuthRequired
    @Path("{id}/move/{targetDataverseAlias}")
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
        try {
            User u = getRequestUser(crc);
            Dataset ds = findDatasetOrDie(id);
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
            if (target == null) {
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
            }
            // Command requires Super user - it will be tested by the command
            execCommand(new MoveDatasetCommand(
                    createDataverseRequest(u), ds, target, force
            ));
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
        } catch (WrappedResponse ex) {
            if (ex.getCause() instanceof UnforcedCommandException) {
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
            } else {
                return ex.getResponse();
            }
        }
    }
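    /*
     * Illustrative request for the move endpoint above (a sketch; placeholders and the
     * /api/datasets mount point are assumptions):
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *        "$SERVER_URL/api/datasets/$ID/move/$TARGET_ALIAS?forceMove=true"
     *
     * Without forceMove=true, an UnforcedCommandException is surfaced with a suggestion to retry
     * with force, as handled in the catch block above.
     */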

    @POST
    @AuthRequired
    @Path("{id}/files/actions/:set-embargo")
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody) {

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);

        if (!hasValidTerms) {
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
        }

        // The client must be a superuser, or must have EditDataset permission on these files while the files are unreleased.
        /*
         * This is only a pre-test - if there's no draft version, there are clearly no
         * files that a normal user can change. The converse is not true. A draft
         * version could contain only files that have already been released. Further, we
         * haven't checked the file list yet so the user could still be trying to change
         * released files even if there are some unreleased/draft-only files. Doing this
         * check here does avoid having to do further parsing for some error cases. It
         * also checks the user can edit this dataset, so we don't have to make that
         * check later.
         */

        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT)) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
        long maxEmbargoDurationInMonths = 0;
        try {
            maxEmbargoDurationInMonths = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
        } catch (NumberFormatException nfe) {
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
            }
        }
        if (maxEmbargoDurationInMonths == 0) {
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
        }

        JsonObject json = JsonUtil.getJsonObject(jsonBody);

        Embargo embargo = new Embargo();

        LocalDate currentDateTime = LocalDate.now();
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));

        // A :MaxEmbargoDurationInMonths value of -1 means there is no upper limit
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
        // dateAvailable must not be in the past
        if (dateAvailable.isAfter(currentDateTime)) {
            embargo.setDateAvailable(dateAvailable);
        } else {
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
        }

        // dateAvailable must be within limits
        if (maxEmbargoDateTime != null) {
            if (dateAvailable.isAfter(maxEmbargoDateTime)) {
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: " + maxEmbargoDurationInMonths);
            }
        }

        embargo.setReason(json.getString("reason"));

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> filesToEmbargo = new LinkedList<>();

        // Extract fileIds from the json, find the datafiles and add them to the list
        if (json.containsKey("fileIds")) {
            JsonArray fileIds = json.getJsonArray("fileIds");
            for (JsonValue jsv : fileIds) {
                try {
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
                    filesToEmbargo.add(dataFile);
                } catch (WrappedResponse ex) {
                    return ex.getResponse();
                }
            }
        }

        List<Embargo> orphanedEmbargoes = new ArrayList<>();
        // Check that the files belong to the dataset
        if (datasetFiles.containsAll(filesToEmbargo)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : filesToEmbargo) {
                // A superuser can overrule an existing embargo, even on released files
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "You do not have permission to embargo the following files")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            embargo = embargoService.merge(embargo);
            // Good request, so add the embargo. Track any existing embargoes so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : filesToEmbargo) {
                Embargo emb = datafile.getEmbargo();
                if (emb != null) {
                    emb.getDataFiles().remove(datafile);
                    if (emb.getDataFiles().isEmpty()) {
                        orphanedEmbargoes.add(emb);
                    }
                }
                // Save merges the datafile with an embargo into the context
                datafile.setEmbargo(embargo);
                fileService.save(datafile);
            }
            // Call the service so the action gets logged
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
            if (orphanedEmbargoes.size() > 0) {
                for (Embargo emb : orphanedEmbargoes) {
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
                }
            }
            // If superuser, report changes to any released files
            if (authenticatedUser.isSuperuser()) {
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
                if (!releasedFiles.isBlank()) {
                    actionLogSvc
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
                                            + releasedFiles + ".")
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
                }
            }
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
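    /*
     * Illustrative body for :set-embargo above (a sketch; the ids, date and reason are
     * hypothetical):
     *
     *   { "dateAvailable": "2026-01-01",
     *     "reason": "Under review",
     *     "fileIds": [301, 302] }
     *
     * dateAvailable must be in the future and within :MaxEmbargoDurationInMonths, and all fileIds
     * must belong to the dataset, per the checks above.
     */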

    @POST
    @AuthRequired
    @Path("{id}/files/actions/:unset-embargo")
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody) {

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        // The client must be a superuser, or must have EditDataset permission on these files while the files are unreleased (DRAFT).
        // ToDo - here and below - check the release status of the files and not the dataset state (a draft dataset version can still have released files)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT)) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
        // ToDo - is 400 the right status when embargoes are not enabled?
        // ToDo - handle getting the Long for the duration in one place (a settings getLong method? or is that only in the view-scoped wrapper?)
        int maxEmbargoDurationInMonths = 0;
        try {
            maxEmbargoDurationInMonths = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
        } catch (NumberFormatException nfe) {
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
            }
        }
        if (maxEmbargoDurationInMonths == 0) {
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
        }

        JsonObject json = JsonUtil.getJsonObject(jsonBody);

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> embargoFilesToUnset = new LinkedList<>();

        // Extract fileIds from the json, find the datafiles and add them to the list
        if (json.containsKey("fileIds")) {
            JsonArray fileIds = json.getJsonArray("fileIds");
            for (JsonValue jsv : fileIds) {
                try {
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
                    embargoFilesToUnset.add(dataFile);
                } catch (WrappedResponse ex) {
                    return ex.getResponse();
                }
            }
        }

        List<Embargo> orphanedEmbargoes = new ArrayList<>();
        // Check that the files belong to the dataset
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            for (DataFile datafile : embargoFilesToUnset) {
                // A superuser can overrule an existing embargo, even on released files
                if (datafile.getEmbargo()==null || ((datafile.isReleased() && datafile.getEmbargo() != null) && !authenticatedUser.isSuperuser())) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : embargoFilesToUnset) {
                Embargo emb = datafile.getEmbargo();
                if (emb != null) {
                    emb.getDataFiles().remove(datafile);
                    if (emb.getDataFiles().isEmpty()) {
                        orphanedEmbargoes.add(emb);
                    }
                }
                // Save merges the datafile with an embargo into the context
                datafile.setEmbargo(null);
                fileService.save(datafile);
            }
            if (orphanedEmbargoes.size() > 0) {
                for (Embargo emb : orphanedEmbargoes) {
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
                }
            }
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d -> d.getId().toString()).collect(Collectors.joining(","));
            if (!releasedFiles.isBlank()) {
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
                actionLogSvc.log(removeRecord);
            }
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
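    /*
     * Illustrative body for :unset-embargo above (a sketch; the ids are hypothetical):
     *
     *   { "fileIds": [301, 302] }
     *
     * Every listed file must currently have an embargo, and non-superusers can only unset
     * embargoes on unreleased files, per the checks above.
     */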

    @PUT
    @AuthRequired
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
        try {
            User u = getRequestUser(crc);
            Dataset linked = findDatasetOrDie(linkedDatasetId);
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
            if (linked == null) {
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
            }
            if (linking == null) {
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
            }
            execCommand(new LinkDatasetCommand(
                    createDataverseRequest(u), linking, linked
            ));
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
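    /*
     * Illustrative request for the link endpoint above (a sketch; placeholders and the
     * /api/datasets mount point are assumptions):
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X PUT \
     *        "$SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ALIAS"
     */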

    @GET
    @Path("{id}/versions/{versionId}/customlicense")
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        User user = session.getUser();
        String persistentId;
        try {
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
            }
            persistentId = getRequestParameter(":persistentId".substring(1));
            if (versionId.equals(DS_VERSION_DRAFT)) {
                versionId = "DRAFT";
            }
        } catch (WrappedResponse wrappedResponse) {
            return wrappedResponse.getResponse();
        }
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
    }

    @GET
    @AuthRequired
    @Path("{id}/links")
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
        try {
            User u = getRequestUser(crc);
            if (!u.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Not a superuser");
            }
            Dataset dataset = findDatasetOrDie(idSupplied);

            long datasetId = dataset.getId();
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
                dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
            }
            JsonObjectBuilder response = Json.createObjectBuilder();
            response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
            return ok(response);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    /**
     * Add a given assignment to a given user or group
     * @param ra     role assignment DTO
     * @param id     dataset id
     * @param apiKey api key of the user making the request
     */
    @POST
    @AuthRequired
    @Path("{identifier}/assignments")
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
        try {
            Dataset dataset = findDatasetOrDie(id);

            RoleAssignee assignee = findAssignee(ra.getAssignee());
            if (assignee == null) {
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
            }

            DataverseRole theRole = null;
            Dataverse dv = dataset.getOwner();
            while ((theRole == null) && (dv != null)) {
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
                    if (aRole.getAlias().equals(ra.getRole())) {
                        theRole = aRole;
                        break;
                    }
                }
                dv = dv.getOwner();
            }
            if (theRole == null) {
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
            }

            String privateUrlToken = null;
            return ok(
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
        } catch (WrappedResponse ex) {
            List<String> args = Arrays.asList(ex.getMessage());
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
            return ex.getResponse();
        }

    }
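    /*
     * Illustrative body for the assignments endpoint above (a sketch; the assignee identifier and
     * role alias are hypothetical examples, and any role alias available in the dataset's owner
     * hierarchy would work):
     *
     *   { "assignee": "@jdoe", "role": "curator" }
     */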

    @DELETE
    @AuthRequired
    @Path("{identifier}/assignments/{id}")
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
        if (ra != null) {
            try {
                findDatasetOrDie(dsId);
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
            } catch (WrappedResponse ex) {
                return ex.getResponse();
            }
        } else {
            List<String> args = Arrays.asList(Long.toString(assignmentId));
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
        }
    }

    @GET
    @AuthRequired
    @Path("{identifier}/assignments")
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
        return response(req ->
                ok(execCommand(
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/privateUrl")
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
        return response(req -> {
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
            return (privateUrl != null) ? ok(json(privateUrl))
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
        }, getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("{id}/privateUrl")
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
        if (anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames) == null) {
            throw new NotAcceptableException("Anonymized Access not enabled");
        }
        return response(req ->
                ok(json(execCommand(
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
    }
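    /*
     * Illustrative request for the private URL endpoint above (a sketch; placeholders and the
     * /api/datasets mount point are assumptions):
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *        "$SERVER_URL/api/datasets/$ID/privateUrl?anonymizedAccess=true"
     *
     * anonymizedAccess=true is rejected with 406 unless the :AnonymizedFieldTypeNames setting is
     * configured, as checked above; GET and DELETE on the same path read and revoke the URL.
     */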

    @DELETE
    @AuthRequired
    @Path("{id}/privateUrl")
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
        return response(req -> {
            Dataset dataset = findDatasetOrDie(idSupplied);
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
            if (privateUrl != null) {
                execCommand(new DeletePrivateUrlCommand(req, dataset));
                return ok("Private URL deleted.");
            } else {
                return notFound("No Private URL to delete.");
            }
        }, getRequestUser(crc));
    }

    @GET
    @AuthRequired
    @Path("{id}/thumbnail/candidates")
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            boolean canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
            if (!canUpdateThumbnail) {
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
            }
            JsonArrayBuilder data = Json.createArrayBuilder();
            boolean considerDatasetLogoAsCandidate = true;
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
                JsonObjectBuilder candidate = Json.createObjectBuilder();
                String base64image = datasetThumbnail.getBase64image();
                if (base64image != null) {
                    logger.fine("found a candidate!");
                    candidate.add("base64image", base64image);
                }
                DataFile dataFile = datasetThumbnail.getDataFile();
                if (dataFile != null) {
                    candidate.add("dataFileId", dataFile.getId());
                }
                data.add(candidate);
            }
            return ok(data);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
        }
    }

    @GET
    @Produces({"image/png"})
    @Path("{id}/thumbnail")
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
            if (is == null) {
                return notFound("Thumbnail not available");
            }
            return Response.ok(is).build();
        } catch (WrappedResponse wr) {
            return notFound("Thumbnail not available");
        }
    }

    @GET
    @Produces({ "image/png" })
    @Path("{id}/logo")
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
            if (is == null) {
                return notFound("Logo not available");
            }
            return Response.ok(is).build();
        } catch (WrappedResponse wr) {
            return notFound("Logo not available");
        }
    }

    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
    @POST
    @AuthRequired
    @Path("{id}/thumbnail/{dataFileId}")
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
        try {
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @POST
    @AuthRequired
    @Path("{id}/thumbnail")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
        try {
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }
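    /*
     * Illustrative request for the thumbnail upload endpoint above (a sketch; placeholders and
     * the /api/datasets mount point are assumptions). The multipart part name must be "file",
     * matching the @FormDataParam("file") parameter:
     *
     *   curl -H "X-Dataverse-key: $API_TOKEN" -X POST \
     *        -F "file=@thumbnail.png" \
     *        "$SERVER_URL/api/datasets/$ID/thumbnail"
     *
     * POST {id}/thumbnail/{dataFileId} instead promotes an existing data file to be the
     * thumbnail, and DELETE {id}/thumbnail removes it.
     */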
1957

1958
    @DELETE
1959
    @AuthRequired
1960
    @Path("{id}/thumbnail")
1961
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
1962
        try {
1963
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
1964
            return ok("Dataset thumbnail removed.");
×
1965
        } catch (WrappedResponse wr) {
×
1966
            return wr.getResponse();
×
1967
        }
1968
    }
1969

1970
    @GET
1971
    @AuthRequired
1972
    @Path("{identifier}/dataCaptureModule/rsync")
1973
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
1974
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
1975
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
×
1976
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
×
1977
        }
1978
        Dataset dataset = null;
×
1979
        try {
1980
            dataset = findDatasetOrDie(id);
×
1981
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1982
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
×
1983
            
1984
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
×
1985
            if (lock == null) {
×
1986
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
1987
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
×
1988
            }
1989
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
×
1990
        } catch (WrappedResponse wr) {
×
1991
            return wr.getResponse();
×
1992
        } catch (EJBException ex) {
×
1993
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
×
1994
        }
1995
    }
1996
    
1997
    /**
1998
     * This api endpoint triggers the creation of a "package" file in a dataset
1999
     * after that package has been moved onto the same filesystem via the Data Capture Module.
2000
     * The "package" is really just the way Dataverse interprets a folder created by the DCM, treating it as a single file.
2001
     * The "package" can be downloaded over RSAL.
2002
     *
2003
     * This endpoint currently supports both POSIX file storage and AWS S3 storage in Dataverse, and acts accordingly depending on which one is active.
2004
     *
2005
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
2006
     * But due to the potentially immense number of files (millions), the package approach was taken.
2007
     * This is relevant because the posix ("file") code contains many remnants of that development work.
2008
     * The S3 code was written later and only supports import as packages. It borrows heavily from FileRecordWriter.
2009
     * -MAD 4.9.1
2010
     */
2011
    @POST
2012
    @AuthRequired
2013
    @Path("{identifier}/dataCaptureModule/checksumValidation")
2014
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
2015
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
2016
        AuthenticatedUser authenticatedUser = null;
×
2017
        try {
2018
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2019
        } catch (WrappedResponse ex) {
×
2020
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
2021
        }
×
2022
        if (!authenticatedUser.isSuperuser()) {
×
2023
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2024
        }
2025
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
2026
        try {
2027
            Dataset dataset = findDatasetOrDie(id);
×
2028
            if ("validation passed".equals(statusMessageFromDcm)) {
×
2029
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
2030

2031
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
×
2032
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
2033
                int totalSize = jsonFromDcm.getInt("totalSize");
×
2034
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
×
2035
                
2036
                if (storageDriverType.equals("file")) {
×
2037
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
2038

2039
                    ImportMode importMode = ImportMode.MERGE;
×
2040
                    try {
2041
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, new Long(totalSize), importMode));
×
2042
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
2043
                        String message = jsonFromImportJobKickoff.getString("message");
×
2044
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2045
                        job.add("jobId", jobId);
×
2046
                        job.add("message", message);
×
2047
                        return ok(job);
×
2048
                    } catch (WrappedResponse wr) {
×
2049
                        String message = wr.getMessage();
×
2050
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
×
2051
                    }
2052
                } else if(storageDriverType.equals(DataAccess.S3)) {
×
2053
                    
2054
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
2055
                    try {
2056
                        
2057
                        // Where the heavy lifting is actually done: moving the S3 files over and making Dataverse aware of the existence of the package
2058
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
2059
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize));
×
2060
                        
2061
                        if (packageFile == null) {
×
2062
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
2063
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
2064
                        }
2065
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
2066
                        if (dcmLock == null) {
×
2067
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
2068
                        } else {
2069
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
2070
                            dataset.removeLock(dcmLock);
×
2071
                        }
2072
                        
2073
                        // update version using the command engine to enforce user permissions and constraints
2074
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
2075
                            try {
2076
                                Command<Dataset> cmd;
2077
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
×
2078
                                commandEngine.submit(cmd);
×
2079
                            } catch (CommandException ex) {
×
2080
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
2081
                            }
×
2082
                        } else {
2083
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
×
2084
                                    + "single version in draft mode.";
2085
                            logger.log(Level.SEVERE, constraintError);
×
2086
                        }
2087

2088
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2089
                        return ok(job);
×
2090
                        
2091
                    } catch (IOException e) {
×
2092
                        String message = e.getMessage();
×
2093
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
×
2094
                    }
2095
                } else {
2096
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
×
2097
                }
2098
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
2099
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
×
2100
                distinctAuthors.values().forEach((value) -> {
×
2101
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2102
                });
×
2103
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
2104
                if (superUsers != null && !superUsers.isEmpty()) {
×
2105
                    superUsers.forEach((au) -> {
×
2106
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2107
                    });
×
2108
                }
2109
                return ok("User notified about checksum validation failure.");
×
2110
            } else {
2111
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
×
2112
            }
2113
        } catch (WrappedResponse ex) {
×
2114
            return ex.getResponse();
×
2115
        }
2116
    }
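    /*
     * A minimal usage sketch for the checksumValidation callback above, assuming SERVER_URL,
     * DATASET_ID, and a superuser API token in API_TOKEN (all placeholders). The JSON keys mirror
     * exactly what this method reads from the DCM payload: "status", "uploadFolder", "totalSize";
     * the values shown are made-up examples.
     *
     *   String body = "{\"status\":\"validation passed\",\"uploadFolder\":\"DNXV2H\",\"totalSize\":1234567}";
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/dataCaptureModule/checksumValidation"))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .header("Content-Type", "application/json")
     *           .POST(HttpRequest.BodyPublishers.ofString(body))
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     */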
2117
    
2118

2119
    @POST
2120
    @AuthRequired
2121
    @Path("{id}/submitForReview")
2122
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2123
        try {
2124
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
2125
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2126
            
2127
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2128
            
2129
            result.add("inReview", inReview);
×
2130
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2131
            return ok(result);
×
2132
        } catch (WrappedResponse wr) {
×
2133
            return wr.getResponse();
×
2134
        }
2135
    }
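    /*
     * A minimal usage sketch for submitForReview, with the same SERVER_URL, DATASET_ID and
     * API_TOKEN placeholders. No request body is required.
     *
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/submitForReview"))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .POST(HttpRequest.BodyPublishers.noBody())
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     *   // The response JSON contains "inReview" and a confirmation "message", as built above.
     */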
2136

2137
    @POST
2138
    @AuthRequired
2139
    @Path("{id}/returnToAuthor")
2140
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2141

2142
        if (jsonBody == null || jsonBody.isEmpty()) {
×
2143
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2144
        }
2145
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2146
        try {
2147
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2148
            String reasonForReturn = null;
×
2149
            reasonForReturn = json.getString("reasonForReturn");
×
UNCOV
2150
            if (reasonForReturn == null || reasonForReturn.isEmpty()) { // a non-empty reasonForReturn is required
×
NEW
2151
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2152
            }
2153
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2154
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2155

2156
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2157
            result.add("inReview", false);
×
2158
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2159
            return ok(result);
×
2160
        } catch (WrappedResponse wr) {
×
2161
            return wr.getResponse();
×
2162
        }
2163
    }
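    /*
     * A minimal usage sketch for returnToAuthor, with the same placeholders. The body must contain
     * the "reasonForReturn" field read above; the reason text here is just an example.
     *
     *   String body = "{\"reasonForReturn\":\"Please add a README describing the variables.\"}";
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/returnToAuthor"))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .header("Content-Type", "application/json")
     *           .POST(HttpRequest.BodyPublishers.ofString(body))
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     */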
2164

2165
    @GET
2166
    @AuthRequired
2167
    @Path("{id}/curationStatus")
2168
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2169
        try {
2170
            Dataset ds = findDatasetOrDie(idSupplied);
×
2171
            DatasetVersion dsv = ds.getLatestVersion();
×
2172
            User user = getRequestUser(crc);
×
2173
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
2174
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2175
            } else {
2176
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2177
            }
2178
        } catch (WrappedResponse wr) {
×
2179
            return wr.getResponse();
×
2180
        }
2181
    }
2182

2183
    @PUT
2184
    @AuthRequired
2185
    @Path("{id}/curationStatus")
2186
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2187
        Dataset ds = null;
×
2188
        User u = null;
×
2189
        try {
2190
            ds = findDatasetOrDie(idSupplied);
×
2191
            u = getRequestUser(crc);
×
2192
        } catch (WrappedResponse wr) {
×
2193
            return wr.getResponse();
×
2194
        }
×
2195
        try {
2196
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2197
            return ok("Curation Status updated");
×
2198
        } catch (WrappedResponse wr) {
×
2199
            // Just change to Bad Request and send
2200
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2201
        }
2202
    }
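    /*
     * A minimal usage sketch for setting a curation status label, with the same placeholders.
     * The label is passed as a query parameter and should be URL-encoded; the example label is
     * hypothetical and would normally have to match a label set configured for the collection.
     *
     *   String label = URLEncoder.encode("Author contacted", StandardCharsets.UTF_8);
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/curationStatus?label=" + label))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .PUT(HttpRequest.BodyPublishers.noBody())
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     */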
2203

2204
    @DELETE
2205
    @AuthRequired
2206
    @Path("{id}/curationStatus")
2207
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2208
        Dataset ds = null;
×
2209
        User u = null;
×
2210
        try {
2211
            ds = findDatasetOrDie(idSupplied);
×
2212
            u = getRequestUser(crc);
×
2213
        } catch (WrappedResponse wr) {
×
2214
            return wr.getResponse();
×
2215
        }
×
2216
        try {
2217
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2218
            return ok("Curation Status deleted");
×
2219
        } catch (WrappedResponse wr) {
×
2220
            //Just change to Bad Request and send
2221
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2222
        }
2223
    }
2224

2225
    @GET
2226
    @AuthRequired
2227
    @Path("{id}/uploadurls")
2228
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2229
        try {
2230
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2231

2232
            boolean canUpdateDataset = false;
×
2233
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2234
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2235
            if (!canUpdateDataset) {
×
2236
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2237
            }
2238
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2239
            if (s3io == null) {
×
2240
                return error(Response.Status.NOT_FOUND,
×
2241
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2242
            }
NEW
2243
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
NEW
2244
            if (maxSize != null) {
×
NEW
2245
                if(fileSize > maxSize) {
×
NEW
2246
                    return error(Response.Status.BAD_REQUEST,
×
2247
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2248
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2249
                }
2250
            }
NEW
2251
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
NEW
2252
            if (limit != null) {
×
NEW
2253
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
NEW
2254
                    return error(Response.Status.BAD_REQUEST,
×
2255
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
NEW
2256
                                    "The remaing file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2257
                }
2258
            }
2259
            JsonObjectBuilder response = null;
×
2260
            String storageIdentifier = null;
×
2261
            try {
2262
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
2263
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2264

2265
            } catch (IOException io) {
×
2266
                logger.warning(io.getMessage());
×
2267
                throw new WrappedResponse(io,
×
2268
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request"));
×
2269
            }
×
2270

2271
            response.add("storageIdentifier", storageIdentifier);
×
2272
            return ok(response);
×
2273
        } catch (WrappedResponse wr) {
×
2274
            return wr.getResponse();
×
2275
        }
2276
    }
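    /*
     * A minimal usage sketch for requesting direct multipart upload URLs, with the same
     * placeholders and assuming an S3 store with direct upload enabled for the dataset.
     * The "size" query parameter is the planned upload size in bytes.
     *
     *   long fileSize = 5L * 1024 * 1024 * 1024; // e.g. a 5 GiB upload
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/uploadurls?size=" + fileSize))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .GET()
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     *   // The JSON response includes the "storageIdentifier" added above plus the generated upload URLs.
     */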
2277

2278
    @DELETE
2279
    @AuthRequired
2280
    @Path("mpupload")
2281
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2282
        try {
2283
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2284
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2285
            User user = session.getUser();
×
2286
            if (!user.isAuthenticated()) {
×
2287
                try {
2288
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2289
                } catch (WrappedResponse ex) {
×
2290
                    logger.info(
×
2291
                            "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
2292
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2293
                    throw ex;
×
2294
                }
×
2295
            }
2296
            boolean allowed = false;
×
2297
            if (dataset != null) {
×
2298
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2299
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2300
            } else {
2301
                /*
2302
                 * The only legitimate case where a global id won't correspond to a dataset is
2303
                 * for uploads during creation. Given that this call will still fail unless all
2304
                 * three parameters correspond to an active multipart upload, it should be safe
2305
                 * to allow the attempt for an authenticated user. If there are concerns about
2306
                 * permissions, one could check with the current design that the user is allowed
2307
                 * to create datasets in some dataverse that is configured to use the storage
2308
                 * provider specified in the storageidentifier, but testing for the ability to
2309
                 * create a dataset in a specific dataverse would require changing the design
2310
                 * somehow (e.g. adding the ownerId to this call).
2311
                 */
2312
                allowed = true;
×
2313
            }
2314
            if (!allowed) {
×
2315
                return error(Response.Status.FORBIDDEN,
×
2316
                        "You are not permitted to abort file uploads with the supplied parameters.");
2317
            }
2318
            try {
2319
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2320
            } catch (IOException io) {
×
2321
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2322
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2323
                logger.warning(io.getMessage());
×
2324
                throw new WrappedResponse(io,
×
2325
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2326
            }
×
2327
            return Response.noContent().build();
×
2328
        } catch (WrappedResponse wr) {
×
2329
            return wr.getResponse();
×
2330
        }
2331
    }
2332

2333
    @PUT
2334
    @AuthRequired
2335
    @Path("mpupload")
2336
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2337
        try {
2338
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2339
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2340
            User user = session.getUser();
×
2341
            if (!user.isAuthenticated()) {
×
2342
                try {
2343
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2344
                } catch (WrappedResponse ex) {
×
2345
                    logger.info(
×
2346
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2347
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2348
                    throw ex;
×
2349
                }
×
2350
            }
2351
            boolean allowed = false;
×
2352
            if (dataset != null) {
×
2353
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2354
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2355
            } else {
2356
                /*
2357
                 * The only legitimate case where a global id won't correspond to a dataset is
2358
                 * for uploads during creation. Given that this call will still fail unless all
2359
                 * three parameters correspond to an active multipart upload, it should be safe
2360
                 * to allow the attempt for an authenticated user. If there are concerns about
2361
                 * permissions, one could check with the current design that the user is allowed
2362
                 * to create datasets in some dataverse that is configured to use the storage
2363
                 * provider specified in the storageidentifier, but testing for the ability to
2364
                 * create a dataset in a specific dataverse would require changing the design
2365
                 * somehow (e.g. adding the ownerId to this call).
2366
                 */
2367
                allowed = true;
×
2368
            }
2369
            if (!allowed) {
×
2370
                return error(Response.Status.FORBIDDEN,
×
2371
                        "You are not permitted to complete file uploads with the supplied parameters.");
2372
            }
2373
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
2374
            logger.info("Etags: " + partETagBody);
×
2375
            try {
2376
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2377
                for (String partNo : object.keySet()) {
×
2378
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2379
                }
×
2380
                for (PartETag et : eTagList) {
×
2381
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2382
                }
×
2383
            } catch (JsonException je) {
×
2384
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2385
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2386
            }
×
2387
            try {
2388
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2389
            } catch (IOException io) {
×
2390
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2391
                logger.warning(io.getMessage());
×
2392
                try {
2393
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2394
                } catch (IOException e) {
×
2395
                    logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2396
                    logger.severe(e.getMessage());
×
2397
                }
×
2398

2399
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2400
            }
×
2401
            return ok("Multipart Upload completed");
×
2402
        } catch (WrappedResponse wr) {
×
2403
            return wr.getResponse();
×
2404
        }
2405
    }
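    /*
     * A minimal usage sketch for completing a multipart upload, with the same placeholders plus
     * GLOBAL_ID, STORAGE_ID and UPLOAD_ID from the earlier uploadurls response (URL-encoded as
     * needed). The body is a JSON object mapping part numbers to the eTags returned by S3,
     * exactly as parsed above; the eTag values shown are made up.
     *
     *   String eTags = "{\"1\":\"etagOfPart1\",\"2\":\"etagOfPart2\"}";
     *   String query = "?globalid=" + GLOBAL_ID + "&storageidentifier=" + STORAGE_ID + "&uploadid=" + UPLOAD_ID;
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/mpupload" + query))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .PUT(HttpRequest.BodyPublishers.ofString(eTags))
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     */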
2406

2407
    /**
2408
     * Add a File to an existing Dataset
2409
     *
2410
     * @param idSupplied
2411
     * @param jsonData
2412
     * @param fileInputStream
2413
     * @param contentDispositionHeader
2414
     * @param formDataBodyPart
2415
     * @return
2416
     */
2417
    @POST
2418
    @AuthRequired
2419
    @Path("{id}/add")
2420
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2421
    public Response addFileToDataset(@Context ContainerRequestContext crc,
2422
                    @PathParam("id") String idSupplied,
2423
                    @FormDataParam("jsonData") String jsonData,
2424
                    @FormDataParam("file") InputStream fileInputStream,
2425
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
2426
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
2427
                    ){
2428

2429
        if (!systemConfig.isHTTPUpload()) {
×
2430
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
2431
        }
2432

2433
        // -------------------------------------
2434
        // (1) Get the user from the ContainerRequestContext
2435
        // -------------------------------------
2436
        User authUser;
2437
        authUser = getRequestUser(crc);
×
2438

2439
        // -------------------------------------
2440
        // (2) Get the Dataset Id
2441
        //  
2442
        // -------------------------------------
2443
        Dataset dataset;
2444
        
2445
        try {
2446
            dataset = findDatasetOrDie(idSupplied);
×
2447
        } catch (WrappedResponse wr) {
×
2448
            return wr.getResponse();
×
2449
        }
×
2450
        
2451
        //------------------------------------
2452
        // (2a) Make sure dataset does not have package file
2453
        //
2454
        // --------------------------------------
2455
        
2456
        for (DatasetVersion dv : dataset.getVersions()) {
×
2457
            if (dv.isHasPackageFile()) {
×
2458
                return error(Response.Status.FORBIDDEN,
×
2459
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
2460
                );
2461
            }
2462
        }
×
2463

2464
        // (2b) Load up optional params via JSON
2465
        //---------------------------------------
2466
        OptionalFileParams optionalFileParams = null;
×
2467
        msgt("(api) jsonData: " + jsonData);
×
2468

2469
        try {
2470
            optionalFileParams = new OptionalFileParams(jsonData);
×
2471
        } catch (DataFileTagException ex) {
×
2472
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
2473
        }
2474
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
×
2475
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
×
2476
        }
×
2477
        
2478
        // -------------------------------------
2479
        // (3) Get the file name and content type
2480
        // -------------------------------------
2481
        String newFilename = null;
×
2482
        String newFileContentType = null;
×
2483
        String newStorageIdentifier = null;
×
2484
        if (null == contentDispositionHeader) {
×
2485
            if (optionalFileParams.hasStorageIdentifier()) {
×
2486
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
×
2487
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
×
2488
                
2489
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
×
2490
                    return error(BAD_REQUEST,
×
2491
                            "Dataset store configuration does not allow provided storageIdentifier.");
2492
                }
2493
                if (optionalFileParams.hasFileName()) {
×
2494
                    newFilename = optionalFileParams.getFileName();
×
2495
                    if (optionalFileParams.hasMimetype()) {
×
2496
                        newFileContentType = optionalFileParams.getMimeType();
×
2497
                    }
2498
                }
2499
            } else {
2500
                return error(BAD_REQUEST,
×
2501
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
2502
            }
2503
        } else {
2504
            newFilename = contentDispositionHeader.getFileName();
×
2505
            // Let's see if the form data part has the mime (content) type specified.
2506
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
2507
            // because that defaults to "text/plain" when no "Content-Type:" header is
2508
            // present. Instead we'll go through the headers, and see if "Content-Type:"
2509
            // is there. If not, we'll default to "application/octet-stream" - the generic
2510
            // unknown type. This will prompt the application to run type detection and
2511
            // potentially find something more accurate.
2512
            // newFileContentType = formDataBodyPart.getMediaType().toString();
2513

2514
            for (String header : formDataBodyPart.getHeaders().keySet()) {
×
2515
                if (header.equalsIgnoreCase("Content-Type")) {
×
2516
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
×
2517
                }
2518
            }
×
2519
            if (newFileContentType == null) {
×
2520
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
×
2521
            }
2522
        }
2523

2524

2525
        //-------------------
2526
        // (4) Create the AddReplaceFileHelper object
2527
        //-------------------
2528
        msg("ADD!");
×
2529

2530
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
2531
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
×
2532
                ingestService,
2533
                datasetService,
2534
                fileService,
2535
                permissionSvc,
2536
                commandEngine,
2537
                systemConfig);
2538

2539

2540
        //-------------------
2541
        // (4) Run "runAddFileByDatasetId"
2542
        //-------------------
2543
        addFileHelper.runAddFileByDataset(dataset,
×
2544
                newFilename,
2545
                newFileContentType,
2546
                newStorageIdentifier,
2547
                fileInputStream,
2548
                optionalFileParams);
2549

2550

2551
        if (addFileHelper.hasError()){
×
2552
            //conflict response status added for 8859
2553
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
×
2554
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
×
2555
            }
2556
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
2557
        } else {
2558
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
2559
            try {
2560
                //msgt("as String: " + addFileHelper.getSuccessResult());
2561
                /**
2562
                 * @todo We need a consistent, sane way to communicate a human
2563
                 * readable message to an API client suitable for human
2564
                 * consumption. Imagine if the UI were built in Angular or React
2565
                 * and we want to return a message from the API as-is to the
2566
                 * user. Human readable.
2567
                 */
2568
                logger.fine("successMsg: " + successMsg);
×
2569
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
×
2570
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
×
2571
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2572
                } else {
2573
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2574
                }
2575

2576
                //"Look at that!  You added a file! (hey hey, it may have worked)");
2577
            } catch (NoFilesException ex) {
×
2578
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
2579
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");
×
2580

2581
            }
2582
        }
2583
        
2584
    } // end: addFileToDataset
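    /*
     * Illustrative shape of the "jsonData" form field for the direct-upload path handled above
     * (no file part; the file is registered by storage identifier). The "storageIdentifier",
     * "fileName" and "mimeType" keys correspond to the OptionalFileParams lookups used above;
     * the specific values, and the optional "description", are hypothetical examples.
     *
     *   {
     *     "storageIdentifier": "s3://demo-bucket:18xyzabc123-examplefile",
     *     "fileName": "data.csv",
     *     "mimeType": "text/csv",
     *     "description": "Example description"
     *   }
     *
     * When an actual file part is sent instead, the filename and content type are taken from the
     * multipart headers, as implemented above, and "jsonData" only needs to carry optional metadata.
     */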
2585

2586

2587
    /**
2588
     * Clean storage of a Dataset
2589
     *
2590
     * @param idSupplied
2591
     * @return
2592
     */
2593
    @GET
2594
    @AuthRequired
2595
    @Path("{id}/cleanStorage")
2596
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2597
        // get user and dataset
2598
        User authUser = getRequestUser(crc);
×
2599

2600
        Dataset dataset;
2601
        try {
2602
            dataset = findDatasetOrDie(idSupplied);
×
2603
        } catch (WrappedResponse wr) {
×
2604
            return wr.getResponse();
×
2605
        }
×
2606
        
2607
        // check permissions
2608
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
2609
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
2610
        }
2611

2612
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
2613

2614
        // check if no legacy files are present
2615
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
2616
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
2617
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
2618
        }
2619

2620
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
2621
        List<String> deleted;
2622
        try {
2623
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
2624
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
2625
        } catch (IOException ex) {
×
2626
            logger.log(Level.SEVERE, null, ex);
×
2627
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
2628
        }
×
2629

2630
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
2631
        
2632
    }
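    /*
     * A minimal usage sketch for cleanStorage, with the same placeholders. Passing dryrun=true
     * lists the candidate files without deleting anything, which is the safer first step before
     * running the real cleanup.
     *
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/cleanStorage?dryrun=true"))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .GET()
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     *   // The response message lists the "Found:" and "Deleted:" storage objects, as built above.
     */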
2633

2634
    private static Set<String> getDatasetFilenames(Dataset dataset) {
2635
        Set<String> files = new HashSet<>();
×
2636
        for (DataFile dataFile: dataset.getFiles()) {
×
2637
            String storageIdentifier = dataFile.getStorageIdentifier();
×
2638
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
2639
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
2640
            files.add(locationParts[locationParts.length-1]);
×
2641
        }
×
2642
        return files;
×
2643
    }
2644

2645
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
2646
        return f -> {
1✔
2647
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
2648
        };
2649
    }
2650

2651
    private void msg(String m) {
2652
        //System.out.println(m);
2653
        logger.fine(m);
×
2654
    }
×
2655

2656
    private void dashes() {
2657
        msg("----------------");
×
2658
    }
×
2659

2660
    private void msgt(String m) {
2661
        dashes();
×
2662
        msg(m);
×
2663
        dashes();
×
2664
    }
×
2665

2666

2667
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
2668
            throws WrappedResponse {
2669
        switch (versionId) {
×
2670
            case DS_VERSION_LATEST:
2671
                return hdl.handleLatest();
×
2672
            case DS_VERSION_DRAFT:
2673
                return hdl.handleDraft();
×
2674
            case DS_VERSION_LATEST_PUBLISHED:
2675
                return hdl.handleLatestPublished();
×
2676
            default:
2677
                try {
2678
                    String[] versions = versionId.split("\\.");
×
2679
                    switch (versions.length) {
×
2680
                        case 1:
2681
                            return hdl.handleSpecific(Long.parseLong(versions[0]), 0L);
×
2682
                        case 2:
2683
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
2684
                        default:
2685
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
2686
                    }
2687
                } catch (NumberFormatException nfe) {
×
2688
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
2689
                }
2690
        }
2691
    }
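    /*
     * Examples of how handleVersion dispatches version identifiers, derived from the switch above
     * (the handler variable hdl is illustrative):
     *
     *   handleVersion(DS_VERSION_LATEST, hdl)            -> hdl.handleLatest()
     *   handleVersion(DS_VERSION_DRAFT, hdl)             -> hdl.handleDraft()
     *   handleVersion(DS_VERSION_LATEST_PUBLISHED, hdl)  -> hdl.handleLatestPublished()
     *   handleVersion("3", hdl)                          -> hdl.handleSpecific(3L, 0L)
     *   handleVersion("3.1", hdl)                        -> hdl.handleSpecific(3L, 1L)
     *   handleVersion("3.1.2", hdl) or handleVersion("abc", hdl)
     *                                                    -> WrappedResponse with 400 Bad Request
     */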
2692

2693
    /*
2694
     * includeDeaccessioned defaults to false and checkPermsWhenDeaccessioned to false. Use it only when you are sure that you don't need to work with
2695
     * a deaccessioned dataset.
2696
     */
2697
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse {
2698
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
NEW
2699
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, false, false);
×
2700
    }
2701
    
2702
    /*
2703
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
2704
     */
2705
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse{
NEW
2706
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, true);
×
2707
    }
2708

2709
    /*
2710
     * Allows defining whether the permissions should be checked when a deaccessioned dataset is requested. If the user doesn't have edit permissions, this will result in an error.
2711
     */
2712
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
NEW
2713
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
×
2714
        if (dsv == null || dsv.getId() == null) {
×
2715
            throw new WrappedResponse(notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
×
2716
        }
2717
        if (dsv.isReleased()&& uriInfo!=null) {
×
2718
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
×
2719
            mdcLogService.logEntry(entry);
×
2720
        }
2721
        return dsv;
×
2722
    }
2723
    
2724
    @GET
2725
    @Path("{identifier}/locks")
2726
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
2727

2728
        Dataset dataset = null;
×
2729
        try {
2730
            dataset = findDatasetOrDie(id);
×
2731
            Set<DatasetLock> locks;
2732
            if (lockType == null) {
×
2733
                locks = dataset.getLocks();
×
2734
            } else {
2735
                // request for a specific type lock:
2736
                DatasetLock lock = dataset.getLockFor(lockType);
×
2737

2738
                locks = new HashSet<>();
×
2739
                if (lock != null) {
×
2740
                    locks.add(lock);
×
2741
                }
2742
            }
2743
            
2744
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
2745

2746
        } catch (WrappedResponse wr) {
×
2747
            return wr.getResponse();
×
2748
        }
2749
    }
2750

2751
    @DELETE
2752
    @AuthRequired
2753
    @Path("{identifier}/locks")
2754
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
2755

2756
        return response(req -> {
×
2757
            try {
2758
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
2759
                if (!user.isSuperuser()) {
×
2760
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
2761
                }
2762
                Dataset dataset = findDatasetOrDie(id);
×
2763
                
2764
                if (lockType == null) {
×
2765
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
2766
                    for (DatasetLock lock : dataset.getLocks()) {
×
2767
                        locks.add(lock.getReason());
×
2768
                    }
×
2769
                    if (!locks.isEmpty()) {
×
2770
                        for (DatasetLock.Reason locktype : locks) {
×
2771
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
2772
                            // refresh the dataset:
2773
                            dataset = findDatasetOrDie(id);
×
2774
                        }
×
2775
                        // kick off dataset reindexing, in case the locks removed
2776
                        // affected the search card:
2777
                        indexService.asyncIndexDataset(dataset, true);
×
2778
                        return ok("locks removed");
×
2779
                    }
2780
                    return ok("dataset not locked");
×
2781
                }
2782
                // request for a specific type lock:
2783
                DatasetLock lock = dataset.getLockFor(lockType);
×
2784
                if (lock != null) {
×
2785
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
2786
                    // refresh the dataset:
2787
                    dataset = findDatasetOrDie(id);
×
2788
                    // ... and kick off dataset reindexing, in case the lock removed
2789
                    // affected the search card:
2790
                    indexService.asyncIndexDataset(dataset, true);
×
2791
                    return ok("lock type " + lock.getReason() + " removed");
×
2792
                }
2793
                return ok("no lock type " + lockType + " on the dataset");
×
2794
            } catch (WrappedResponse wr) {
×
2795
                return wr.getResponse();
×
2796
            }
2797

2798
        }, getRequestUser(crc));
×
2799

2800
    }
2801
    
2802
    @POST
2803
    @AuthRequired
2804
    @Path("{identifier}/lock/{type}")
2805
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
2806
        return response(req -> {
×
2807
            try {
2808
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
2809
                if (!user.isSuperuser()) {
×
2810
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
2811
                }
2812
                Dataset dataset = findDatasetOrDie(id);
×
2813
                DatasetLock lock = dataset.getLockFor(lockType);
×
2814
                if (lock != null) {
×
2815
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
2816
                }
2817
                lock = new DatasetLock(lockType, user);
×
2818
                execCommand(new AddLockCommand(req, dataset, lock));
×
2819
                // refresh the dataset:
2820
                dataset = findDatasetOrDie(id);
×
2821
                // ... and kick off dataset reindexing:
2822
                indexService.asyncIndexDataset(dataset, true);
×
2823

2824
                return ok("dataset locked with lock type " + lockType);
×
2825
            } catch (WrappedResponse wr) {
×
2826
                return wr.getResponse();
×
2827
            }
2828

2829
        }, getRequestUser(crc));
×
2830
    }
2831
    
2832
    @GET
2833
    @AuthRequired
2834
    @Path("locks")
2835
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
2836
        // This API is here, under /datasets, and not under /admin, because we
2837
        // likely want it to be accessible to admin users who may not necessarily 
2838
        // have localhost access, that would be required to get to /api/admin in 
2839
        // most installations. It is still reasonable however to limit access to
2840
        // this api to admin users only.
2841
        AuthenticatedUser apiUser;
2842
        try {
2843
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
2844
        } catch (WrappedResponse ex) {
×
2845
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
2846
        }
×
2847
        if (!apiUser.isSuperuser()) {
×
2848
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2849
        }
2850
        
2851
        // Locks can be optionally filtered by type, user, or both.
2852
        DatasetLock.Reason lockTypeValue = null;
×
2853
        AuthenticatedUser user = null; 
×
2854
        
2855
        // For the lock type, we use a QueryParam of type String, instead of 
2856
        // DatasetLock.Reason; that would be less code to write, but this way 
2857
        // we can check if the value passed matches a valid lock type ("reason") 
2858
        // and provide a helpful error message if it doesn't. If you use a 
2859
        // QueryParam of an Enum type, trying to pass an invalid value to it 
2860
        // results in a potentially confusing "404/NOT FOUND - requested 
2861
        // resource is not available".
2862
        if (lockType != null && !lockType.isEmpty()) {
×
2863
            try {
2864
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
2865
            } catch (IllegalArgumentException iax) {
×
2866
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
2867
                for (Reason r: Reason.values()) {
×
2868
                    reasonJoiner.add(r.name());
×
2869
                }
2870
                String errorMessage = "Invalid lock type value: " + lockType + 
×
2871
                        "; valid lock types: " + reasonJoiner.toString();
×
2872
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
2873
            }
×
2874
        }
2875
        
2876
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
2877
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
2878
            if (user == null) {
×
2879
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
2880
            }
2881
        }
2882
        
2883
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
2884
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
2885
                            
2886
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
2887
    }   
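    /*
     * A minimal usage sketch for listLocks (superuser only), with the same SERVER_URL and
     * API_TOKEN placeholders. Both query parameters are optional; the lock type and user
     * identifier shown here are hypothetical examples.
     *
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/locks?type=Ingest&userIdentifier=jdoe"))
     *           .header("X-Dataverse-key", API_TOKEN)
     *           .GET()
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     *   // Returns a JSON array of locks; an invalid type yields a 400 listing the valid lock reasons.
     */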
2888
    
2889
    
2890
    @GET
2891
    @Path("{id}/makeDataCount/citations")
2892
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
2893
        
2894
        try {
2895
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2896
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
2897
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
2898
            for (DatasetExternalCitations citation : externalCitations) {
×
2899
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
2900
                /**
2901
                 * In the future we can imagine storing and presenting more
2902
                 * information about the citation such as the title of the paper
2903
                 * and the names of the authors. For now, we'll at least give
2904
                 * the URL of the citation so people can click and find out more
2905
                 * about the citation.
2906
                 */
2907
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
2908
                datasetsCitations.add(candidateObj);
×
2909
            }
×
2910
            return ok(datasetsCitations);
×
2911

2912
        } catch (WrappedResponse wr) {
×
2913
            return wr.getResponse();
×
2914
        }
2915

2916
    }
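    /*
     * A minimal usage sketch for the Make Data Count citations endpoint, with SERVER_URL and
     * DATASET_ID placeholders. No API token is required by the method above.
     *
     *   HttpRequest request = HttpRequest.newBuilder()
     *           .uri(URI.create(SERVER_URL + "/api/datasets/" + DATASET_ID + "/makeDataCount/citations"))
     *           .GET()
     *           .build();
     *   HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
     *   // Each element of the returned array carries a "citationUrl", as built above.
     */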
2917

2918
    @GET
2919
    @Path("{id}/makeDataCount/{metric}")
2920
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
2921
        String nullCurrentMonth = null;
×
2922
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
2923
    }
2924

2925
    @GET
2926
    @AuthRequired
2927
    @Path("{identifier}/storagesize")
2928
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
2929
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
2930
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
2931
    }
2932

2933
    @GET
2934
    @AuthRequired
2935
    @Path("{identifier}/versions/{versionId}/downloadsize")
2936
    public Response getDownloadSize(@Context ContainerRequestContext crc,
2937
                                    @PathParam("identifier") String dvIdtf,
2938
                                    @PathParam("versionId") String version,
2939
                                    @QueryParam("contentType") String contentType,
2940
                                    @QueryParam("accessStatus") String accessStatus,
2941
                                    @QueryParam("categoryName") String categoryName,
2942
                                    @QueryParam("tabularTagName") String tabularTagName,
2943
                                    @QueryParam("searchText") String searchText,
2944
                                    @QueryParam("mode") String mode,
2945
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
2946
                                    @Context UriInfo uriInfo,
2947
                                    @Context HttpHeaders headers) {
2948

2949
        return response(req -> {
×
2950
            FileSearchCriteria fileSearchCriteria;
2951
            try {
2952
                fileSearchCriteria = new FileSearchCriteria(
×
2953
                        contentType,
2954
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
2955
                        categoryName,
2956
                        tabularTagName,
2957
                        searchText
2958
                );
2959
            } catch (IllegalArgumentException e) {
×
2960
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
2961
            }
×
2962
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
2963
            try {
2964
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
×
2965
            } catch (IllegalArgumentException e) {
×
2966
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
×
2967
            }
×
2968
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
×
2969
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
×
2970
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
×
2971
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
2972
            jsonObjectBuilder.add("message", message);
×
2973
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
×
2974
            return ok(jsonObjectBuilder);
×
2975
        }, getRequestUser(crc));
×
2976
    }
2977

2978
    @GET
2979
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
2980
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
2981
        try {
2982
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2983
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
2984
            MakeDataCountUtil.MetricType metricType = null;
×
2985
            try {
2986
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
2987
            } catch (IllegalArgumentException ex) {
×
2988
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
2989
            }
×
2990
            String monthYear = null;
×
2991
            if (yyyymm != null) {
×
2992
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
2993
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
2994
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
2995
            }
2996
            if (country != null) {
×
2997
                country = country.toLowerCase();
×
2998
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
2999
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3000
                }
3001
            }
3002
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
3003
            if (datasetMetrics == null) {
×
3004
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3005
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
3006
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3007
            }
3008
            Long viewsTotalRegular = null;
×
3009
            Long viewsUniqueRegular = null;
×
3010
            Long downloadsTotalRegular = null;
×
3011
            Long downloadsUniqueRegular = null;
×
3012
            Long viewsTotalMachine = null;
×
3013
            Long viewsUniqueMachine = null;
×
3014
            Long downloadsTotalMachine = null;
×
3015
            Long downloadsUniqueMachine = null;
×
3016
            Long viewsTotal = null;
×
3017
            Long viewsUnique = null;
×
3018
            Long downloadsTotal = null;
×
3019
            Long downloadsUnique = null;
×
3020
            switch (metricSupplied) {
×
3021
                case "viewsTotal":
3022
                    viewsTotal = datasetMetrics.getViewsTotal();
×
3023
                    break;
×
3024
                case "viewsTotalRegular":
3025
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
3026
                    break;
×
3027
                case "viewsTotalMachine":
3028
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
3029
                    break;
×
3030
                case "viewsUnique":
3031
                    viewsUnique = datasetMetrics.getViewsUnique();
×
3032
                    break;
×
3033
                case "viewsUniqueRegular":
3034
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
3035
                    break;
×
3036
                case "viewsUniqueMachine":
3037
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
3038
                    break;
×
3039
                case "downloadsTotal":
3040
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
3041
                    break;
×
3042
                case "downloadsTotalRegular":
3043
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
3044
                    break;
×
3045
                case "downloadsTotalMachine":
3046
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
3047
                    break;
×
3048
                case "downloadsUnique":
3049
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
3050
                    break;
×
3051
                case "downloadsUniqueRegular":
3052
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
3053
                    break;
×
3054
                case "downloadsUniqueMachine":
3055
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
3056
                    break;
×
3057
                default:
3058
                    break;
3059
            }
3060
            /**
3061
             * TODO: Think more about the JSON output and the API design.
3062
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3063
             * now, by country. We could return multiple metrics (viewsTotal,
3064
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3065
             */
3066
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
3067
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
3068
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3069
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3070
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3071
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3072
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3073
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3074
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3075
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3076
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3077
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3078
            return ok(jsonObjectBuilder);
×
3079
        } catch (WrappedResponse wr) {
×
3080
            return wr.getResponse();
×
3081
        } catch (Exception e) {
×
3082
            //bad date - caught in sanitize call
3083
            return error(BAD_REQUEST, e.getMessage());
×
3084
        }
3085
    }
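
    // Illustrative sketch (added for documentation, not part of the original source): a minimal
    // client call to the Make Data Count endpoint defined above, e.g. the "viewsTotal" metric for
    // May 2018 filtered to one country. The path segments and the "country" query parameter come
    // from the @Path/@QueryParam annotations above; siteUrl and datasetId are hypothetical values.
    private static JsonObject fetchMakeDataCountMetricExample(String siteUrl, String datasetId)
            throws IOException, InterruptedException {
        java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
        java.net.http.HttpRequest request = java.net.http.HttpRequest.newBuilder()
                .uri(java.net.URI.create(siteUrl + "/api/datasets/" + datasetId
                        + "/makeDataCount/viewsTotal/2018-05?country=us"))
                .GET()
                .build();
        java.net.http.HttpResponse<String> response =
                client.send(request, java.net.http.HttpResponse.BodyHandlers.ofString());
        // On success the data object carries the viewsTotal/viewsUnique/downloads* keys built by
        // the handler above; only the requested metric is populated, the rest stay null/absent.
        return JsonUtil.getJsonObject(response.body()).getJsonObject("data");
    }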
3086
    
3087
    @GET
3088
    @AuthRequired
3089
    @Path("{identifier}/storageDriver")
3090
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3091
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3092
        
3093
        Dataset dataset; 
3094
        
3095
        try {
3096
            dataset = findDatasetOrDie(dvIdtf);
×
3097
        } catch (WrappedResponse ex) {
×
3098
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3099
        }
×
3100
        
3101
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3102
    }
3103
    
3104
    @PUT
3105
    @AuthRequired
3106
    @Path("{identifier}/storageDriver")
3107
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3108
            String storageDriverLabel,
3109
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3110
        
3111
        // Superuser-only:
3112
        AuthenticatedUser user;
3113
        try {
3114
            user = getRequestAuthenticatedUserOrDie(crc);
×
3115
        } catch (WrappedResponse ex) {
×
3116
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3117
        }
×
3118
        if (!user.isSuperuser()) {
×
3119
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3120
        }
3121

3122
        Dataset dataset;
3123

3124
        try {
3125
            dataset = findDatasetOrDie(dvIdtf);
×
3126
        } catch (WrappedResponse ex) {
×
3127
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3128
        }
×
3129
        
3130
        // We don't want to allow setting this to a store id that does not exist: 
3131
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
3132
            if (store.getKey().equals(storageDriverLabel)) {
×
3133
                dataset.setStorageDriverId(store.getValue());
×
3134
                datasetService.merge(dataset);
×
3135
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3136
            }
3137
        }
×
3138
        return error(Response.Status.BAD_REQUEST,
×
3139
                "No Storage Driver found for : " + storageDriverLabel);
3140
    }
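
    // Illustrative sketch (added for documentation, not part of the original source): how a
    // superuser client could set a dataset's storage driver via the PUT endpoint above. The label
    // is sent as the raw request body, matching the String storageDriverLabel parameter; siteUrl,
    // datasetId, apiToken, and label are hypothetical inputs.
    private static int setStorageDriverExample(String siteUrl, String datasetId, String apiToken, String label)
            throws IOException, InterruptedException {
        java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
        java.net.http.HttpRequest request = java.net.http.HttpRequest.newBuilder()
                .uri(java.net.URI.create(siteUrl + "/api/datasets/" + datasetId + "/storageDriver"))
                .header("X-Dataverse-key", apiToken)
                .PUT(java.net.http.HttpRequest.BodyPublishers.ofString(label))
                .build();
        return client.send(request, java.net.http.HttpResponse.BodyHandlers.ofString()).statusCode();
    }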
3141
    
3142
    @DELETE
3143
    @AuthRequired
3144
    @Path("{identifier}/storageDriver")
3145
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3146
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3147
        
3148
        // Superuser-only:
3149
        AuthenticatedUser user;
3150
        try {
3151
            user = getRequestAuthenticatedUserOrDie(crc);
×
3152
        } catch (WrappedResponse ex) {
×
3153
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3154
        }
×
3155
        if (!user.isSuperuser()) {
×
3156
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3157
        }
3158

3159
        Dataset dataset;
3160

3161
        try {
3162
            dataset = findDatasetOrDie(dvIdtf);
×
3163
        } catch (WrappedResponse ex) {
×
3164
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3165
        }
×
3166
        
3167
        dataset.setStorageDriverId(null);
×
3168
        datasetService.merge(dataset);
×
3169
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3170
    }
3171

3172
    @GET
3173
    @AuthRequired
3174
    @Path("{identifier}/curationLabelSet")
3175
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3176
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3177

3178
        try {
3179
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3180
            if (!user.isSuperuser()) {
×
3181
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3182
            }
3183
        } catch (WrappedResponse wr) {
×
3184
            return wr.getResponse();
×
3185
        }
×
3186

3187
        Dataset dataset;
3188

3189
        try {
3190
            dataset = findDatasetOrDie(dvIdtf);
×
3191
        } catch (WrappedResponse ex) {
×
3192
            return ex.getResponse();
×
3193
        }
×
3194

3195
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3196
    }
3197

3198
    @PUT
3199
    @AuthRequired
3200
    @Path("{identifier}/curationLabelSet")
3201
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3202
                                        @PathParam("identifier") String dvIdtf,
3203
                                        @QueryParam("name") String curationLabelSet,
3204
                                        @Context UriInfo uriInfo,
3205
                                        @Context HttpHeaders headers) throws WrappedResponse {
3206

3207
        // Superuser-only:
3208
        AuthenticatedUser user;
3209
        try {
3210
            user = getRequestAuthenticatedUserOrDie(crc);
×
3211
        } catch (WrappedResponse ex) {
×
3212
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3213
        }
×
3214
        if (!user.isSuperuser()) {
×
3215
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3216
        }
3217

3218
        Dataset dataset;
3219

3220
        try {
3221
            dataset = findDatasetOrDie(dvIdtf);
×
3222
        } catch (WrappedResponse ex) {
×
3223
            return ex.getResponse();
×
3224
        }
×
3225
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3226
            dataset.setCurationLabelSetName(curationLabelSet);
×
3227
            datasetService.merge(dataset);
×
3228
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3229
        } else {
3230
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3231
                if (setName.equals(curationLabelSet)) {
×
3232
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3233
                    datasetService.merge(dataset);
×
3234
                    return ok("Curation Label Set Name set to: " + setName);
×
3235
                }
3236
            }
×
3237
        }
3238
        return error(Response.Status.BAD_REQUEST,
×
3239
            "No Such Curation Label Set");
3240
    }
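
    // Illustrative sketch (added for documentation, not part of the original source): setting a
    // dataset's curation label set via the PUT endpoint above. The set name is passed in the
    // "name" query parameter, matching @QueryParam("name") in the signature; siteUrl, datasetId,
    // apiToken, and setName are hypothetical superuser inputs.
    private static int setCurationLabelSetExample(String siteUrl, String datasetId, String apiToken, String setName)
            throws IOException, InterruptedException {
        java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
        java.net.http.HttpRequest request = java.net.http.HttpRequest.newBuilder()
                .uri(java.net.URI.create(siteUrl + "/api/datasets/" + datasetId + "/curationLabelSet?name="
                        + java.net.URLEncoder.encode(setName, java.nio.charset.StandardCharsets.UTF_8)))
                .header("X-Dataverse-key", apiToken)
                .PUT(java.net.http.HttpRequest.BodyPublishers.noBody())
                .build();
        return client.send(request, java.net.http.HttpResponse.BodyHandlers.ofString()).statusCode();
    }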
3241

3242
    @DELETE
3243
    @AuthRequired
3244
    @Path("{identifier}/curationLabelSet")
3245
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3246
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3247

3248
        // Superuser-only:
3249
        AuthenticatedUser user;
3250
        try {
3251
            user = getRequestAuthenticatedUserOrDie(crc);
×
3252
        } catch (WrappedResponse ex) {
×
3253
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3254
        }
×
3255
        if (!user.isSuperuser()) {
×
3256
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3257
        }
3258

3259
        Dataset dataset;
3260

3261
        try {
3262
            dataset = findDatasetOrDie(dvIdtf);
×
3263
        } catch (WrappedResponse ex) {
×
3264
            return ex.getResponse();
×
3265
        }
×
3266

3267
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3268
        datasetService.merge(dataset);
×
3269
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3270
    }
3271

3272
    @GET
3273
    @AuthRequired
3274
    @Path("{identifier}/allowedCurationLabels")
3275
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3276
                                             @PathParam("identifier") String dvIdtf,
3277
                                             @Context UriInfo uriInfo,
3278
                                             @Context HttpHeaders headers) throws WrappedResponse {
3279
        AuthenticatedUser user = null;
×
3280
        try {
3281
            user = getRequestAuthenticatedUserOrDie(crc);
×
3282
        } catch (WrappedResponse wr) {
×
3283
            return wr.getResponse();
×
3284
        }
×
3285

3286
        Dataset dataset;
3287

3288
        try {
3289
            dataset = findDatasetOrDie(dvIdtf);
×
3290
        } catch (WrappedResponse ex) {
×
3291
            return ex.getResponse();
×
3292
        }
×
3293
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3294
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
3295
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3296
        } else {
3297
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3298
        }
3299
    }
3300

3301
    @GET
3302
    @AuthRequired
3303
    @Path("{identifier}/timestamps")
3304
    @Produces(MediaType.APPLICATION_JSON)
3305
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3306

3307
        Dataset dataset = null;
×
3308
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3309
        try {
3310
            dataset = findDatasetOrDie(id);
×
3311
            User u = getRequestUser(crc);
×
3312
            Set<Permission> perms = new HashSet<Permission>();
×
3313
            perms.add(Permission.ViewUnpublishedDataset);
×
3314
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3315
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3316
            logger.fine("CSD: " + canSeeDraft);
×
3317
            logger.fine("IT: " + dataset.getIndexTime());
×
3318
            logger.fine("MT: " + dataset.getModificationTime());
×
3319
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
3320
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3321
            // Basic info if it's released
3322
            if (dataset.isReleased() || canSeeDraft) {
×
3323
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
3324
                if (dataset.getPublicationDate() != null) {
×
3325
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3326
                }
3327

3328
                if (dataset.getLastExportTime() != null) {
×
3329
                    timestamps.add("lastMetadataExportTime",
×
3330
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3331

3332
                }
3333

3334
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
3335
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
3336
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3337
                }
3338
                // If the modification/permission-modification time is set and the corresponding
                // index time is null or is before that modification time, the relevant index is stale
                timestamps.add("hasStaleIndex",
                        dataset.getModificationTime() != null && (dataset.getIndexTime() == null
                                || dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0));
                timestamps.add("hasStalePermissionIndex",
                        dataset.getPermissionModificationTime() != null && (dataset.getPermissionIndexTime() == null
                                || dataset.getPermissionIndexTime().compareTo(dataset.getPermissionModificationTime()) <= 0));
            }
3349
            // More detail if you can see a draft
3350
            if (canSeeDraft) {
×
3351
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
3352
                if (dataset.getIndexTime() != null) {
×
3353
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3354
                }
3355
                if (dataset.getPermissionModificationTime() != null) {
×
3356
                    timestamps.add("lastPermissionUpdateTime",
×
3357
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3358
                }
3359
                if (dataset.getPermissionIndexTime() != null) {
×
3360
                    timestamps.add("lastPermissionIndexTime",
×
3361
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3362
                }
3363
                if (dataset.getGlobalIdCreateTime() != null) {
×
3364
                    timestamps.add("globalIdCreateTime", formatter
×
3365
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3366
                }
3367

3368
            }
3369
            return ok(timestamps);
×
3370
        } catch (WrappedResponse wr) {
×
3371
            return wr.getResponse();
×
3372
        }
3373
    }
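
    // Illustrative sketch (added for documentation, not part of the original source): reading a
    // couple of the timestamp fields produced by getTimestamps() above. Only keys actually added
    // by that method ("createTime", "hasStaleIndex", ...) are used here; siteUrl, datasetId, and
    // apiToken are hypothetical.
    private static void printTimestampsExample(String siteUrl, String datasetId, String apiToken)
            throws IOException, InterruptedException {
        java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
        java.net.http.HttpRequest request = java.net.http.HttpRequest.newBuilder()
                .uri(java.net.URI.create(siteUrl + "/api/datasets/" + datasetId + "/timestamps"))
                .header("X-Dataverse-key", apiToken)
                .GET()
                .build();
        JsonObject data = JsonUtil.getJsonObject(
                client.send(request, java.net.http.HttpResponse.BodyHandlers.ofString()).body())
                .getJsonObject("data");
        // Fields may be absent depending on release state and permissions, hence the defaults.
        System.out.println("createTime: " + data.getString("createTime", null));
        System.out.println("hasStaleIndex: " + data.getBoolean("hasStaleIndex", false));
    }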
3374

3375

3376
/****************************
 * Globus Support Section:
 * 
 * Globus transfer in (upload) and out (download) involve three basic steps: The
 * app is launched and makes a callback to the
 * globusUploadParameters/globusDownloadParameters method to get all of the info
 * needed to set up its display.
 * 
 * At some point after that, the user will make a selection as to which files to
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
 * to indicate a transfer is about to start. In addition to providing the
 * details of where to transfer the files to/from, Dataverse also grants the
 * Globus principal involved the relevant rw or r permission for the dataset.
 * 
 * Once the transfer is started, the app records the task id and sends it to
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
 * monitors the transfer task and, when it ultimately succeeds or fails, it
 * revokes the principal's permission and, in the transfer-in case, adds the
 * files to the dataset. (The dataset is locked until the transfer completes.)
 * 
 * (If no transfer is started within a specified timeout, permissions will
 * automatically be revoked - see the GlobusServiceBean for details.)
 *
 * The option to reference a file at a remote endpoint (rather than transfer it)
 * follows the first two steps of the process above but completes with a call to
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
 * files can be added to the dataset immediately.)
 */

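    // Illustrative sketch (added for documentation, not part of the original source): the JSON
    // payloads a client would send for the two upload-related steps described in the section
    // comment above. The "principal"/"numberOfFiles" keys are the ones read by
    // requestGlobusUpload() below, and the javadoc for addGlobusFilesToDataset() states that a
    // "taskIdentifier" key is required; the concrete values shown here are made up.
    private static JsonObject buildGlobusUploadPayloadsExample() {
        // Step 2: body for POST /api/datasets/{id}/requestGlobusUploadPaths (managed store case)
        JsonObject requestPathsBody = Json.createObjectBuilder()
                .add("principal", "someGlobusIdentity@clients.auth.globus.org") // hypothetical Globus principal
                .add("numberOfFiles", 2)
                .build();
        // Step 3: "jsonData" form field for POST /api/datasets/{id}/addGlobusFiles, sent once the
        // Globus transfer task has been started by the app.
        JsonObject addGlobusFilesBody = Json.createObjectBuilder()
                .add("taskIdentifier", "00000000-0000-0000-0000-000000000000") // hypothetical Globus task id
                .build();
        return Json.createObjectBuilder()
                .add("requestGlobusUploadPaths", requestPathsBody)
                .add("addGlobusFiles", addGlobusFilesBody)
                .build();
    }
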
    /**
     * Retrieve the parameters and signed URLs required to perform a Globus
     * transfer. This API endpoint is expected to be called as a signed callback
     * after the globus-dataverse app (or another app) is launched, but it will accept
     * other forms of authentication.
     * 
     * @param crc
     * @param datasetId
     */
    @GET
3415
    @AuthRequired
3416
    @Path("{id}/globusUploadParameters")
3417
    @Produces(MediaType.APPLICATION_JSON)
3418
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3419
            @QueryParam(value = "locale") String locale) {
3420
        // -------------------------------------
3421
        // (1) Get the user from the ContainerRequestContext
3422
        // -------------------------------------
3423
        AuthenticatedUser authUser;
3424
        try {
3425
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3426
        } catch (WrappedResponse e) {
×
3427
            return e.getResponse();
×
3428
        }
×
3429
        // -------------------------------------
3430
        // (2) Get the Dataset Id
3431
        // -------------------------------------
3432
        Dataset dataset;
3433

3434
        try {
3435
            dataset = findDatasetOrDie(datasetId);
×
3436
        } catch (WrappedResponse wr) {
×
3437
            return wr.getResponse();
×
3438
        }
×
3439
        String storeId = dataset.getEffectiveStorageDriverId();
×
3440
        // acceptsGlobusTransfers should only be true for an S3 or globus store
3441
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
3442
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
3443
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
3444
        }
3445

3446
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
3447

3448
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
3449
        String transferEndpoint = null;
×
3450
        JsonArray referenceEndpointsWithPaths = null;
×
3451
        if (managed) {
×
3452
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
3453
        } else {
3454
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
×
3455
        }
3456

3457
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
3458
        queryParams.add("queryParameters",
×
3459
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
3460
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
3461
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
3462
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
3463
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
3464
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
3465
        JsonObjectBuilder params = Json.createObjectBuilder();
×
3466
        substitutedParams.keySet().forEach((key) -> {
×
3467
            params.add(key, substitutedParams.get(key));
×
3468
        });
×
3469
        params.add("managed", Boolean.toString(managed));
×
NEW
3470
        if (managed) {
×
NEW
3471
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
×
NEW
3472
            if (maxSize != null) {
×
NEW
3473
                params.add("fileSizeLimit", maxSize);
×
3474
            }
NEW
3475
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
NEW
3476
            if (limit != null) {
×
NEW
3477
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
×
3478
            }
3479
        }
3480
        if (transferEndpoint != null) {
×
3481
            params.add("endpoint", transferEndpoint);
×
3482
        } else {
3483
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
×
3484
        }
3485
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
3486
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
3487
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
×
3488
        allowedApiCalls.add(
×
3489
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
×
3490
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
×
3491
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3492
        if(managed) {
×
3493
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
×
3494
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3495
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
×
3496
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3497
        } else {
3498
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
×
3499
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3500
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
×
3501
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3502
        }
3503
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
3504
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
3505
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
3506
                .add(URLTokenUtil.TIMEOUT, 5));
×
3507
        allowedApiCalls.add(
×
3508
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
3509
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
3510
                        .add(URLTokenUtil.TIMEOUT, 5));
×
3511

3512
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
3513
    }
3514

3515
    /**
     * Provides specific storageIdentifiers to use for each file and requests permissions for a given Globus user to upload to the dataset.
     * 
     * @param crc
     * @param datasetId
     * @param jsonData - an object that must include the id of the Globus "principal" involved and the "numberOfFiles" that will be transferred.
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
3526
    @POST
3527
    @AuthRequired
3528
    @Path("{id}/requestGlobusUploadPaths")
3529
    @Consumes(MediaType.APPLICATION_JSON)
3530
    @Produces(MediaType.APPLICATION_JSON)
3531
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3532
            String jsonBody) throws IOException, ExecutionException, InterruptedException {
3533

3534
        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
×
3535

3536
        if (!systemConfig.isGlobusUpload()) {
×
3537
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
3538
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
3539
        }
3540

3541
        // -------------------------------------
3542
        // (1) Get the user from the ContainerRequestContext
3543
        // -------------------------------------
3544
        AuthenticatedUser authUser;
3545
        try {
3546
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3547
        } catch (WrappedResponse e) {
×
3548
            return e.getResponse();
×
3549
        }
×
3550

3551
        // -------------------------------------
3552
        // (2) Get the Dataset Id
3553
        // -------------------------------------
3554
        Dataset dataset;
3555

3556
        try {
3557
            dataset = findDatasetOrDie(datasetId);
×
3558
        } catch (WrappedResponse wr) {
×
3559
            return wr.getResponse();
×
3560
        }
×
3561
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
×
3562
                .canIssue(UpdateDatasetVersionCommand.class)) {
×
3563

3564
            JsonObject params = JsonUtil.getJsonObject(jsonBody);
×
3565
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3566
                try {
3567
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
×
3568
                    if (referencedFiles == null || referencedFiles.size() == 0) {
×
3569
                        return badRequest("No referencedFiles specified");
×
3570
                    }
3571
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
×
3572
                    return (ok(fileMap));
×
3573
                } catch (Exception e) {
×
3574
                    return badRequest(e.getLocalizedMessage());
×
3575
                }
3576
            } else {
3577
                try {
3578
                    String principal = params.getString("principal");
×
3579
                    int numberOfPaths = params.getInt("numberOfFiles");
×
3580
                    if (numberOfPaths <= 0) {
×
3581
                        return badRequest("numberOfFiles must be positive");
×
3582
                    }
3583

3584
                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
×
3585
                    switch (response.getInt("status")) {
×
3586
                    case 201:
3587
                        return ok(response.getJsonObject("paths"));
×
3588
                    case 400:
3589
                        return badRequest("Unable to grant permission");
×
3590
                    case 409:
3591
                        return conflict("Permission already exists");
×
3592
                    default:
3593
                        return error(null, "Unexpected error when granting permission");
×
3594
                    }
3595

3596
                } catch (NullPointerException | ClassCastException e) {
×
3597
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
×
3598

3599
                }
3600
            }
3601
        } else {
3602
            return forbidden("User doesn't have permission to upload to this dataset");
×
3603
        }
3604

3605
    }
3606

3607
    /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor
     * 
     * @param crc
     * @param datasetId
     * @param jsonData - see /addFiles documentation; an additional "taskIdentifier" key in the main object is required.
     * @param uriInfo
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
3618
    @POST
3619
    @AuthRequired
3620
    @Path("{id}/addGlobusFiles")
3621
    @Consumes(MediaType.MULTIPART_FORM_DATA)
3622
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
3623
                                            @PathParam("id") String datasetId,
3624
                                            @FormDataParam("jsonData") String jsonData,
3625
                                            @Context UriInfo uriInfo
3626
    ) throws IOException, ExecutionException, InterruptedException {
3627

3628
        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);
×
3629

3630
        if (!systemConfig.isHTTPUpload()) {
×
3631
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
3632
        }
3633

3634
        // -------------------------------------
3635
        // (1) Get the user from the API key
3636
        // -------------------------------------
3637
        AuthenticatedUser authUser;
3638
        try {
3639
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3640
        } catch (WrappedResponse ex) {
×
3641
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
3642
            );
3643
        }
×
3644

3645
        // -------------------------------------
3646
        // (2) Get the Dataset Id
3647
        // -------------------------------------
3648
        Dataset dataset;
3649

3650
        try {
3651
            dataset = findDatasetOrDie(datasetId);
×
3652
        } catch (WrappedResponse wr) {
×
3653
            return wr.getResponse();
×
3654
        }
×
3655
        
3656
        JsonObject jsonObject = null;
×
3657
        try {
3658
            jsonObject = JsonUtil.getJsonObject(jsonData);
×
3659
        } catch (Exception ex) {
×
3660
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
×
3661
            return badRequest("Error parsing json body");
×
3662

3663
        }
×
3664

3665
        //------------------------------------
3666
        // (2b) Make sure dataset does not have package file
3667
        // --------------------------------------
3668

3669
        for (DatasetVersion dv : dataset.getVersions()) {
×
3670
            if (dv.isHasPackageFile()) {
×
3671
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
3672
                );
3673
            }
3674
        }
×
3675

3676

3677
        String lockInfoMessage = "Globus Upload API started ";
×
3678
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
×
3679
                (authUser).getId(), lockInfoMessage);
×
3680
        if (lock != null) {
×
3681
            dataset.addLock(lock);
×
3682
        } else {
3683
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
3684
        }
3685

3686

3687
        ApiToken token = authSvc.findApiTokenByUser(authUser);
×
3688

3689
        if(uriInfo != null) {
×
3690
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
×
3691
        }
3692

3693

3694
        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
×
3695
        
3696
        // Async Call
3697
        globusService.globusUpload(jsonObject, token, dataset, requestUrl, authUser);
×
3698

3699
        return ok("Async call to Globus Upload started ");
×
3700

3701
    }
3702
    
3703
/**
 * Retrieve the parameters and signed URLs required to perform a Globus
 * transfer/download. This API endpoint is expected to be called as a signed
 * callback after the globus-dataverse app (or another app) is launched, but it will
 * accept other forms of authentication.
 * 
 * @param crc
 * @param datasetId
 * @param locale
 * @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL.
 * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
 */
3715
    @GET
3716
    @AuthRequired
3717
    @Path("{id}/globusDownloadParameters")
3718
    @Produces(MediaType.APPLICATION_JSON)
3719
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3720
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
3721
        // -------------------------------------
3722
        // (1) Get the user from the ContainerRequestContext
3723
        // -------------------------------------
3724
        AuthenticatedUser authUser;
3725
        try {
3726
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3727
        } catch (WrappedResponse e) {
×
3728
            return e.getResponse();
×
3729
        }
×
3730
        // -------------------------------------
3731
        // (2) Get the Dataset Id
3732
        // -------------------------------------
3733
        Dataset dataset;
3734

3735
        try {
3736
            dataset = findDatasetOrDie(datasetId);
×
3737
        } catch (WrappedResponse wr) {
×
3738
            return wr.getResponse();
×
3739
        }
×
3740
        String storeId = dataset.getEffectiveStorageDriverId();
×
3741
        // acceptsGlobusTransfers should only be true for an S3 or globus store
3742
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
3743
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
×
3744
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
3745
        }
3746

3747
        JsonObject files = globusService.getFilesForDownload(downloadId);
×
3748
        if (files == null) {
×
3749
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
3750
        }
3751

3752
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
3753

3754
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
3755
        String transferEndpoint = null;
×
3756

3757
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
3758
        queryParams.add("queryParameters",
×
3759
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
3760
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
3761
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
3762
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
3763
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
3764
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
3765
        JsonObjectBuilder params = Json.createObjectBuilder();
×
3766
        substitutedParams.keySet().forEach((key) -> {
×
3767
            params.add(key, substitutedParams.get(key));
×
3768
        });
×
3769
        params.add("managed", Boolean.toString(managed));
×
3770
        if (managed) {
×
3771
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
3772
            params.add("endpoint", transferEndpoint);
×
3773
        }
3774
        params.add("files", files);
×
3775
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
3776
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
3777
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
×
3778
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3779
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
×
3780
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3781
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
×
3782
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3783
                .add(URLTokenUtil.URL_TEMPLATE,
×
3784
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
3785
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3786
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
3787
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
3788
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
3789
                .add(URLTokenUtil.TIMEOUT, 5));
×
3790
        allowedApiCalls.add(
×
3791
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
3792
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
3793
                        .add(URLTokenUtil.TIMEOUT, 5));
×
3794

3795
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
3796
    }
3797

3798
    /**
     * Requests permissions for a given Globus user to download the specified files
     * in the dataset and returns information about the paths to transfer from.
     * 
     * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
     * 
     * @param crc
     * @param datasetId
     * @param jsonData - a JSON object that must include the id of the Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
3812
    @POST
3813
    @AuthRequired
3814
    @Path("{id}/requestGlobusDownload")
3815
    @Consumes(MediaType.APPLICATION_JSON)
3816
    @Produces(MediaType.APPLICATION_JSON)
3817
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3818
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
3819
            throws IOException, ExecutionException, InterruptedException {
3820

3821
        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);
×
3822

3823
        if (!systemConfig.isGlobusDownload()) {
×
3824
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
3825
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
3826
        }
3827

3828
        // -------------------------------------
3829
        // (1) Get the user from the ContainerRequestContext
3830
        // -------------------------------------
3831
        User user = getRequestUser(crc);
×
3832

3833
        // -------------------------------------
3834
        // (2) Get the Dataset Id
3835
        // -------------------------------------
3836
        Dataset dataset;
3837

3838
        try {
3839
            dataset = findDatasetOrDie(datasetId);
×
3840
        } catch (WrappedResponse wr) {
×
3841
            return wr.getResponse();
×
3842
        }
×
3843
        JsonObject body = null;
×
3844
        if (jsonBody != null) {
×
3845
            body = JsonUtil.getJsonObject(jsonBody);
×
3846
        }
3847
        Set<String> fileIds = null;
×
3848
        if (downloadId != null) {
×
3849
            JsonObject files = globusService.getFilesForDownload(downloadId);
×
3850
            if (files != null) {
×
3851
                fileIds = files.keySet();
×
3852
            }
3853
        } else {
×
3854
            if ((body!=null) && body.containsKey("fileIds")) {
×
3855
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
×
3856
                fileIds = new HashSet<String>(fileVals.size());
×
3857
                for (JsonValue fileVal : fileVals) {
×
3858
                    String id = null;
×
3859
                    switch (fileVal.getValueType()) {
×
3860
                    case STRING:
3861
                        id = ((JsonString) fileVal).getString();
×
3862
                        break;
×
3863
                    case NUMBER:
3864
                        id = ((JsonNumber) fileVal).toString();
×
3865
                        break;
×
3866
                    default:
3867
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
×
3868
                    }
3869
                    ;
3870
                    fileIds.add(id);
×
3871
                }
×
3872
            } else {
×
3873
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
×
3874
            }
3875
        }
3876

3877
        if (fileIds.isEmpty()) {
×
3878
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
3879
        }
3880
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
×
3881
        for (String id : fileIds) {
×
3882
            boolean published = false;
×
3883
            logger.info("File id: " + id);
×
3884

3885
            DataFile df = null;
×
3886
            try {
3887
                df = findDataFileOrDie(id);
×
3888
            } catch (WrappedResponse wr) {
×
3889
                return wr.getResponse();
×
3890
            }
×
3891
            if (!df.getOwner().equals(dataset)) {
×
3892
                return badRequest("All files must be in the dataset");
×
3893
            }
3894
            dataFiles.add(df);
×
3895

3896
            for (FileMetadata fm : df.getFileMetadatas()) {
×
3897
                if (fm.getDatasetVersion().isPublished()) {
×
3898
                    published = true;
×
3899
                    break;
×
3900
                }
3901
            }
×
3902

3903
            if (!published) {
×
3904
                // If the file is not published, they can still download the file, if the user
3905
                // has the permission to view unpublished versions:
3906

3907
                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
×
3908
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
×
3909
                    return forbidden("User doesn't have permission to download file: " + id);
×
3910
                }
3911
            } else { // published and restricted and/or embargoed
3912
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
×
3913
                    // This line also handles all three authenticated session user, token user, and
3914
                    // guest cases.
3915
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
×
3916
                        return forbidden("User doesn't have permission to download file: " + id);
×
3917
                    }
3918

3919
            }
3920
        }
×
3921
        // Allowed to download all requested files
3922
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
×
3923
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3924
            // If managed, give the principal read permissions
3925
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
×
3926
            switch (status) {
×
3927
            case 201:
3928
                return ok(files);
×
3929
            case 400:
3930
                return badRequest("Unable to grant permission");
×
3931
            case 409:
3932
                return conflict("Permission already exists");
×
3933
            default:
3934
                return error(null, "Unexpected error when granting permission");
×
3935
            }
3936

3937
        }
3938

3939
        return ok(files);
×
3940
    }
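
    // Illustrative sketch (added for documentation, not part of the original source): a request
    // body for POST /api/datasets/{id}/requestGlobusDownload when it is called directly (no
    // downloadId). The "principal" and "fileIds" keys are the ones read by the method above; the
    // ids and principal shown are made up.
    private static JsonObject buildRequestGlobusDownloadBodyExample() {
        return Json.createObjectBuilder()
                .add("principal", "someGlobusIdentity@clients.auth.globus.org") // hypothetical Globus principal
                .add("fileIds", Json.createArrayBuilder().add(42).add(43))      // database ids of files in the dataset
                .build();
    }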
3941

3942
    /**
     * Monitors a globus download and removes permissions on the dir/dataset when
     * the specified transfer task is completed.
     * 
     * @param crc
     * @param datasetId
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
     *                  Globus task to monitor.
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
3955
    @POST
3956
    @AuthRequired
3957
    @Path("{id}/monitorGlobusDownload")
3958
    @Consumes(MediaType.APPLICATION_JSON)
3959
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3960
            String jsonData) throws IOException, ExecutionException, InterruptedException {
3961

3962
        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
×
3963

3964
        if (!systemConfig.isGlobusDownload()) {
×
3965
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
3966
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
3967
        }
3968

3969
        // -------------------------------------
3970
        // (1) Get the user from the ContainerRequestContext
3971
        // -------------------------------------
3972
        User authUser;
3973
        authUser = getRequestUser(crc);
×
3974

3975
        // -------------------------------------
3976
        // (2) Get the Dataset Id
3977
        // -------------------------------------
3978
        Dataset dataset;
3979

3980
        try {
3981
            dataset = findDatasetOrDie(datasetId);
×
3982
        } catch (WrappedResponse wr) {
×
3983
            return wr.getResponse();
×
3984
        }
×
3985

3986
        // Async Call
3987
        globusService.globusDownload(jsonData, dataset, authUser);
×
3988

3989
        return ok("Async call to Globus Download started");
×
3990

3991
    }
3992

3993
    /**
3994
     * Add multiple Files to an existing Dataset
3995
     *
3996
     * @param idSupplied
3997
     * @param jsonData
3998
     * @return
3999
     */
4000
    @POST
4001
    @AuthRequired
4002
    @Path("{id}/addFiles")
4003
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4004
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
4005
            @FormDataParam("jsonData") String jsonData) {
4006

4007
        if (!systemConfig.isHTTPUpload()) {
×
4008
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4009
        }
4010

4011
        // -------------------------------------
4012
        // (1) Get the user from the ContainerRequestContext
4013
        // -------------------------------------
4014
        User authUser;
4015
        authUser = getRequestUser(crc);
×
4016

4017
        // -------------------------------------
4018
        // (2) Get the Dataset Id
4019
        // -------------------------------------
4020
        Dataset dataset;
4021

4022
        try {
4023
            dataset = findDatasetOrDie(idSupplied);
×
4024
        } catch (WrappedResponse wr) {
×
4025
            return wr.getResponse();
×
4026
        }
×
4027

4028
        dataset.getLocks().forEach(dl -> {
×
4029
            logger.info(dl.toString());
×
4030
        });
×
4031

4032
        //------------------------------------
4033
        // (2a) Make sure dataset does not have package file
4034
        // --------------------------------------
4035

4036
        for (DatasetVersion dv : dataset.getVersions()) {
×
4037
            if (dv.isHasPackageFile()) {
×
4038
                return error(Response.Status.FORBIDDEN,
×
4039
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4040
                );
4041
            }
4042
        }
×
4043

4044
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4045

4046
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4047
                dvRequest,
4048
                this.ingestService,
4049
                this.datasetService,
4050
                this.fileService,
4051
                this.permissionSvc,
4052
                this.commandEngine,
4053
                this.systemConfig
4054
        );
4055

4056
        return addFileHelper.addFiles(jsonData, dataset, authUser);
×
4057

4058
    }
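
    // Illustrative sketch (added for documentation, not part of the original source): a "jsonData"
    // value for the /addFiles endpoint above, registering one previously uploaded file. The exact
    // set of supported keys is defined by OptionalFileParams/AddReplaceFileHelper; the keys shown
    // here follow the Dataverse direct-upload documentation and all values are made up.
    private static String buildAddFilesJsonDataExample() {
        JsonArrayBuilder files = Json.createArrayBuilder()
                .add(Json.createObjectBuilder()
                        .add("storageIdentifier", "s3://demo-bucket:1803fa0a141-ab1bc4a31af2") // hypothetical
                        .add("fileName", "data.csv")
                        .add("mimeType", "text/csv")
                        .add("description", "Example file entry")
                        .add("checksum", Json.createObjectBuilder()
                                .add("@type", "MD5")
                                .add("@value", "0123456789abcdef0123456789abcdef")));
        return files.build().toString();
    }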
4059

4060
    /**
     * Replace multiple Files in an existing Dataset
     *
     * @param idSupplied
     * @param jsonData
     * @return
     */
4067
    @POST
4068
    @AuthRequired
4069
    @Path("{id}/replaceFiles")
4070
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4071
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
4072
                                          @PathParam("id") String idSupplied,
4073
                                          @FormDataParam("jsonData") String jsonData) {
4074

4075
        if (!systemConfig.isHTTPUpload()) {
×
4076
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4077
        }
4078

4079
        // -------------------------------------
4080
        // (1) Get the user from the ContainerRequestContext
4081
        // -------------------------------------
4082
        User authUser;
4083
        authUser = getRequestUser(crc);
×
4084

4085
        // -------------------------------------
4086
        // (2) Get the Dataset Id
4087
        // -------------------------------------
4088
        Dataset dataset;
4089

4090
        try {
4091
            dataset = findDatasetOrDie(idSupplied);
×
4092
        } catch (WrappedResponse wr) {
×
4093
            return wr.getResponse();
×
4094
        }
×
4095

4096
        dataset.getLocks().forEach(dl -> {
×
4097
            logger.info(dl.toString());
×
4098
        });
×
4099

4100
        //------------------------------------
4101
        // (2a) Make sure dataset does not have package file
4102
        // --------------------------------------
4103

4104
        for (DatasetVersion dv : dataset.getVersions()) {
×
4105
            if (dv.isHasPackageFile()) {
×
4106
                return error(Response.Status.FORBIDDEN,
×
4107
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4108
                );
4109
            }
4110
        }
×
4111

4112
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4113

4114
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4115
                dvRequest,
4116
                this.ingestService,
4117
                this.datasetService,
4118
                this.fileService,
4119
                this.permissionSvc,
4120
                this.commandEngine,
4121
                this.systemConfig
4122
        );
4123

4124
        return addFileHelper.replaceFiles(jsonData, dataset, authUser);
×
4125

4126
    }

    /**
     * API to find curation assignments and statuses
     *
     * @return a CSV listing every dataset with a draft version, its creation and modification
     *         dates, its curation status, and the assignees of each curation-related role
     * @throws WrappedResponse
     */
    @GET
    @AuthRequired
    @Path("/listCurationStates")
    @Produces("text/csv")
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {

        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        List<DataverseRole> allRoles = dataverseRoleService.findAll();
        List<DataverseRole> curationRoles = new ArrayList<>();
        allRoles.forEach(r -> {
            if (r.permissions().contains(Permission.PublishDataset))
                curationRoles.add(r);
        });
        HashMap<String, HashSet<String>> assignees = new HashMap<>();
        curationRoles.forEach(r -> {
            assignees.put(r.getAlias(), null);
        });

        StringBuilder csvSB = new StringBuilder(String.join(",",
                BundleUtil.getStringFromBundle("dataset"),
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
                String.join(",", assignees.keySet())));
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
            curationRoles.forEach(r -> {
                assignees.put(r.getAlias(), new HashSet<>());
            });
            for (RoleAssignment ra : ras) {
                if (curationRoles.contains(ra.getRole())) {
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
                }
            }
            DatasetVersion dsv = dataset.getLatestVersion();
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
            String status = dsv.getExternalStatusLabel();
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
            List<String> sList = new ArrayList<>();
            assignees.entrySet().forEach(e -> sList.add(e.getValue().isEmpty() ? "" : String.join(";", e.getValue())));
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
        }
        csvSB.append("\n");
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
    }
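    // Illustrative invocation of getCurationStates (a sketch; superuser only, returns CSV;
    // placeholders in caps):
    //
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" \
    //        "$SERVER_URL/api/datasets/listCurationStates" -o datasets.status.csv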

    // APIs to manage archival status

    @GET
    @AuthRequired
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
            DataverseRequest req = createDataverseRequest(au);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
                    headers);

            if (dsv.getArchivalCopyLocation() == null) {
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
            } else {
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
                return ok(status);
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @PUT
    @AuthRequired
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    String newStatus,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        logger.fine(newStatus);
        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);

            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }

            // Verify we have valid JSON after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
            JsonObject update = JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));

            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {

                    DataverseRequest req = createDataverseRequest(au);
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
                            uriInfo, headers);

                    if (dsv == null) {
                        return error(Status.NOT_FOUND, "Dataset version not found");
                    }
                    if (isSingleVersionArchiving()) {
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
                                return error(Status.CONFLICT, "Dataset already archived.");
                            }
                        }
                    }

                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
                    dsv = datasetversionService.merge(dsv);
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());

                    return ok("Status updated");
                }
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (JsonException | IllegalStateException ex) {
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
        }
        return error(Status.BAD_REQUEST, "Unacceptable status format");
    }
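    // Illustrative use of the archival-status endpoints (a sketch; the JSON keys are the values
    // of DatasetVersion.ARCHIVAL_STATUS and ARCHIVAL_STATUS_MESSAGE, assumed here to be "status"
    // and "message", and the status value must be pending, success, or failure):
    //
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$ID/$VERSION/archivalStatus"
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X PUT -H "Content-Type: application/json" \
    //        -d '{"status":"success","message":"Archived successfully"}' \
    //        "$SERVER_URL/api/datasets/$ID/$VERSION/archivalStatus"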

    @DELETE
    @AuthRequired
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                       @PathParam("id") String datasetId,
                                                       @PathParam("version") String versionNumber,
                                                       @Context UriInfo uriInfo,
                                                       @Context HttpHeaders headers) {

        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }

            DataverseRequest req = createDataverseRequest(au);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
                    headers);
            if (dsv == null) {
                return error(Status.NOT_FOUND, "Dataset version not found");
            }
            dsv.setArchivalCopyLocation(null);
            dsv = datasetversionService.merge(dsv);

            return ok("Status deleted");

        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    private boolean isSingleVersionArchiving() {
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
        if (className != null) {
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
            try {
                clazz = Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
            } catch (ClassNotFoundException e) {
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
            } catch (ClassCastException cce) {
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
            }
        }
        return false;
    }

    // This method provides a callback for an external tool to retrieve its
    // parameters/API URLs. If the request is authenticated, e.g. by being
    // signed, the API URLs will be signed. If a guest request is made, the URLs
    // will be plain/unsigned.
    // This supports both the case where a tool is accessing a restricted resource
    // (e.g. a draft dataset) and the public case.
    @GET
    @AuthRequired
    @Path("{id}/versions/{version}/toolparams/{tid}")
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
                                            @PathParam("tid") long externalToolId,
                                            @PathParam("id") String datasetId,
                                            @PathParam("version") String version,
                                            @QueryParam(value = "locale") String locale) {
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
            if (target == null) {
                return error(BAD_REQUEST, "DatasetVersion not found.");
            }

            ExternalTool externalTool = externalToolService.findById(externalToolId);
            if (externalTool == null) {
                return error(BAD_REQUEST, "External tool not found.");
            }
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
            }
            User u = getRequestUser(crc);
            ApiToken apiToken = authSvc.getValidApiTokenForUser(u);

            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }
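    // Illustrative call for the tool-parameters callback above (a sketch; the unsigned, guest
    // form is shown, placeholders in caps). The response is the POST body the external tool
    // receives: its configured parameters plus, for authenticated requests, signed URLs for the
    // tool's allowedApiCalls.
    //
    //   curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/toolparams/$TOOL_ID?locale=en"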

    @GET
    @Path("summaryFieldNames")
    public Response getDatasetSummaryFieldNames() {
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
        for (String fieldName : fieldNames) {
            fieldNamesArrayBuilder.add(fieldName);
        }
        return ok(fieldNamesArrayBuilder);
    }
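    // Illustrative call (a sketch; no API token is needed for this endpoint). Returns a JSON
    // array of the dataset summary field names, honoring the CustomDatasetSummaryFields setting
    // when it is defined.
    //
    //   curl "$SERVER_URL/api/datasets/summaryFieldNames"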

    @GET
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
        if (isAnonymizedAccess && anonymizedFieldTypeNames == null) {
            throw new NotAcceptableException("Anonymized Access not enabled");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
        if (dsv == null || dsv.getId() == null) {
            return notFound("Dataset version not found");
        }
        JsonObjectBuilder responseJson;
        if (isAnonymizedAccess) {
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
        } else {
            responseJson = json(dsv, null, true, returnOwners);
        }
        return ok(responseJson);
    }
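    // Illustrative call (a sketch; the token is the one embedded in a dataset's Private URL).
    // For anonymized-access tokens, the field types named in the AnonymizedFieldTypeNames
    // setting are withheld; if that setting is absent the request is rejected.
    //
    //   curl "$SERVER_URL/api/datasets/privateUrlDatasetVersion/$PRIVATE_URL_TOKEN?returnOwners=true"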

    @GET
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/citation")
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
                                              @PathParam("id") String datasetId,
                                              @PathParam("versionId") String versionId,
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                              @Context UriInfo uriInfo,
                                              @Context HttpHeaders headers) {
        return response(req -> ok(
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned, false).getCitation(true, false)), getRequestUser(crc));
    }
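    // Illustrative call for the citation endpoint above (a sketch; $VERSION may be a published
    // version number such as 1.0 or one of the DS_VERSION_* labels used elsewhere in this class,
    // subject to the caller's permissions):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$ID/versions/$VERSION/citation?includeDeaccessioned=true"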

    @POST
    @AuthRequired
    @Path("{id}/versions/{versionId}/deaccession")
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
        }
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            try {
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
                if (deaccessionForwardURL != null) {
                    try {
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
                    } catch (IllegalArgumentException iae) {
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
                    }
                }
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
                return ok("Dataset " + datasetId + " deaccessioned for version " + versionId);
            } catch (JsonParsingException jpe) {
                return error(Response.Status.BAD_REQUEST, "Error parsing JSON: " + jpe.getMessage());
            }
        }, getRequestUser(crc));
    }
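    // Illustrative request for the deaccession endpoint above (a sketch; the version must be a
    // published one rather than the DS_VERSION_DRAFT or DS_VERSION_LATEST labels, and
    // deaccessionForwardURL is optional):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" \
    //        -d '{"deaccessionReason":"Superseded by a corrected version","deaccessionForwardURL":"https://example.org/new-location"}' \
    //        "$SERVER_URL/api/datasets/$ID/versions/1.0/deaccession"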

    @GET
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
        if (gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
        }
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
    }

    @PUT
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                               boolean gbAtRequest,
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
        if (!gbAtRequestOpt.isPresent()) {
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
        }
        String choice = Boolean.valueOf(gbAtRequest).toString();
        dataset.setGuestbookEntryAtRequest(choice);
        datasetService.merge(dataset);
        return ok("Guestbook Entry At Request set to: " + choice);
    }

    @DELETE
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }

        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
        datasetService.merge(dataset);
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
    }
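    // Illustrative use of the guestbookEntryAtRequest endpoints above (a sketch; PUT and DELETE
    // are superuser-only, and PUT is refused unless the JvmSettings.GUESTBOOK_AT_REQUEST option
    // is configured on the server). The PUT body is a bare true/false value.
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/guestbookEntryAtRequest"
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X PUT -d true \
    //        "$SERVER_URL/api/datasets/$ID/guestbookEntryAtRequest"
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X DELETE \
    //        "$SERVER_URL/api/datasets/$ID/guestbookEntryAtRequest"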

    @GET
    @AuthRequired
    @Path("{id}/userPermissions")
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
        Dataset dataset;
        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        User requestUser = getRequestUser(crc);
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
        return ok(jsonObjectBuilder);
    }
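    // Illustrative call and response shape for the endpoint above (a sketch; the data object of
    // the standard OK envelope carries one boolean per permission checked in the code, with
    // values depending on the caller):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/userPermissions"
    //
    //   {"canViewUnpublishedDataset":true,"canEditDataset":true,"canPublishDataset":false,
    //    "canManageDatasetPermissions":false,"canDeleteDatasetDraft":true}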

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
                                                 @PathParam("id") String datasetId,
                                                 @PathParam("versionId") String versionId,
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                                 @Context UriInfo uriInfo,
                                                 @Context HttpHeaders headers) {
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
        }, getRequestUser(crc));
    }
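    // Illustrative call (a sketch): returns true when the requesting user may download at least
    // one file in the given version.
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" \
    //        "$SERVER_URL/api/datasets/$ID/versions/$VERSION/canDownloadAtLeastOneFile"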

    /**
     * Get the PidProvider that will be used for generating new PIDs in this dataset
     *
     * @return the id of the effective PID generator for the given dataset
     * @throws WrappedResponse
     */
    @GET
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
            @Context HttpHeaders headers) throws WrappedResponse {

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
        if (pidProvider == null) {
            // This is basically a config error, e.g. if a valid PID provider was removed after this dataset used it
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
        }
        String pidGeneratorId = pidProvider.getId();
        return ok(pidGeneratorId);
    }

    @PUT
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
            dataset.setPidGeneratorId(generatorId);
            datasetService.merge(dataset);
            return ok("PID Generator set to: " + generatorId);
        } else {
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the given id");
        }

    }

    @DELETE
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
            @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }

        dataset.setPidGenerator(null);
        datasetService.merge(dataset);
        return ok("PID Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
    }
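    // Illustrative use of the pidGenerator endpoints above (a sketch; PUT and DELETE are
    // superuser-only, and the PUT body is the id of a PID provider managed by this server):
    //
    //   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/pidGenerator"
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X PUT -d "$GENERATOR_ID" \
    //        "$SERVER_URL/api/datasets/$ID/pidGenerator"
    //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" -X DELETE \
    //        "$SERVER_URL/api/datasets/$ID/pidGenerator"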
}