IQSS / dataverse / #23111

10 Sep 2024 03:14PM CUT coverage: 20.681% (-0.05%) from 20.734%

Pull Request #10781 (GitHub): Improved handling of Globus uploads
Commit by landreev: Merge branch 'develop' into 10623-globus-improvements (#10623)
Resolved conflicts: src/main/java/edu/harvard/iq/dataverse/api/Datasets.java

4 of 417 new or added lines in 15 files covered. (0.96%)

9 existing lines in 3 files now uncovered.

17550 of 84861 relevant lines covered (20.68%)

0.21 hits per line
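
For reference, the percentages above follow directly from the line counts: 17550 / 84861 ≈ 0.2068, i.e. 20.68% of relevant lines covered, and 4 / 417 ≈ 0.0096, i.e. 0.96% of the new or added lines covered.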

Source File
/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.s3.model.PartETag;
4
import edu.harvard.iq.dataverse.*;
5
import edu.harvard.iq.dataverse.DatasetLock.Reason;
6
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
7
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
8
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
9
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
10
import edu.harvard.iq.dataverse.authorization.DataverseRole;
11
import edu.harvard.iq.dataverse.authorization.Permission;
12
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
13
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
14
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
15
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
16
import edu.harvard.iq.dataverse.authorization.users.User;
17
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
18
import edu.harvard.iq.dataverse.dataaccess.*;
19
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
20
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
21
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
22
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
23
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
24
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
25
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
26
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
27
import edu.harvard.iq.dataverse.engine.command.Command;
28
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
29
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
30
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
31
import edu.harvard.iq.dataverse.engine.command.impl.*;
32
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
33
import edu.harvard.iq.dataverse.export.ExportService;
34
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
35
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
36
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
37
import edu.harvard.iq.dataverse.globus.GlobusUtil;
38
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
39
import edu.harvard.iq.dataverse.makedatacount.*;
40
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
41
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
42
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
43
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
44
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
45
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
46
import edu.harvard.iq.dataverse.search.IndexServiceBean;
47
import edu.harvard.iq.dataverse.settings.FeatureFlags;
48
import edu.harvard.iq.dataverse.settings.JvmSettings;
49
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
50
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
51
import edu.harvard.iq.dataverse.util.*;
52
import edu.harvard.iq.dataverse.util.bagit.OREMap;
53
import edu.harvard.iq.dataverse.util.json.*;
54
import edu.harvard.iq.dataverse.workflow.Workflow;
55
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
56
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
57
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
58
import jakarta.ejb.EJB;
59
import jakarta.ejb.EJBException;
60
import jakarta.inject.Inject;
61
import jakarta.json.*;
62
import jakarta.json.stream.JsonParsingException;
63
import jakarta.servlet.http.HttpServletRequest;
64
import jakarta.servlet.http.HttpServletResponse;
65
import jakarta.ws.rs.*;
66
import jakarta.ws.rs.container.ContainerRequestContext;
67
import jakarta.ws.rs.core.*;
68
import jakarta.ws.rs.core.Response.Status;
69
import org.apache.commons.lang3.StringUtils;
70
import org.eclipse.microprofile.openapi.annotations.Operation;
71
import org.eclipse.microprofile.openapi.annotations.media.Content;
72
import org.eclipse.microprofile.openapi.annotations.media.Schema;
73
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
74
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
75
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
76
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
77
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
78
import org.glassfish.jersey.media.multipart.FormDataParam;
79

80
import java.io.IOException;
81
import java.io.InputStream;
82
import java.net.URI;
83
import java.sql.Timestamp;
84
import java.text.MessageFormat;
85
import java.text.SimpleDateFormat;
86
import java.time.LocalDate;
87
import java.time.LocalDateTime;
88
import java.time.ZoneId;
89
import java.time.format.DateTimeFormatter;
90
import java.time.format.DateTimeParseException;
91
import java.util.*;
92
import java.util.Map.Entry;
93
import java.util.concurrent.ExecutionException;
94
import java.util.function.Predicate;
95
import java.util.logging.Level;
96
import java.util.logging.Logger;
97
import java.util.regex.Pattern;
98
import java.util.stream.Collectors;
99

100
import static edu.harvard.iq.dataverse.api.ApiConstants.*;
101
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
102
import edu.harvard.iq.dataverse.dataset.DatasetType;
103
import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean;
104
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
105
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
106
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
107
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
108

109
@Path("datasets")
110
public class Datasets extends AbstractApiBean {
×
111

112
    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
1✔
113
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");
1✔
114
    
115
    @Inject DataverseSession session;
116

117
    @EJB
118
    DatasetServiceBean datasetService;
119

120
    @EJB
121
    DataverseServiceBean dataverseService;
122
    
123
    @EJB
124
    GlobusServiceBean globusService;
125

126
    @EJB
127
    UserNotificationServiceBean userNotificationService;
128
    
129
    @EJB
130
    PermissionServiceBean permissionService;
131
    
132
    @EJB
133
    AuthenticationServiceBean authenticationServiceBean;
134
    
135
    @EJB
136
    DDIExportServiceBean ddiExportService;
137

138
    @EJB
139
    MetadataBlockServiceBean metadataBlockService;
140
    
141
    @EJB
142
    DataFileServiceBean fileService;
143

144
    @EJB
145
    IngestServiceBean ingestService;
146

147
    @EJB
148
    EjbDataverseEngine commandEngine;
149
    
150
    @EJB
151
    IndexServiceBean indexService;
152

153
    @EJB
154
    S3PackageImporter s3PackageImporter;
155
     
156
    @EJB
157
    SettingsServiceBean settingsService;
158

159
    // TODO: Move to AbstractApiBean
160
    @EJB
161
    DatasetMetricsServiceBean datasetMetricsSvc;
162
    
163
    @EJB
164
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;
165

166
    @EJB
167
    EmbargoServiceBean embargoService;
168

169
    @EJB
170
    RetentionServiceBean retentionService;
171

172
    @Inject
173
    MakeDataCountLoggingServiceBean mdcLogService;
174
    
175
    @Inject
176
    DataverseRequestServiceBean dvRequestService;
177

178
    @Inject
179
    WorkflowServiceBean wfService;
180
    
181
    @Inject
182
    DataverseRoleServiceBean dataverseRoleService;
183

184
    @EJB
185
    DatasetVersionServiceBean datasetversionService;
186

187
    @Inject
188
    PrivateUrlServiceBean privateUrlService;
189

190
    @Inject
191
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;
192

193
    @Inject
194
    DatasetTypeServiceBean datasetTypeSvc;
195

196
    /**
197
     * Used to consolidate the way we parse and handle dataset versions.
198
     * @param <T> 
199
     */
200
    public interface DsVersionHandler<T> {
201
        T handleLatest();
202
        T handleDraft();
203
        T handleSpecific( long major, long minor );
204
        T handleLatestPublished();
205
    }
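    // Usage sketch (illustrative, with example version strings): callers map a version
    // identifier to one of the handler methods, e.g. ":draft" -> handleDraft(),
    // ":latest" -> handleLatest(), ":latest-published" -> handleLatestPublished(),
    // and a numeric "major.minor" such as "1.2" -> handleSpecific(1, 2).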
206
    
207
    @GET
208
    @AuthRequired
209
    @Path("{id}")
210
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response,  @QueryParam("returnOwners") boolean returnOwners) {
211
        return response( req -> {
×
212
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true)));
×
213
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
214
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
×
215
            //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
216
            if((latest != null) && latest.isReleased()) {
×
217
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
×
218
                mdcLogService.logEntry(entry);
×
219
            }
220
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
×
221
        }, getRequestUser(crc));
×
222
    }
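    // Illustrative request for this endpoint (hypothetical host, dataset id, and token):
    //   curl -H "X-Dataverse-key: $API_TOKEN" \
    //        "https://demo.dataverse.org/api/datasets/24?returnOwners=true"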
223
    
224
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand 
225
    // to obtain the dataset that we are trying to export - which would handle
226
    // Auth in the process... For now, Auth isn't necessary - since export ONLY 
227
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
228
    @GET
229
    @Path("/export")
230
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" })
231
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
232

233
        try {
234
            Dataset dataset = datasetService.findByGlobalId(persistentId);
×
235
            if (dataset == null) {
×
236
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
237
            }
238
            
239
            ExportService instance = ExportService.getInstance();
×
240
            
241
            InputStream is = instance.getExport(dataset, exporter);
×
242
           
243
            String mediaType = instance.getMediaType(exporter);
×
244
            //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft 
245
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
×
246
            mdcLogService.logEntry(entry);
×
247
            
248
            return Response.ok()
×
249
                    .entity(is)
×
250
                    .type(mediaType).
×
251
                    build();
×
252
        } catch (Exception wr) {
×
253
            logger.warning(wr.getMessage());
×
254
            return error(Response.Status.FORBIDDEN, "Export Failed");
×
255
        }
256
    }
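    // Illustrative request (hypothetical host and DOI); export only works on published
    // datasets, so no API token is needed:
    //   curl "https://demo.dataverse.org/api/datasets/export?exporter=dataverse_json&persistentId=doi:10.5072/FK2/EXAMPLE"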
257

258
    @DELETE
259
    @AuthRequired
260
    @Path("{id}")
261
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
262
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
263
        // (and there's a comment that says "TODO: remove this command")
264
        // do we need an exposed API call for it? 
265
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, 
266
        // if the dataset only has 1 version... In other words, the functionality 
267
        // currently provided by this API is covered between the "deleteDraftVersion" and
268
        // "destroyDataset" API calls.  
269
        // (The logic below follows the current implementation of the underlying 
270
        // commands!)
271

272
        User u = getRequestUser(crc);
×
273
        return response( req -> {
×
274
            Dataset doomed = findDatasetOrDie(id);
×
275
            DatasetVersion doomedVersion = doomed.getLatestVersion();
×
276
            boolean destroy = false;
×
277
            
278
            if (doomed.getVersions().size() == 1) {
×
279
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
280
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
×
281
                }
282
                destroy = true;
×
283
            } else {
284
                if (!doomedVersion.isDraft()) {
×
285
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
×
286
                }
287
            }
288
            
289
            // Gather the locations of the physical files that will need to be 
290
            // deleted once the destroy command execution has been finalized:
291
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
×
292
            
293
            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));
×
294
            
295
            // If we have gotten this far, the destroy command has succeeded, 
296
            // so we can finalize it by permanently deleting the physical files:
297
            // (DataFileService will double-check that the datafiles no 
298
            // longer exist in the database, before attempting to delete 
299
            // the physical files)
300
            if (!deleteStorageLocations.isEmpty()) {
×
301
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
302
            }
303
            
304
            return ok("Dataset " + id + " deleted");
×
305
        }, u);
306
    }
307
        
308
    @DELETE
309
    @AuthRequired
310
    @Path("{id}/destroy")
311
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
312

313
        User u = getRequestUser(crc);
×
314
        return response(req -> {
×
315
            // first check if dataset is released, and if so, if user is a superuser
316
            Dataset doomed = findDatasetOrDie(id);
×
317

318
            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
319
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
×
320
            }
321

322
            // Gather the locations of the physical files that will need to be 
323
            // deleted once the destroy command execution has been finalized:
324
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
325

326
            execCommand(new DestroyDatasetCommand(doomed, req));
×
327

328
            // If we have gotten this far, the destroy command has succeeded, 
329
            // so we can finalize permanently deleting the physical files:
330
            // (DataFileService will double-check that the datafiles no 
331
            // longer exist in the database, before attempting to delete 
332
            // the physical files)
333
            if (!deleteStorageLocations.isEmpty()) {
×
334
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
335
            }
336

337
            return ok("Dataset " + id + " destroyed");
×
338
        }, u);
339
    }
340
    
341
    @DELETE
342
    @AuthRequired
343
    @Path("{id}/versions/{versionId}")
344
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
345
        if (!DS_VERSION_DRAFT.equals(versionId)) {
×
346
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
×
347
        }
348

349
        return response( req -> {
×
350
            Dataset dataset = findDatasetOrDie(id);
×
351
            DatasetVersion doomed = dataset.getLatestVersion();
×
352
            
353
            if (!doomed.isDraft()) {
×
354
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
×
355
            }
356
            
357
            // Gather the locations of the physical files that will need to be 
358
            // deleted once the destroy command execution has been finalized:
359
            
360
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
361
            
362
            execCommand( new DeleteDatasetVersionCommand(req, dataset));
×
363
            
364
            // If we have gotten this far, the delete command has succeeded - 
365
            // by either deleting the Draft version of a published dataset, 
366
            // or destroying an unpublished one. 
367
            // This means we can finalize permanently deleting the physical files:
368
            // (DataFileService will double-check that the datafiles no 
369
            // longer exist in the database, before attempting to delete 
370
            // the physical files)
371
            if (!deleteStorageLocations.isEmpty()) {
×
372
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
373
            }
374
            
375
            return ok("Draft version of dataset " + id + " deleted");
×
376
        }, getRequestUser(crc));
×
377
    }
378
        
379
    @DELETE
380
    @AuthRequired
381
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
382
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
383
        boolean index = true;
×
384
        return response(req -> {
×
385
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
386
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
387
        }, getRequestUser(crc));
×
388
    }
389
        
390
    @PUT
391
    @AuthRequired
392
    @Path("{id}/citationdate")
393
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
394
        return response( req -> {
×
395
            if ( dsfTypeName.trim().isEmpty() ){
×
396
                return badRequest("Please provide a dataset field type in the requst body.");
×
397
            }
398
            DatasetFieldType dsfType = null;
×
399
            if (!":publicationDate".equals(dsfTypeName)) {
×
400
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
401
                if (dsfType == null) {
×
402
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
403
                }
404
            }
405

406
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
407
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
408
        }, getRequestUser(crc));
×
409
    }
410
    
411
    @DELETE
412
    @AuthRequired
413
    @Path("{id}/citationdate")
414
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
415
        return response( req -> {
×
416
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
417
            return ok("Citation Date for dataset " + id + " set to default");
×
418
        }, getRequestUser(crc));
×
419
    }
420
    
421
    @GET
422
    @AuthRequired
423
    @Path("{id}/versions")
424
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {
425

426
        return response( req -> {
×
427
            Dataset dataset = findDatasetOrDie(id);
×
428
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;
×
429

430
            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
×
431
                                .stream()
×
432
                                .map( d -> json(d, deepLookup) )
×
433
                                .collect(toJsonArray()));
×
434
        }, getRequestUser(crc));
×
435
    }
436
    
437
    @GET
438
    @AuthRequired
439
    @Path("{id}/versions/{versionId}")
440
    public Response getVersion(@Context ContainerRequestContext crc,
441
                               @PathParam("id") String datasetId,
442
                               @PathParam("versionId") String versionId,
443
                               @QueryParam("excludeFiles") Boolean excludeFiles,
444
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
445
                               @QueryParam("returnOwners") boolean returnOwners,
446
                               @Context UriInfo uriInfo,
447
                               @Context HttpHeaders headers) {
448
        return response( req -> {
×
449
            
450
            //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. 
451
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;
×
452
            
453
            Dataset dataset = findDatasetOrDie(datasetId);
×
454
            DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, 
×
455
                                                                            versionId, 
456
                                                                            dataset, 
457
                                                                            uriInfo, 
458
                                                                            headers, 
459
                                                                            includeDeaccessioned,
460
                                                                            checkPerms);
461

462
            if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) {
×
463
                return notFound("Dataset version not found");
×
464
            }
465

466
            if (excludeFiles == null ? true : !excludeFiles) {
×
467
                requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId());
×
468
            }
469

470
            JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
×
471
                                                 null, 
472
                                                 excludeFiles == null ? true : !excludeFiles, 
×
473
                                                 returnOwners);
474
            return ok(jsonBuilder);
×
475

476
        }, getRequestUser(crc));
×
477
    }
478

479
    @GET
480
    @AuthRequired
481
    @Path("{id}/versions/{versionId}/files")
482
    public Response getVersionFiles(@Context ContainerRequestContext crc,
483
                                    @PathParam("id") String datasetId,
484
                                    @PathParam("versionId") String versionId,
485
                                    @QueryParam("limit") Integer limit,
486
                                    @QueryParam("offset") Integer offset,
487
                                    @QueryParam("contentType") String contentType,
488
                                    @QueryParam("accessStatus") String accessStatus,
489
                                    @QueryParam("categoryName") String categoryName,
490
                                    @QueryParam("tabularTagName") String tabularTagName,
491
                                    @QueryParam("searchText") String searchText,
492
                                    @QueryParam("orderCriteria") String orderCriteria,
493
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
494
                                    @Context UriInfo uriInfo,
495
                                    @Context HttpHeaders headers) {
496
        return response(req -> {
×
497
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned);
×
498
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
499
            try {
500
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
×
501
            } catch (IllegalArgumentException e) {
×
502
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
×
503
            }
×
504
            FileSearchCriteria fileSearchCriteria;
505
            try {
506
                fileSearchCriteria = new FileSearchCriteria(
×
507
                        contentType,
508
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
509
                        categoryName,
510
                        tabularTagName,
511
                        searchText
512
                );
513
            } catch (IllegalArgumentException e) {
×
514
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
515
            }
×
516
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
×
517
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
518
        }, getRequestUser(crc));
×
519
    }
520

521
    @GET
522
    @AuthRequired
523
    @Path("{id}/versions/{versionId}/files/counts")
524
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
525
                                         @PathParam("id") String datasetId,
526
                                         @PathParam("versionId") String versionId,
527
                                         @QueryParam("contentType") String contentType,
528
                                         @QueryParam("accessStatus") String accessStatus,
529
                                         @QueryParam("categoryName") String categoryName,
530
                                         @QueryParam("tabularTagName") String tabularTagName,
531
                                         @QueryParam("searchText") String searchText,
532
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
533
                                         @Context UriInfo uriInfo,
534
                                         @Context HttpHeaders headers) {
535
        return response(req -> {
×
536
            FileSearchCriteria fileSearchCriteria;
537
            try {
538
                fileSearchCriteria = new FileSearchCriteria(
×
539
                        contentType,
540
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
541
                        categoryName,
542
                        tabularTagName,
543
                        searchText
544
                );
545
            } catch (IllegalArgumentException e) {
×
546
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
547
            }
×
548
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
549
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
550
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
551
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
×
552
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
×
553
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
×
554
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
×
555
            return ok(jsonObjectBuilder);
×
556
        }, getRequestUser(crc));
×
557
    }
558

559
    @GET
560
    @AuthRequired
561
    @Path("{id}/dirindex")
562
    @Produces("text/html")
563
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
564

565
        folderName = folderName == null ? "" : folderName;
×
566
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
×
567
        
568
        DatasetVersion version;
569
        try {
570
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
571
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
572
        } catch (WrappedResponse wr) {
×
573
            return wr.getResponse();
×
574
        }
×
575
        
576
        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);
×
577
        
578
        // return "NOT FOUND" if there is no such folder in the dataset version:
579
        
580
        if ("".equals(output)) {
×
581
            return notFound("Folder " + folderName + " does not exist");
×
582
        }
583
        
584
        
585
        String indexFileName = folderName.equals("") ? ".index.html"
×
586
                : ".index-" + folderName.replace('/', '_') + ".html";
×
587
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");
×
588

589
        
590
        return Response.ok()
×
591
                .entity(output)
×
592
                //.type("application/html").
593
                .build();
×
594
    }
595
    
596
    @GET
597
    @AuthRequired
598
    @Path("{id}/versions/{versionId}/metadata")
599
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
600
        return response( req -> ok(
×
601
                    jsonByBlocks(
×
602
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
×
603
                                .getDatasetFields())), getRequestUser(crc));
×
604
    }
605
    
606
    @GET
607
    @AuthRequired
608
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
609
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
610
                                            @PathParam("id") String datasetId,
611
                                            @PathParam("versionNumber") String versionNumber,
612
                                            @PathParam("block") String blockName,
613
                                            @Context UriInfo uriInfo,
614
                                            @Context HttpHeaders headers) {
615
        
616
        return response( req -> {
×
617
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
×
618
            
619
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
620
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
×
621
                if ( p.getKey().getName().equals(blockName) ) {
×
622
                    return ok(json(p.getKey(), p.getValue()));
×
623
                }
624
            }
×
625
            return notFound("metadata block named " + blockName + " not found");
×
626
        }, getRequestUser(crc));
×
627
    }
628

629
    /**
630
     * Add Signposting
631
     * @param datasetId
632
     * @param versionId
633
     * @param uriInfo
634
     * @param headers
635
     * @return
636
     */
637
    @GET
638
    @AuthRequired
639
    @Path("{id}/versions/{versionId}/linkset")
640
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, 
641
           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
642
        if (DS_VERSION_DRAFT.equals(versionId)) {
×
643
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
×
644
        }
645
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
646
        try {
647
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
648
            return Response
×
649
                    .ok(Json.createObjectBuilder()
×
650
                            .add("linkset",
×
651
                                    new SignpostingResources(systemConfig, dsv,
652
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
×
653
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
×
654
                                                    .getJsonLinkset())
×
655
                            .build())
×
656
                    .type(MediaType.APPLICATION_JSON).build();
×
657
        } catch (WrappedResponse wr) {
×
658
            return wr.getResponse();
×
659
        }
660
    }
661

662
    @GET
663
    @AuthRequired
664
    @Path("{id}/modifyRegistration")
665
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
666
        return response( req -> {
×
667
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
668
            return ok("Dataset " + id + " target url updated");
×
669
        }, getRequestUser(crc));
×
670
    }
671
    
672
    @POST
673
    @AuthRequired
674
    @Path("/modifyRegistrationAll")
675
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
676
        return response( req -> {
×
677
            datasetService.findAll().forEach( ds -> {
×
678
                try {
679
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
680
                } catch (WrappedResponse ex) {
×
681
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
682
                }
×
683
            });
×
684
            return ok("Update All Dataset target url completed");
×
685
        }, getRequestUser(crc));
×
686
    }
687
    
688
    @POST
689
    @AuthRequired
690
    @Path("{id}/modifyRegistrationMetadata")
691
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {
692

693
        try {
694
            Dataset dataset = findDatasetOrDie(id);
×
695
            if (!dataset.isReleased()) {
×
696
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
×
697
            }
698
        } catch (WrappedResponse ex) {
×
699
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
700
        }
×
701

702
        return response(req -> {
×
703
            Dataset dataset = findDatasetOrDie(id);
×
704
            execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req));
×
705
            List<String> args = Arrays.asList(dataset.getIdentifier());
×
706
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
×
707
        }, getRequestUser(crc));
×
708
    }
709
    
710
    @GET
711
    @AuthRequired
712
    @Path("/modifyRegistrationPIDMetadataAll")
713
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
714
        return response( req -> {
×
715
            datasetService.findAll().forEach( ds -> {
×
716
                try {
717
                    logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier());
×
718
                    if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) {
×
719
                        if (ds.isReleased()) {
×
720
                            logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered");
×
721
                        }
722
                    } else {
723
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
×
724
                    }
725
                } catch (WrappedResponse ex) {
×
726
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
727
                }
×
728
            });
×
729
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
×
730
        }, getRequestUser(crc));
×
731
    }
732
  
733
    @PUT
734
    @AuthRequired
735
    @Path("{id}/versions/{versionId}")
736
    @Consumes(MediaType.APPLICATION_JSON)
737
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
738
        if (!DS_VERSION_DRAFT.equals(versionId)) {
×
739
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
×
740
        }
741
        
742
        try {
743
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
744
            Dataset ds = findDatasetOrDie(id);
×
745
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
746
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
×
747
            
748
            // clear possibly stale fields from the incoming dataset version.
749
            // creation and modification dates are updated by the commands.
750
            incomingVersion.setId(null);
×
751
            incomingVersion.setVersionNumber(null);
×
752
            incomingVersion.setMinorVersionNumber(null);
×
753
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
×
754
            incomingVersion.setDataset(ds);
×
755
            incomingVersion.setCreateTime(null);
×
756
            incomingVersion.setLastUpdateTime(null);
×
757
            
758
            if (!incomingVersion.getFileMetadatas().isEmpty()){
×
759
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
×
760
            }
761
            
762
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
763
            
764
            DatasetVersion managedVersion;
765
            if (updateDraft) {
×
766
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
×
767
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
×
768
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
×
769
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
×
770
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
×
771
                if (!hasValidTerms) {
×
772
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
773
                }
774
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
775
                managedVersion = managedDataset.getOrCreateEditVersion();
×
776
            } else {
×
777
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
×
778
                if (!hasValidTerms) {
×
779
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
780
                }
781
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
×
782
            }
783
            return ok( json(managedVersion, true) );
×
784
                    
785
        } catch (JsonParseException ex) {
×
786
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
×
787
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );
×
788
            
789
        } catch (WrappedResponse ex) {
×
790
            return ex.getResponse();
×
791
            
792
        }
793
    }
794

795
    @GET
796
    @AuthRequired
797
    @Path("{id}/versions/{versionId}/metadata")
798
    @Produces("application/ld+json, application/json-ld")
799
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
800
        try {
801
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
802
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
×
803
            OREMap ore = new OREMap(dsv,
×
804
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
×
805
            return ok(ore.getOREMapBuilder(true));
×
806

807
        } catch (WrappedResponse ex) {
×
808
            ex.printStackTrace();
×
809
            return ex.getResponse();
×
810
        } catch (Exception jpe) {
×
811
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
×
812
            jpe.printStackTrace();
×
813
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
×
814
        }
815
    }
816

817
    @GET
818
    @AuthRequired
819
    @Path("{id}/metadata")
820
    @Produces("application/ld+json, application/json-ld")
821
    public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
822
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
×
823
    }
824

825
    @PUT
826
    @AuthRequired
827
    @Path("{id}/metadata")
828
    @Consumes("application/ld+json, application/json-ld")
829
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
830

831
        try {
832
            Dataset ds = findDatasetOrDie(id);
×
833
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
834
            //Get draft state as of now
835

836
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
837
            //Get the current draft or create a new version to update
838
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
839
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
×
840
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
841
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
×
842
            if (!hasValidTerms) {
×
843
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
844
            }
845
            DatasetVersion managedVersion;
846
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
847
            managedVersion = managedDataset.getLatestVersion();
×
848
            String info = updateDraft ? "Version Updated" : "Version Created";
×
849
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
850

851
        } catch (WrappedResponse ex) {
×
852
            return ex.getResponse();
×
853
        } catch (JsonParsingException jpe) {
×
854
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
855
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
856
        }
857
    }
858

859
    @PUT
860
    @AuthRequired
861
    @Path("{id}/metadata/delete")
862
    @Consumes("application/ld+json, application/json-ld")
863
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
864
        try {
865
            Dataset ds = findDatasetOrDie(id);
×
866
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
867
            //Get draft state as of now
868

869
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
870
            //Get the current draft or create a new version to update
871
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
872
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
×
873
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
874
            DatasetVersion managedVersion;
875
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
876
            managedVersion = managedDataset.getLatestVersion();
×
877
            String info = updateDraft ? "Version Updated" : "Version Created";
×
878
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
879

880
        } catch (WrappedResponse ex) {
×
881
            ex.printStackTrace();
×
882
            return ex.getResponse();
×
883
        } catch (JsonParsingException jpe) {
×
884
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
885
            jpe.printStackTrace();
×
886
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
887
        }
888
    }
889

890
    @PUT
891
    @AuthRequired
892
    @Path("{id}/deleteMetadata")
893
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
894

895
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
896

897
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
898
    }
899

900
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
901
        try {
902

903
            Dataset ds = findDatasetOrDie(id);
×
904
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
905
            //Get the current draft or create a new version to update
906
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
907
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
908
            List<DatasetField> fields = new LinkedList<>();
×
909
            DatasetField singleField = null;
×
910

911
            JsonArray fieldsJson = json.getJsonArray("fields");
×
912
            if (fieldsJson == null) {
×
913
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
914
                fields.add(singleField);
×
915
            } else {
916
                fields = jsonParser().parseMultipleFields(json);
×
917
            }
918

919
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
920

921
            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
×
922
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
×
923
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();
×
924

925
            for (DatasetField updateField : fields) {
×
926
                boolean found = false;
×
927
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
928
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
929
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
930
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
931
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
932
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
933
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
×
934
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
×
935
                                                found = true;
×
936
                                                controlledVocabularyItemsToRemove.add(existing);
×
937
                                            }
938
                                        }
×
939
                                        if (!found) {
×
940
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
×
941
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
×
942
                                        }
943
                                    }
×
944
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
×
945
                                        dsf.getControlledVocabularyValues().remove(remove);
×
946
                                    }
×
947

948
                                } else {
949
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
×
950
                                        found = true;
×
951
                                        dsf.setSingleControlledVocabularyValue(null);
×
952
                                    }
953

954
                                }
955
                            } else {
956
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
957
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
958
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
959
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
×
960
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
×
961
                                                    found = true;
×
962
                                                    datasetFieldValueItemsToRemove.add(dfv);
×
963
                                                }
964
                                            }
×
965
                                            if (!found) {
×
966
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
×
967
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
×
968
                                            }
969
                                        }
×
970
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
×
971
                                            dsf.getDatasetFieldValues().remove(remove);
×
972
                                        });
×
973

974
                                    } else {
975
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
×
976
                                            found = true;
×
977
                                            dsf.setSingleValue(null);
×
978
                                        }
979

980
                                    }
981
                                } else {
982
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
983
                                        String deleteVal = getCompoundDisplayValue(dfcv);
×
984
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
×
985
                                            String existingString = getCompoundDisplayValue(existing);
×
986
                                            if (existingString.equals(deleteVal)) {
×
987
                                                found = true;
×
988
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
×
989
                                            }
990
                                        }
×
991
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
×
992
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
×
993
                                        });
×
994
                                        if (!found) {
×
995
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
×
996
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
×
997
                                        }
998
                                    }
×
999
                                }
1000
                            }
1001
                        } else {
1002
                            found = true;
×
1003
                            dsf.setSingleValue(null);
×
1004
                            dsf.setSingleControlledVocabularyValue(null);
×
1005
                        }
1006
                        break;
×
1007
                    }
1008
                }
×
1009
                if (!found){
×
1010
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
×
1011
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
×
1012
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
×
1013
                }
1014
            }
×
1015

1016

1017
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1018
            return ok(json(managedVersion, true));
×
1019

1020
        } catch (JsonParseException ex) {
×
1021
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1022
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());
×
1023

1024
        } catch (WrappedResponse ex) {
×
1025
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
×
1026
            return ex.getResponse();
×
1027

1028
        }
1029
    
1030
    }
1031
    
1032
    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
1033
        String returnString = "";
×
1034
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
×
1035
            for (String value : dsf.getValues()) {
×
1036
                if (value != null) {
×
1037
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
×
1038
                }
1039
            }
×
1040
        }
×
1041
        return returnString;
×
1042
    }
1043
    
1044
    @PUT
1045
    @AuthRequired
1046
    @Path("{id}/editMetadata")
1047
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
1048

1049
        Boolean replaceData = replace != null;
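        // Note: the mere presence of the "replace" query parameter (with any value,
        // including "false") turns on replace mode for processDatasetUpdate() below.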
×
1050
        DataverseRequest req = null;
×
1051
        req = createDataverseRequest(getRequestUser(crc));
×
1052

1053
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
1054
    }
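    // Illustrative usage note (a sketch, not part of this source file): the editMetadata
    // call above accepts either a single field object or, as parsed in processDatasetUpdate()
    // below, an object with a "fields" array. A hypothetical body, assuming the usual
    // typeName/value field form, might look like:
    //   { "fields": [ { "typeName": "subject", "value": [ "Chemistry" ] } ] }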
1055
    
1056
    
1057
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
1058
        try {
1059
           
1060
            Dataset ds = findDatasetOrDie(id);
×
1061
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1062
            //Get the current draft or create a new version to update
1063
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
1064
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
1065
            List<DatasetField> fields = new LinkedList<>();
×
1066
            DatasetField singleField = null;
×
1067
            
1068
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1069
            if (fieldsJson == null) {
×
1070
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
1071
                fields.add(singleField);
×
1072
            } else {
1073
                fields = jsonParser().parseMultipleFields(json);
×
1074
            }
1075
            
1076

1077
            String validationErrors = validateDatasetFieldValues(fields);
×
1078

1079
            if (!validationErrors.isEmpty()) {
×
1080
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
×
1081
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
×
1082
            }
1083

1084
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1085

1086
            // Loop through the incoming update fields and compare them to the
1087
            // fields already present in this version:
1088
            // if a matching field type exists, add or replace its values;
1089
            // otherwise add the entire DatasetField to the version.
1090
            for (DatasetField updateField : fields) {
×
1091
                boolean found = false;
×
1092
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
1093
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
1094
                        found = true;
×
1095
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1096
                            List<ControlledVocabularyValue> priorCVV = new ArrayList<>();
×
1097
                            String cvvDisplay = "";
×
1098

1099
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1100
                                cvvDisplay = dsf.getDisplayValue();
×
1101
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
×
1102
                                    priorCVV.add(cvvOld);
×
1103
                                }
×
1104
                            }
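                            // At this point priorCVV holds the pre-existing controlled vocabulary
                            // values; it is cleared below when replaceData is set, and incoming
                            // values are merged into it further down.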
1105

1106
                            if (replaceData) {
×
1107
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1108
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
×
1109
                                    dsf.setDatasetFieldValues(new ArrayList<>());
×
1110
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
×
1111
                                    priorCVV.clear();
×
1112
                                    dsf.getControlledVocabularyValues().clear();
×
1113
                                } else {
1114
                                    dsf.setSingleValue("");
×
1115
                                    dsf.setSingleControlledVocabularyValue(null);
×
1116
                                }
1117
                              cvvDisplay="";
×
1118
                            }
1119
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1120
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1121
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1122
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
×
1123
                                            priorCVV.add(cvv);
×
1124
                                        }
1125
                                    }
×
1126
                                    dsf.setControlledVocabularyValues(priorCVV);
×
1127
                                } else {
1128
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1129
                                }
1130
                            } else {
1131
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
1132
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1133
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
1134
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
×
1135
                                                dfv.setDatasetField(dsf);
×
1136
                                                dsf.getDatasetFieldValues().add(dfv);
×
1137
                                            }
1138
                                        }
×
1139
                                    } else {
1140
                                        dsf.setSingleValue(updateField.getValue());
×
1141
                                    }
1142
                                } else {
1143
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
1144
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
×
1145
                                            dfcv.setParentDatasetField(dsf);
×
1146
                                            dsf.setDatasetVersion(dsv);
×
1147
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
×
1148
                                        }
1149
                                    }
×
1150
                                }
1151
                            }
1152
                        } else {
×
1153
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
×
1154
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
×
1155
                            }
1156
                        }
1157
                        break;
1158
                    }
1159
                }
×
1160
                if (!found) {
×
1161
                    updateField.setDatasetVersion(dsv);
×
1162
                    dsv.getDatasetFields().add(updateField);
×
1163
                }
1164
            }
×
1165
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1166

1167
            return ok(json(managedVersion, true));
×
1168

1169
        } catch (JsonParseException ex) {
×
1170
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1171
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1172

1173
        } catch (WrappedResponse ex) {
×
1174
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
1175
            return ex.getResponse();
×
1176

1177
        }
1178
    }
1179
    
1180
    private String validateDatasetFieldValues(List<DatasetField> fields) {
1181
        StringBuilder error = new StringBuilder();
×
1182

1183
        for (DatasetField dsf : fields) {
×
1184
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1185
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
×
1186
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1187
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) {
×
1188
                error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1189
            }
1190
        }
×
1191

1192
        if (!error.toString().isEmpty()) {
×
1193
            return (error.toString());
×
1194
        }
1195
        return "";
×
1196
    }
1197
    
1198
    /**
1199
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
1200
     */
1201
    @GET
1202
    @AuthRequired
1203
    @Path("{id}/actions/:publish")
1204
    @Deprecated
1205
    public Response publishDatasetUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type) {
1206
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
×
1207
        return publishDataset(crc, id, type, false);
×
1208
    }
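    // Illustrative usage note (a sketch, not part of this source file): the POST variant
    // below expects a "type" query parameter of "major", "minor", or "updatecurrent",
    // plus an optional boolean "assureIsIndexed", e.g.
    //   .../actions/:publish?type=major&assureIsIndexed=true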
1209

1210
    @POST
1211
    @AuthRequired
1212
    @Path("{id}/actions/:publish")
1213
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
1214
        try {
1215
            if (type == null) {
×
1216
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
×
1217
            }
1218
            boolean updateCurrent=false;
×
1219
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1220
            type = type.toLowerCase();
×
1221
            boolean isMinor=false;
×
1222
            switch (type) {
×
1223
                case "minor":
1224
                    isMinor = true;
×
1225
                    break;
×
1226
                case "major":
1227
                    isMinor = false;
×
1228
                    break;
×
1229
                case "updatecurrent":
1230
                    if (user.isSuperuser()) {
×
1231
                        updateCurrent = true;
×
1232
                    } else {
1233
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
×
1234
                    }
1235
                    break;
1236
                default:
1237
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
×
1238
            }
1239

1240
            Dataset ds = findDatasetOrDie(id);
×
1241
            
1242
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1243
            if (!hasValidTerms) {
×
1244
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1245
            }
1246
            
1247
            if (mustBeIndexed) {
×
1248
                logger.fine("IT: " + ds.getIndexTime());
×
1249
                logger.fine("MT: " + ds.getModificationTime());
×
1250
                logger.fine("PIT: " + ds.getPermissionIndexTime());
×
1251
                logger.fine("PMT: " + ds.getPermissionModificationTime());
×
1252
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
×
1253
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
×
1254
                }
1255
                /*
1256
                 * Some calls, such as the /datasets/actions/:import* commands, do not set the
1257
                 * modification or permission modification times. The checks here try to determine
1258
                 * whether indexing or permission indexing could be pending: they check whether the
1259
                 * relevant modification time is set and, if so, whether the index time is also set
1260
                 * and is after the modification time. If the modification time is set and the
1261
                 * index time is null or before the modification time, the 409/Conflict error is
1262
                 * returned.
1263
                 *
1264
                 */
1265
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
×
1266
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
×
1267
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
×
1268
                }
1269
            }
1270
            if (updateCurrent) {
×
1271
                /*
1272
                 * Note: The code here mirrors that in the
1273
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
1274
                 * to the core logic (i.e. beyond updating the messaging about results) should
1275
                 * be applied to the code there as well.
1276
                 */
1277
                String errorMsg = null;
×
1278
                String successMsg = null;
×
1279
                try {
1280
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
×
1281
                    ds = commandEngine.submit(cmd);
×
1282
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");
×
1283

1284
                    // If configured, update archive copy as well
1285
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
×
1286
                    DatasetVersion updateVersion = ds.getLatestVersion();
×
1287
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
×
1288
                    if (archiveCommand != null) {
×
1289
                        // Delete the record of any existing copy since it is now out of date/incorrect
1290
                        updateVersion.setArchivalCopyLocation(null);
×
1291
                        /*
1292
                         * Then try to generate and submit an archival copy. Note that running this
1293
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
1294
                         * "The attribute [id] of class
1295
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
1296
                         * key column in the database. Updates are not allowed." To avoid that, and to
1297
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
1298
                         * pulled this out as a separate submit().
1299
                         */
1300
                        try {
1301
                            updateVersion = commandEngine.submit(archiveCommand);
×
1302
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
×
1303
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
×
1304
                            } else {
1305
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
×
1306
                            }
1307
                        } catch (CommandException ex) {
×
1308
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
×
1309
                            logger.severe(ex.getMessage());
×
1310
                        }
×
1311
                    }
1312
                } catch (CommandException ex) {
×
1313
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
1314
                    logger.severe(ex.getMessage());
×
1315
                }
×
1316
                if (errorMsg != null) {
×
1317
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
×
1318
                } else {
1319
                    return Response.ok(Json.createObjectBuilder()
×
1320
                            .add("status", ApiConstants.STATUS_OK)
×
1321
                            .add("status_details", successMsg)
×
1322
                            .add("data", json(ds)).build())
×
1323
                            .type(MediaType.APPLICATION_JSON)
×
1324
                            .build();
×
1325
                }
1326
            } else {
1327
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
×
1328
                        createDataverseRequest(user),
×
1329
                        isMinor));
1330
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
×
1331
            }
1332
        } catch (WrappedResponse ex) {
×
1333
            return ex.getResponse();
×
1334
        }
1335
    }
1336

1337
    @POST
1338
    @AuthRequired
1339
    @Path("{id}/actions/:releasemigrated")
1340
    @Consumes("application/ld+json, application/json-ld")
1341
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
1342
        try {
1343
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
1344
            if (!user.isSuperuser()) {
×
1345
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
×
1346
            }
1347

1348
            Dataset ds = findDatasetOrDie(id);
×
1349
            try {
1350
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
×
1351
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
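                // Illustrative sketch (an assumption, not taken from this file): a minimal
                // JSON-LD body for this call might look like
                //   { "http://schema.org/datePublished": "2015-09-29" }
                // where the key is the expanded form of the schema.org "datePublished" term used above.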
×
1352
                logger.fine("Submitted date: " + pubDate);
×
1353
                LocalDateTime dateTime = null;
×
1354
                if(!StringUtils.isEmpty(pubDate)) {
×
1355
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
×
1356
                    final Timestamp time = Timestamp.valueOf(dateTime);
×
1357
                    //Set version release date
1358
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
×
1359
                }
1360
                // dataset.getPublicationDateFormattedYYYYMMDD())
1361
                // Assign a version number if not set
1362
                if (ds.getLatestVersion().getVersionNumber() == null) {
×
1363

1364
                    if (ds.getVersions().size() == 1) {
×
1365
                        // First Release
1366
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
×
1367
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
×
1368
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
×
1369
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
×
1370
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
×
1371
                    } else {
1372
                        // major, non-first release
1373
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
×
1374
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
×
1375
                    }
1376
                }
1377
                if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) {
×
1378
                    //Also set publication date if this is the first
1379
                    if(dateTime != null) {
×
1380
                      ds.setPublicationDate(Timestamp.valueOf(dateTime));
×
1381
                    }
1382
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
1383
                    // is null, so set it here.
1384
                    ds.setReleaseUser((AuthenticatedUser) user);
×
1385
                }
1386
            } catch (Exception e) {
×
1387
                logger.fine(e.getMessage());
×
1388
                throw new BadRequestException("Unable to set publication date ("
×
1389
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
×
1390
            }
×
1391
            /*
1392
             * Note: The code here mirrors that in the
1393
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
1394
             * to the core logic (i.e. beyond updating the messaging about results) should
1395
             * be applied to the code there as well.
1396
             */
1397
            String errorMsg = null;
×
1398
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);
×
1399

1400
            try {
1401
                // ToDo - should this be in onSuccess()? May relate to todo above
1402
                if (prePubWf.isPresent()) {
×
1403
                    // Start the workflow, the workflow will call FinalizeDatasetPublication later
1404
                    wfService.start(prePubWf.get(),
×
1405
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
×
1406
                            false);
1407
                } else {
1408
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
×
1409
                            createDataverseRequest(user), !contactPIDProvider);
×
1410
                    ds = commandEngine.submit(cmd);
×
1411
                }
1412
            } catch (CommandException ex) {
×
1413
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
1414
                logger.severe(ex.getMessage());
×
1415
            }
×
1416

1417
            if (errorMsg != null) {
×
1418
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
×
1419
            } else {
1420
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
×
1421
            }
1422

1423
        } catch (WrappedResponse ex) {
×
1424
            return ex.getResponse();
×
1425
        }
1426
    }
1427

1428
    @POST
1429
    @AuthRequired
1430
    @Path("{id}/move/{targetDataverseAlias}")
1431
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
1432
        try {
1433
            User u = getRequestUser(crc);
×
1434
            Dataset ds = findDatasetOrDie(id);
×
1435
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
×
1436
            if (target == null) {
×
1437
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
×
1438
            }
1439
            //Command requires Super user - it will be tested by the command
1440
            execCommand(new MoveDatasetCommand(
×
1441
                    createDataverseRequest(u), ds, target, force
×
1442
            ));
1443
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
×
1444
        } catch (WrappedResponse ex) {
×
1445
            if (ex.getCause() instanceof UnforcedCommandException) {
×
1446
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
×
1447
            } else {
1448
                return ex.getResponse();
×
1449
            }
1450
        }
1451
    }
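    // Illustrative usage note (a sketch, not part of this source file): the move call above
    // is invoked as .../datasets/{id}/move/{targetDataverseAlias}; adding ?forceMove=true
    // can force a move that would otherwise be rejected with the "suggestForce" message.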
1452

1453
    @POST
1454
    @AuthRequired
1455
    @Path("{id}/files/actions/:set-embargo")
1456
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1457

1458
        // user is authenticated
1459
        AuthenticatedUser authenticatedUser = null;
×
1460
        try {
1461
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1462
        } catch (WrappedResponse ex) {
×
1463
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1464
        }
×
1465

1466
        Dataset dataset;
1467
        try {
1468
            dataset = findDatasetOrDie(id);
×
1469
        } catch (WrappedResponse ex) {
×
1470
            return ex.getResponse();
×
1471
        }
×
1472
        
1473
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1474
        
1475
        if (!hasValidTerms){
×
1476
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1477
        }
1478

1479
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1480
        /*
1481
         * This is only a pre-test - if there's no draft version, there are clearly no
1482
         * files that a normal user can change. The converse is not true. A draft
1483
         * version could contain only files that have already been released. Further, we
1484
         * haven't checked the file list yet so the user could still be trying to change
1485
         * released files even if there are some unreleased/draft-only files. Doing this
1486
         * check here does avoid having to do further parsing for some error cases. It
1487
         * also checks the user can edit this dataset, so we don't have to make that
1488
         * check later.
1489
         */
1490

1491
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1492
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1493
        }
1494

1495
        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
1496
        long maxEmbargoDurationInMonths = 0;
×
1497
        try {
1498
            maxEmbargoDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1499
        } catch (NumberFormatException nfe){
×
1500
            if (nfe.getMessage().contains("null")) {
×
1501
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1502
            }
1503
        }
×
1504
        if (maxEmbargoDurationInMonths == 0){
×
1505
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1506
        }
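        // Illustrative request body (a sketch based on the keys read below, not taken from this file):
        //   { "dateAvailable": "2026-06-30", "reason": "Pending publication", "fileIds": [300, 301] }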
1507

1508
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1509

1510
        Embargo embargo = new Embargo();
×
1511

1512

1513
        LocalDate currentDateTime = LocalDate.now();
×
1514
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));
×
1515

1516
        // A :MaxEmbargoDurationInMonths value of -1 means there is no upper limit on the embargo end date.
1517
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
×
1518
        // dateAvailable is not in the past
1519
        if (dateAvailable.isAfter(currentDateTime)){
×
1520
            embargo.setDateAvailable(dateAvailable);
×
1521
        } else {
1522
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
×
1523
        }
1524

1525
        // dateAvailable is within limits
1526
        if (maxEmbargoDateTime != null){
×
1527
            if (dateAvailable.isAfter(maxEmbargoDateTime)){
×
1528
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
×
1529
            }
1530
        }
1531

1532
        embargo.setReason(json.getString("reason"));
×
1533

1534
        List<DataFile> datasetFiles = dataset.getFiles();
×
1535
        List<DataFile> filesToEmbargo = new LinkedList<>();
×
1536

1537
        // extract fileIds from json, find datafiles and add to list
1538
        if (json.containsKey("fileIds")){
×
1539
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1540
            for (JsonValue jsv : fileIds) {
×
1541
                try {
1542
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1543
                    filesToEmbargo.add(dataFile);
×
1544
                } catch (WrappedResponse ex) {
×
1545
                    return ex.getResponse();
×
1546
                }
×
1547
            }
×
1548
        }
1549

1550
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1551
        // check if files belong to dataset
1552
        if (datasetFiles.containsAll(filesToEmbargo)) {
×
1553
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1554
            boolean badFiles = false;
×
1555
            for (DataFile datafile : filesToEmbargo) {
×
1556
                // superuser can overrule an existing embargo, even on released files
1557
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
1558
                    restrictedFiles.add(datafile.getId());
×
1559
                    badFiles = true;
×
1560
                }
1561
            }
×
1562
            if (badFiles) {
×
1563
                return Response.status(Status.FORBIDDEN)
×
1564
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1565
                                .add("message", "You do not have permission to embargo the following files")
×
1566
                                .add("files", restrictedFiles).build())
×
1567
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1568
            }
1569
            embargo=embargoService.merge(embargo);
×
1570
            // Good request, so add the embargo. Track any existing embargoes so we can
1571
            // delete them if there are no files left that reference them.
1572
            for (DataFile datafile : filesToEmbargo) {
×
1573
                Embargo emb = datafile.getEmbargo();
×
1574
                if (emb != null) {
×
1575
                    emb.getDataFiles().remove(datafile);
×
1576
                    if (emb.getDataFiles().isEmpty()) {
×
1577
                        orphanedEmbargoes.add(emb);
×
1578
                    }
1579
                }
1580
                // Save merges the datafile with an embargo into the context
1581
                datafile.setEmbargo(embargo);
×
1582
                fileService.save(datafile);
×
1583
            }
×
1584
            //Call service to get action logged
1585
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
×
1586
            if (orphanedEmbargoes.size() > 0) {
×
1587
                for (Embargo emb : orphanedEmbargoes) {
×
1588
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1589
                }
×
1590
            }
1591
            //If superuser, report changes to any released files
1592
            if (authenticatedUser.isSuperuser()) {
×
1593
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
×
1594
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
1595
                if (!releasedFiles.isBlank()) {
×
1596
                    actionLogSvc
×
1597
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
×
1598
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
×
1599
                                            + releasedFiles + ".")
1600
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1601
                }
1602
            }
1603
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
×
1604
        } else {
1605
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1606
        }
1607
    }
1608

1609
    @POST
1610
    @AuthRequired
1611
    @Path("{id}/files/actions/:unset-embargo")
1612
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1613

1614
        // user is authenticated
1615
        AuthenticatedUser authenticatedUser = null;
×
1616
        try {
1617
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1618
        } catch (WrappedResponse ex) {
×
1619
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1620
        }
×
1621

1622
        Dataset dataset;
1623
        try {
1624
            dataset = findDatasetOrDie(id);
×
1625
        } catch (WrappedResponse ex) {
×
1626
            return ex.getResponse();
×
1627
        }
×
1628

1629
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1630
        // check if files are unreleased(DRAFT?)
1631
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1632
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1633
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1634
        }
1635

1636
        // Check whether embargoes are allowed: read the :MaxEmbargoDurationInMonths setting; if it is 0 or not set (null), return 400.
1637
        // Todo - is 400 the right status code when embargoes are not enabled?
1638
        // Todo - handle reading the duration as a Long in one place (a settings getLong method? or is that only in the view-scoped wrapper?)
1639
        int maxEmbargoDurationInMonths = 0;
×
1640
        try {
1641
            maxEmbargoDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1642
        } catch (NumberFormatException nfe){
×
1643
            if (nfe.getMessage().contains("null")) {
×
1644
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1645
            }
1646
        }
×
1647
        if (maxEmbargoDurationInMonths == 0){
×
1648
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1649
        }
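        // Illustrative request body (a sketch; only the file list is read below):
        //   { "fileIds": [300, 301] }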
1650

1651
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1652

1653
        List<DataFile> datasetFiles = dataset.getFiles();
×
1654
        List<DataFile> embargoFilesToUnset = new LinkedList<>();
×
1655

1656
        // extract fileIds from json, find datafiles and add to list
1657
        if (json.containsKey("fileIds")){
×
1658
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1659
            for (JsonValue jsv : fileIds) {
×
1660
                try {
1661
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1662
                    embargoFilesToUnset.add(dataFile);
×
1663
                } catch (WrappedResponse ex) {
×
1664
                    return ex.getResponse();
×
1665
                }
×
1666
            }
×
1667
        }
1668

1669
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1670
        // check if files belong to dataset
1671
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
×
1672
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1673
            boolean badFiles = false;
×
1674
            for (DataFile datafile : embargoFilesToUnset) {
×
1675
                // the file must have an embargo, and only a superuser may remove an embargo from a released file
1676
                if (datafile.getEmbargo() == null || (datafile.isReleased() && !authenticatedUser.isSuperuser())) {
×
1677
                    restrictedFiles.add(datafile.getId());
×
1678
                    badFiles = true;
×
1679
                }
1680
            }
×
1681
            if (badFiles) {
×
1682
                return Response.status(Status.FORBIDDEN)
×
1683
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1684
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
×
1685
                                .add("files", restrictedFiles).build())
×
1686
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1687
            }
1688
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
1689
            // delete them if there are no files left that reference them.
1690
            for (DataFile datafile : embargoFilesToUnset) {
×
1691
                Embargo emb = datafile.getEmbargo();
×
1692
                if (emb != null) {
×
1693
                    emb.getDataFiles().remove(datafile);
×
1694
                    if (emb.getDataFiles().isEmpty()) {
×
1695
                        orphanedEmbargoes.add(emb);
×
1696
                    }
1697
                }
1698
                // Save merges the datafile with an embargo into the context
1699
                datafile.setEmbargo(null);
×
1700
                fileService.save(datafile);
×
1701
            }
×
1702
            if (orphanedEmbargoes.size() > 0) {
×
1703
                for (Embargo emb : orphanedEmbargoes) {
×
1704
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1705
                }
×
1706
            }
1707
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
1708
            if(!releasedFiles.isBlank()) {
×
1709
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
×
1710
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
1711
                actionLogSvc.log(removeRecord);
×
1712
            }
1713
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
×
1714
        } else {
1715
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1716
        }
1717
    }
1718

1719
    @POST
1720
    @AuthRequired
1721
    @Path("{id}/files/actions/:set-retention")
1722
    public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1723

1724
        // user is authenticated
1725
        AuthenticatedUser authenticatedUser = null;
×
1726
        try {
1727
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1728
        } catch (WrappedResponse ex) {
×
1729
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1730
        }
×
1731

1732
        Dataset dataset;
1733
        try {
1734
            dataset = findDatasetOrDie(id);
×
1735
        } catch (WrappedResponse ex) {
×
1736
            return ex.getResponse();
×
1737
        }
×
1738

1739
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1740

1741
        if (!hasValidTerms){
×
1742
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1743
        }
1744

1745
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1746
        // check if files are unreleased(DRAFT?)
1747
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1748
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1749
        }
1750

1751
        // Check whether retention periods are allowed: read the :MinRetentionDurationInMonths setting; if it is 0 or not set (null), return 400.
1752
        long minRetentionDurationInMonths = 0;
×
1753
        try {
1754
            minRetentionDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
×
1755
        } catch (NumberFormatException nfe){
×
1756
            if (nfe.getMessage().contains("null")) {
×
1757
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1758
            }
1759
        }
×
1760
        if (minRetentionDurationInMonths == 0){
×
1761
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1762
        }
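        // Illustrative request body (a sketch based on the keys read below; "reason" is optional):
        //   { "dateUnavailable": "2027-01-01", "reason": "Contractual hold", "fileIds": [300, 301] }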
1763

1764
        JsonObject json;
1765
        try {
1766
            json = JsonUtil.getJsonObject(jsonBody);
×
1767
        } catch (JsonException ex) {
×
1768
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
×
1769
        }
×
1770

1771
        Retention retention = new Retention();
×
1772

1773

1774
        LocalDate currentDateTime = LocalDate.now();
×
1775

1776
        // Extract the dateUnavailable - check if specified and valid
1777
        String dateUnavailableStr = "";
×
1778
        LocalDate dateUnavailable;
1779
        try {
1780
            dateUnavailableStr = json.getString("dateUnavailable");
×
1781
            dateUnavailable = LocalDate.parse(dateUnavailableStr);
×
1782
        } catch (NullPointerException npex) {
×
1783
            return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified");
×
1784
        } catch (ClassCastException ccex) {
×
1785
            return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string");
×
1786
        } catch (DateTimeParseException dtpex) {
×
1787
            return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr);
×
1788
        }
×
1789

1790
        // A :MinRetentionDurationInMonths value of -1 means no minimum retention period is enforced.
1791
        LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? LocalDate.now().plusMonths(minRetentionDurationInMonths) : null;
×
1792
        // dateUnavailable is not in the past
1793
        if (dateUnavailable.isAfter(currentDateTime)){
×
1794
            retention.setDateUnavailable(dateUnavailable);
×
1795
        } else {
1796
            return error(Status.BAD_REQUEST, "Date unavailable can not be in the past");
×
1797
        }
1798

1799
        // dateUnavailable is within limits
1800
        if (minRetentionDateTime != null){
×
1801
            if (dateUnavailable.isBefore(minRetentionDateTime)){
×
1802
                return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
×
1803
            }
1804
        }
1805
        
1806
        try {
1807
            String reason = json.getString("reason");
×
1808
            retention.setReason(reason);
×
1809
        } catch (NullPointerException npex) {
×
1810
            // ignoring; no reason specified is OK, it is optional
1811
        } catch (ClassCastException ccex) {
×
1812
            return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string");
×
1813
        }
×
1814

1815

1816
        List<DataFile> datasetFiles = dataset.getFiles();
×
1817
        List<DataFile> filesToRetention = new LinkedList<>();
×
1818

1819
        // extract fileIds from json, find datafiles and add to list
1820
        if (json.containsKey("fileIds")){
×
1821
            try {
1822
                JsonArray fileIds = json.getJsonArray("fileIds");
×
1823
                for (JsonValue jsv : fileIds) {
×
1824
                    try {
1825
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1826
                        filesToRetention.add(dataFile);
×
1827
                    } catch (WrappedResponse ex) {
×
1828
                        return ex.getResponse();
×
1829
                    }
×
1830
                }
×
1831
            } catch (ClassCastException ccex) {
×
1832
                return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings");
×
1833
            } catch (NullPointerException npex) {
×
1834
                return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified");
×
1835
            }
×
1836
        } else {
1837
            return error(Status.BAD_REQUEST, "No fileIds specified");
×
1838
        }
1839

1840
        List<Retention> orphanedRetentions = new ArrayList<Retention>();
×
1841
        // check if files belong to dataset
1842
        if (datasetFiles.containsAll(filesToRetention)) {
×
1843
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1844
            boolean badFiles = false;
×
1845
            for (DataFile datafile : filesToRetention) {
×
1846
                // superuser can overrule an existing retention, even on released files
1847
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
1848
                    restrictedFiles.add(datafile.getId());
×
1849
                    badFiles = true;
×
1850
                }
1851
            }
×
1852
            if (badFiles) {
×
1853
                return Response.status(Status.FORBIDDEN)
×
1854
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1855
                                .add("message", "You do not have permission to set a retention period for the following files")
×
1856
                                .add("files", restrictedFiles).build())
×
1857
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1858
            }
1859
            retention=retentionService.merge(retention);
×
1860
            // Good request, so add the retention. Track any existing retentions so we can
1861
            // delete them if there are no files left that reference them.
1862
            for (DataFile datafile : filesToRetention) {
×
1863
                Retention ret = datafile.getRetention();
×
1864
                if (ret != null) {
×
1865
                    ret.getDataFiles().remove(datafile);
×
1866
                    if (ret.getDataFiles().isEmpty()) {
×
1867
                        orphanedRetentions.add(ret);
×
1868
                    }
1869
                }
1870
                // Save merges the datafile with a retention into the context
1871
                datafile.setRetention(retention);
×
1872
                fileService.save(datafile);
×
1873
            }
×
1874
            //Call service to get action logged
1875
            long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier());
×
1876
            if (orphanedRetentions.size() > 0) {
×
1877
                for (Retention ret : orphanedRetentions) {
×
1878
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
×
1879
                }
×
1880
            }
1881
            //If superuser, report changes to any released files
1882
            if (authenticatedUser.isSuperuser()) {
×
1883
                String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased())
×
1884
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
1885
                if (!releasedFiles.isBlank()) {
×
1886
                    actionLogSvc
×
1887
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo")
×
1888
                                    .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) "
×
1889
                                            + releasedFiles + ".")
1890
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1891
                }
1892
            }
1893
            return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated"));
×
1894
        } else {
1895
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1896
        }
1897
    }
1898

1899
    @POST
1900
    @AuthRequired
1901
    @Path("{id}/files/actions/:unset-retention")
1902
    public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1903

1904
        // user is authenticated
1905
        AuthenticatedUser authenticatedUser = null;
×
1906
        try {
1907
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1908
        } catch (WrappedResponse ex) {
×
1909
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1910
        }
×
1911

1912
        Dataset dataset;
1913
        try {
1914
            dataset = findDatasetOrDie(id);
×
1915
        } catch (WrappedResponse ex) {
×
1916
            return ex.getResponse();
×
1917
        }
×
1918

1919
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1920
        // check if files are unreleased(DRAFT?)
1921
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1922
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1923
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1924
        }
1925

1926
        // Check whether retention periods are allowed: read the :MinRetentionDurationInMonths setting; if it is 0 or not set (null), return 400.
1927
        int minRetentionDurationInMonths = 0;
×
1928
        try {
1929
            minRetentionDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
×
1930
        } catch (NumberFormatException nfe){
×
1931
            if (nfe.getMessage().contains("null")) {
×
1932
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1933
            }
1934
        }
×
1935
        if (minRetentionDurationInMonths == 0){
×
1936
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
×
1937
        }
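        // Illustrative request body (a sketch; only the file list is read below):
        //   { "fileIds": [300, 301] }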
1938

1939
        JsonObject json;
1940
        try {
1941
            json = JsonUtil.getJsonObject(jsonBody);
×
1942
        } catch (JsonException ex) {
×
1943
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
×
1944
        }
×
1945

1946
        List<DataFile> datasetFiles = dataset.getFiles();
×
1947
        List<DataFile> retentionFilesToUnset = new LinkedList<>();
×
1948

1949
        // extract fileIds from json, find datafiles and add to list
1950
        if (json.containsKey("fileIds")){
×
1951
            try {
1952
                JsonArray fileIds = json.getJsonArray("fileIds");
×
1953
                for (JsonValue jsv : fileIds) {
×
1954
                    try {
1955
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1956
                        retentionFilesToUnset.add(dataFile);
×
1957
                    } catch (WrappedResponse ex) {
×
1958
                        return ex.getResponse();
×
1959
                    }
×
1960
                }
×
1961
            } catch (ClassCastException ccex) {
×
1962
                return error(Status.BAD_REQUEST, "fileIds must be an array of id strings");
×
1963
            } catch (NullPointerException npex) {
×
1964
                return error(Status.BAD_REQUEST, "No fileIds specified");
×
1965
            }
×
1966
        } else {
1967
            return error(Status.BAD_REQUEST, "No fileIds specified");
×
1968
        }
1969

1970
        List<Retention> orphanedRetentions = new ArrayList<Retention>();
×
1971
        // check if files belong to dataset
1972
        if (datasetFiles.containsAll(retentionFilesToUnset)) {
×
1973
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1974
            boolean badFiles = false;
×
1975
            for (DataFile datafile : retentionFilesToUnset) {
×
1976
                // the file must have a retention period, and only a superuser may remove one from a released file
1977
                if (datafile.getRetention() == null || (datafile.isReleased() && !authenticatedUser.isSuperuser())) {
×
1978
                    restrictedFiles.add(datafile.getId());
×
1979
                    badFiles = true;
×
1980
                }
1981
            }
×
1982
            if (badFiles) {
×
1983
                return Response.status(Status.FORBIDDEN)
×
1984
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1985
                                .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods")
×
1986
                                .add("files", restrictedFiles).build())
×
1987
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1988
            }
1989
            // Good request, so remove the retention from the files. Track any existing retentions so we can
1990
            // delete them if there are no files left that reference them.
1991
            for (DataFile datafile : retentionFilesToUnset) {
×
1992
                Retention ret = datafile.getRetention();
×
1993
                if (ret != null) {
×
1994
                    ret.getDataFiles().remove(datafile);
×
1995
                    if (ret.getDataFiles().isEmpty()) {
×
1996
                        orphanedRetentions.add(ret);
×
1997
                    }
1998
                }
1999
                // Save merges the datafile with a retention into the context
2000
                datafile.setRetention(null);
×
2001
                fileService.save(datafile);
×
2002
            }
×
2003
            if (orphanedRetentions.size() > 0) {
×
2004
                for (Retention ret : orphanedRetentions) {
×
2005
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
×
2006
                }
×
2007
            }
2008
            String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
2009
            if(!releasedFiles.isBlank()) {
×
2010
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + ".");
×
2011
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
2012
                actionLogSvc.log(removeRecord);
×
2013
            }
2014
            return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)"));
×
2015
        } else {
2016
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
2017
        }
2018
    }
2019

2020
    @PUT
2021
    @AuthRequired
2022
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
2023
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
2024
        try {
2025
            User u = getRequestUser(crc);
×
2026
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
2027
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
2028
            if (linked == null){
×
2029
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
2030
            }
2031
            if (linking == null) {
×
2032
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
2033
            }
2034
            execCommand(new LinkDatasetCommand(
×
2035
                    createDataverseRequest(u), linking, linked
×
2036
            ));
2037
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
2038
        } catch (WrappedResponse ex) {
×
2039
            return ex.getResponse();
×
2040
        }
2041
    }
2042

2043
    @GET
2044
    @Path("{id}/versions/{versionId}/customlicense")
2045
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
2046
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
2047
        User user = session.getUser();
×
2048
        String persistentId;
2049
        try {
2050
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
×
2051
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
×
2052
            }
2053
            persistentId = getRequestParameter(":persistentId".substring(1));
×
2054
            if (versionId.equals(DS_VERSION_DRAFT)) {
×
2055
                versionId = "DRAFT";
×
2056
            }
2057
        } catch (WrappedResponse wrappedResponse) {
×
2058
            return wrappedResponse.getResponse();
×
2059
        }
×
2060
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
×
2061
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
×
2062
    }
2063

2064

2065
    @GET
2066
    @AuthRequired
2067
    @Path("{id}/links")
2068
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
2069
        try {
2070
            User u = getRequestUser(crc);
×
2071
            if (!u.isSuperuser()) {
×
2072
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
2073
            }
2074
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2075

2076
            long datasetId = dataset.getId();
×
2077
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
2078
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
2079
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
2080
                dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
×
2081
            }
×
2082
            JsonObjectBuilder response = Json.createObjectBuilder();
×
2083
            response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
×
2084
            return ok(response);
×
2085
        } catch (WrappedResponse wr) {
×
2086
            return wr.getResponse();
×
2087
        }
2088
    }
2089

2090
    /**
2091
     * Add a given assignment to a given user or group
2092
     * @param ra     role assignment DTO
2093
     * @param id     dataset id
2094
     * @param apiKey
2095
     */
2096
    @POST
2097
    @AuthRequired
2098
    @Path("{identifier}/assignments")
2099
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
2100
        try {
2101
            Dataset dataset = findDatasetOrDie(id);
×
2102
            
2103
            RoleAssignee assignee = findAssignee(ra.getAssignee());
×
2104
            if (assignee == null) {
×
2105
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
×
2106
            }
2107
            
2108
            DataverseRole theRole;
2109
            Dataverse dv = dataset.getOwner();
×
2110
            theRole = null;
×
2111
            while ((theRole == null) && (dv != null)) {
×
2112
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
×
2113
                    if (aRole.getAlias().equals(ra.getRole())) {
×
2114
                        theRole = aRole;
×
2115
                        break;
×
2116
                    }
2117
                }
×
2118
                dv = dv.getOwner();
×
2119
            }
2120
            if (theRole == null) {
×
2121
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
×
2122
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
×
2123
            }
2124

2125
            String privateUrlToken = null;
×
2126
            return ok(
×
2127
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
×
2128
        } catch (WrappedResponse ex) {
×
2129
            List<String> args = Arrays.asList(ex.getMessage());
×
2130
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
×
2131
            return ex.getResponse();
×
2132
        }
2133

2134
    }
2135
    
2136
    @DELETE
2137
    @AuthRequired
2138
    @Path("{identifier}/assignments/{id}")
2139
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
2140
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
×
2141
        if (ra != null) {
×
2142
            try {
2143
                findDatasetOrDie(dsId);
×
2144
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
×
2145
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
×
2146
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
×
2147
            } catch (WrappedResponse ex) {
×
2148
                return ex.getResponse();
×
2149
            }
2150
        } else {
2151
            List<String> args = Arrays.asList(Long.toString(assignmentId));
×
2152
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
×
2153
        }
2154
    }
2155

2156
    @GET
2157
    @AuthRequired
2158
    @Path("{identifier}/assignments")
2159
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2160
        return response(req ->
×
2161
                ok(execCommand(
×
2162
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
×
2163
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
×
2164
    }
2165

2166
    @GET
2167
    @AuthRequired
2168
    @Path("{id}/privateUrl")
2169
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2170
        return response( req -> {
×
2171
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
2172
            return (privateUrl != null) ? ok(json(privateUrl))
×
2173
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
2174
        }, getRequestUser(crc));
×
2175
    }
2176

2177
    @POST
2178
    @AuthRequired
2179
    @Path("{id}/privateUrl")
2180
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
2181
        if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
×
2182
            throw new NotAcceptableException("Anonymized Access not enabled");
×
2183
        }
2184
        return response(req ->
×
2185
                ok(json(execCommand(
×
2186
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
×
2187
    }
2188

2189
    @DELETE
2190
    @AuthRequired
2191
    @Path("{id}/privateUrl")
2192
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2193
        return response( req -> {
×
2194
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2195
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
2196
            if (privateUrl != null) {
×
2197
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
2198
                return ok("Private URL deleted.");
×
2199
            } else {
2200
                return notFound("No Private URL to delete.");
×
2201
            }
2202
        }, getRequestUser(crc));
×
2203
    }
2204

2205
    @GET
2206
    @AuthRequired
2207
    @Path("{id}/thumbnail/candidates")
2208
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2209
        try {
2210
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2211
            boolean canUpdateThumbnail = false;
×
2212
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
×
2213
            if (!canUpdateThumbnail) {
×
2214
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
×
2215
            }
2216
            JsonArrayBuilder data = Json.createArrayBuilder();
×
2217
            boolean considerDatasetLogoAsCandidate = true;
×
2218
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
×
2219
                JsonObjectBuilder candidate = Json.createObjectBuilder();
×
2220
                String base64image = datasetThumbnail.getBase64image();
×
2221
                if (base64image != null) {
×
2222
                    logger.fine("found a candidate!");
×
2223
                    candidate.add("base64image", base64image);
×
2224
                }
2225
                DataFile dataFile = datasetThumbnail.getDataFile();
×
2226
                if (dataFile != null) {
×
2227
                    candidate.add("dataFileId", dataFile.getId());
×
2228
                }
2229
                data.add(candidate);
×
2230
            }
×
2231
            return ok(data);
×
2232
        } catch (WrappedResponse ex) {
×
2233
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
×
2234
        }
2235
    }
2236

2237
    @GET
2238
    @Produces({"image/png"})
2239
    @Path("{id}/thumbnail")
2240
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
2241
        try {
2242
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2243
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
×
2244
            if(is == null) {
×
2245
                return notFound("Thumbnail not available");
×
2246
            }
2247
            return Response.ok(is).build();
×
2248
        } catch (WrappedResponse wr) {
×
2249
            return notFound("Thumbnail not available");
×
2250
        }
2251
    }
2252

2253
    @GET
2254
    @Produces({ "image/png" })
2255
    @Path("{id}/logo")
2256
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
2257
        try {
2258
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2259
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
×
2260
            if (is == null) {
×
2261
                return notFound("Logo not available");
×
2262
            }
2263
            return Response.ok(is).build();
×
2264
        } catch (WrappedResponse wr) {
×
2265
            return notFound("Logo not available");
×
2266
        }
2267
    }
2268

2269
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
2270
    @POST
2271
    @AuthRequired
2272
    @Path("{id}/thumbnail/{dataFileId}")
2273
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
2274
        try {
2275
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
2276
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
2277
        } catch (WrappedResponse wr) {
×
2278
            return wr.getResponse();
×
2279
        }
2280
    }
2281

2282
    @POST
2283
    @AuthRequired
2284
    @Path("{id}/thumbnail")
2285
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2286
    @Produces("application/json")
2287
    @Operation(summary = "Uploads a logo for a dataset", 
2288
               description = "Uploads a logo for a dataset")
2289
    @APIResponse(responseCode = "200",
2290
               description = "Dataset logo uploaded successfully")
2291
    @Tag(name = "uploadDatasetLogo", 
2292
         description = "Uploads a logo for a dataset")
2293
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))          
2294
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
2295
        try {
2296
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
×
2297
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
2298
        } catch (WrappedResponse wr) {
×
2299
            return wr.getResponse();
×
2300
        }
2301
    }
2302

2303
    @DELETE
2304
    @AuthRequired
2305
    @Path("{id}/thumbnail")
2306
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2307
        try {
2308
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
2309
            return ok("Dataset thumbnail removed.");
×
2310
        } catch (WrappedResponse wr) {
×
2311
            return wr.getResponse();
×
2312
        }
2313
    }
2314

2315
    @Deprecated(forRemoval = true, since = "2024-07-07")
2316
    @GET
2317
    @AuthRequired
2318
    @Path("{identifier}/dataCaptureModule/rsync")
2319
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2320
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
2321
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
×
2322
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
×
2323
        }
2324
        Dataset dataset = null;
×
2325
        try {
2326
            dataset = findDatasetOrDie(id);
×
2327
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
2328
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
×
2329
            
2330
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
×
2331
            if (lock == null) {
×
2332
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
2333
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
×
2334
            }
2335
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
×
2336
        } catch (WrappedResponse wr) {
×
2337
            return wr.getResponse();
×
2338
        } catch (EJBException ex) {
×
2339
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
×
2340
        }
2341
    }
2342
    
2343
    /**
2344
     * This api endpoint triggers the creation of a "package" file in a dataset
2345
     * after that package has been moved onto the same filesystem via the Data Capture Module.
2346
     * The package is really just a way for Dataverse to interpret a folder created by the DCM as a single file.
2347
     * The "package" can be downloaded over RSAL.
2348
     *
2349
     * This endpoint currently supports both posix file storage and AWS S3 storage in Dataverse, and acts accordingly depending on which one is active.
2350
     *
2351
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
2352
     * But due to the possibly immense number of files (millions) the package approach was taken.
2353
     * This is relevant because the posix ("file") code contains many remnants of that development work.
2354
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
2355
     * -MAD 4.9.1
2356
     */
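    // Illustrative only (not in the original source): the DCM is assumed to POST a body shaped like
    // {"status": "validation passed", "uploadFolder": "upload_xyz", "totalSize": 123456},
    // matching the "status", "uploadFolder" and "totalSize" keys read in the method below
    // (the folder name and size shown here are hypothetical).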
2357
    @POST
2358
    @AuthRequired
2359
    @Path("{identifier}/dataCaptureModule/checksumValidation")
2360
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
2361
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
2362
        AuthenticatedUser authenticatedUser = null;
×
2363
        try {
2364
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2365
        } catch (WrappedResponse ex) {
×
2366
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
2367
        }
×
2368
        if (!authenticatedUser.isSuperuser()) {
×
2369
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2370
        }
2371
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
2372
        try {
2373
            Dataset dataset = findDatasetOrDie(id);
×
2374
            if ("validation passed".equals(statusMessageFromDcm)) {
×
2375
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
2376

2377
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
×
2378
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
2379
                int totalSize = jsonFromDcm.getInt("totalSize");
×
2380
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
×
2381
                
2382
                if (storageDriverType.equals("file")) {
×
2383
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
2384

2385
                    ImportMode importMode = ImportMode.MERGE;
×
2386
                    try {
2387
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, Long.valueOf(totalSize), importMode));
×
2388
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
2389
                        String message = jsonFromImportJobKickoff.getString("message");
×
2390
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2391
                        job.add("jobId", jobId);
×
2392
                        job.add("message", message);
×
2393
                        return ok(job);
×
2394
                    } catch (WrappedResponse wr) {
×
2395
                        String message = wr.getMessage();
×
2396
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
×
2397
                    }
2398
                } else if(storageDriverType.equals(DataAccess.S3)) {
×
2399
                    
2400
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
2401
                    try {
2402
                        
2403
                        //Where the heavy lifting is actually done: moving the S3 files over and making Dataverse aware of the existence of the package
2404
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
2405
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, Long.valueOf(totalSize));
×
2406
                        
2407
                        if (packageFile == null) {
×
2408
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
2409
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
2410
                        }
2411
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
2412
                        if (dcmLock == null) {
×
2413
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
2414
                        } else {
2415
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
2416
                            dataset.removeLock(dcmLock);
×
2417
                        }
2418
                        
2419
                        // update version using the command engine to enforce user permissions and constraints
2420
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
2421
                            try {
2422
                                Command<Dataset> cmd;
2423
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
×
2424
                                commandEngine.submit(cmd);
×
2425
                            } catch (CommandException ex) {
×
2426
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
2427
                            }
×
2428
                        } else {
2429
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
×
2430
                                    + "single version in draft mode.";
2431
                            logger.log(Level.SEVERE, constraintError);
×
2432
                        }
2433

2434
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2435
                        return ok(job);
×
2436
                        
2437
                    } catch (IOException e) {
×
2438
                        String message = e.getMessage();
×
2439
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
×
2440
                    }
2441
                } else {
2442
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
×
2443
                }
2444
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
2445
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
×
2446
                distinctAuthors.values().forEach((value) -> {
×
2447
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2448
                });
×
2449
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
2450
                if (superUsers != null && !superUsers.isEmpty()) {
×
2451
                    superUsers.forEach((au) -> {
×
2452
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2453
                    });
×
2454
                }
2455
                return ok("User notified about checksum validation failure.");
×
2456
            } else {
2457
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
×
2458
            }
2459
        } catch (WrappedResponse ex) {
×
2460
            return ex.getResponse();
×
2461
        }
2462
    }
2463
    
2464

2465
    @POST
2466
    @AuthRequired
2467
    @Path("{id}/submitForReview")
2468
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2469
        try {
2470
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
2471
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2472
            
2473
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2474
            
2475
            result.add("inReview", inReview);
×
2476
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2477
            return ok(result);
×
2478
        } catch (WrappedResponse wr) {
×
2479
            return wr.getResponse();
×
2480
        }
2481
    }
2482

2483
    @POST
2484
    @AuthRequired
2485
    @Path("{id}/returnToAuthor")
2486
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2487

2488
        if (jsonBody == null || jsonBody.isEmpty()) {
×
2489
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2490
        }
2491
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2492
        try {
2493
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2494
            String reasonForReturn = null;
×
2495
            reasonForReturn = json.getString("reasonForReturn");
×
2496
            if ((reasonForReturn == null || reasonForReturn.isEmpty())
×
2497
                    && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) {
×
2498
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2499
            }
2500
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2501
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2502

2503
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2504
            result.add("inReview", false);
×
2505
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2506
            return ok(result);
×
2507
        } catch (WrappedResponse wr) {
×
2508
            return wr.getResponse();
×
2509
        }
2510
    }
2511

2512
    @GET
2513
    @AuthRequired
2514
    @Path("{id}/curationStatus")
2515
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2516
        try {
2517
            Dataset ds = findDatasetOrDie(idSupplied);
×
2518
            DatasetVersion dsv = ds.getLatestVersion();
×
2519
            User user = getRequestUser(crc);
×
2520
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
2521
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2522
            } else {
2523
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2524
            }
2525
        } catch (WrappedResponse wr) {
×
2526
            return wr.getResponse();
×
2527
        }
2528
    }
2529

2530
    @PUT
2531
    @AuthRequired
2532
    @Path("{id}/curationStatus")
2533
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2534
        Dataset ds = null;
×
2535
        User u = null;
×
2536
        try {
2537
            ds = findDatasetOrDie(idSupplied);
×
2538
            u = getRequestUser(crc);
×
2539
        } catch (WrappedResponse wr) {
×
2540
            return wr.getResponse();
×
2541
        }
×
2542
        try {
2543
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2544
            return ok("Curation Status updated");
×
2545
        } catch (WrappedResponse wr) {
×
2546
            // Just change to Bad Request and send
2547
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2548
        }
2549
    }
2550

2551
    @DELETE
2552
    @AuthRequired
2553
    @Path("{id}/curationStatus")
2554
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2555
        Dataset ds = null;
×
2556
        User u = null;
×
2557
        try {
2558
            ds = findDatasetOrDie(idSupplied);
×
2559
            u = getRequestUser(crc);
×
2560
        } catch (WrappedResponse wr) {
×
2561
            return wr.getResponse();
×
2562
        }
×
2563
        try {
2564
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2565
            return ok("Curation Status deleted");
×
2566
        } catch (WrappedResponse wr) {
×
2567
            //Just change to Bad Request and send
2568
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2569
        }
2570
    }
2571

2572
    @GET
2573
    @AuthRequired
2574
    @Path("{id}/uploadurls")
2575
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2576
        try {
2577
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2578

2579
            boolean canUpdateDataset = false;
×
2580
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2581
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2582
            if (!canUpdateDataset) {
×
2583
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2584
            }
2585
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2586
            if (s3io == null) {
×
2587
                return error(Response.Status.NOT_FOUND,
×
2588
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2589
            }
2590
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
2591
            if (maxSize != null) {
×
2592
                if(fileSize > maxSize) {
×
2593
                    return error(Response.Status.BAD_REQUEST,
×
2594
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2595
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2596
                }
2597
            }
2598
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
2599
            if (limit != null) {
×
2600
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
2601
                    return error(Response.Status.BAD_REQUEST,
×
2602
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2603
                                    "The remaing file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2604
                }
2605
            }
2606
            JsonObjectBuilder response = null;
×
2607
            String storageIdentifier = null;
×
2608
            try {
2609
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
2610
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2611

2612
            } catch (IOException io) {
×
2613
                logger.warning(io.getMessage());
×
2614
                throw new WrappedResponse(io,
×
2615
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request"));
×
2616
            }
×
2617

2618
            response.add("storageIdentifier", storageIdentifier);
×
2619
            return ok(response);
×
2620
        } catch (WrappedResponse wr) {
×
2621
            return wr.getResponse();
×
2622
        }
2623
    }
2624

2625
    @DELETE
2626
    @AuthRequired
2627
    @Path("mpupload")
2628
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2629
        try {
2630
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2631
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2632
            User user = session.getUser();
×
2633
            if (!user.isAuthenticated()) {
×
2634
                try {
2635
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2636
                } catch (WrappedResponse ex) {
×
2637
                    logger.info(
×
2638
                            "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
2639
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2640
                    throw ex;
×
2641
                }
×
2642
            }
2643
            boolean allowed = false;
×
2644
            if (dataset != null) {
×
2645
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2646
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2647
            } else {
2648
                /*
2649
                 * The only legitimate case where a global id won't correspond to a dataset is
2650
                 * for uploads during creation. Given that this call will still fail unless all
2651
                 * three parameters correspond to an active multipart upload, it should be safe
2652
                 * to allow the attempt for an authenticated user. If there are concerns about
2653
                 * permissions, one could check with the current design that the user is allowed
2654
                 * to create datasets in some dataverse that is configured to use the storage
2655
                 * provider specified in the storageidentifier, but testing for the ability to
2656
                 * create a dataset in a specific dataverse would require changing the design
2657
                 * somehow (e.g. adding the ownerId to this call).
2658
                 */
2659
                allowed = true;
×
2660
            }
2661
            if (!allowed) {
×
2662
                return error(Response.Status.FORBIDDEN,
×
2663
                        "You are not permitted to abort file uploads with the supplied parameters.");
2664
            }
2665
            try {
2666
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2667
            } catch (IOException io) {
×
2668
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2669
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2670
                logger.warning(io.getMessage());
×
2671
                throw new WrappedResponse(io,
×
2672
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2673
            }
×
2674
            return Response.noContent().build();
×
2675
        } catch (WrappedResponse wr) {
×
2676
            return wr.getResponse();
×
2677
        }
2678
    }
2679

2680
    @PUT
2681
    @AuthRequired
2682
    @Path("mpupload")
2683
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2684
        try {
2685
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2686
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2687
            User user = session.getUser();
×
2688
            if (!user.isAuthenticated()) {
×
2689
                try {
2690
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2691
                } catch (WrappedResponse ex) {
×
2692
                    logger.info(
×
2693
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2694
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2695
                    throw ex;
×
2696
                }
×
2697
            }
2698
            boolean allowed = false;
×
2699
            if (dataset != null) {
×
2700
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2701
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2702
            } else {
2703
                /*
2704
                 * The only legitimate case where a global id won't correspond to a dataset is
2705
                 * for uploads during creation. Given that this call will still fail unless all
2706
                 * three parameters correspond to an active multipart upload, it should be safe
2707
                 * to allow the attempt for an authenticated user. If there are concerns about
2708
                 * permissions, one could check with the current design that the user is allowed
2709
                 * to create datasets in some dataverse that is configured to use the storage
2710
                 * provider specified in the storageidentifier, but testing for the ability to
2711
                 * create a dataset in a specific dataverse would requiring changing the design
2712
                 * somehow (e.g. adding the ownerId to this call).
2713
                 */
2714
                allowed = true;
×
2715
            }
2716
            if (!allowed) {
×
2717
                return error(Response.Status.FORBIDDEN,
×
2718
                        "You are not permitted to complete file uploads with the supplied parameters.");
2719
            }
2720
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
2721
            logger.info("Etags: " + partETagBody);
×
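            // Illustrative only (not in the original source): partETagBody is assumed to map S3 part
            // numbers to their ETags, e.g. {"1": "etag-of-part-1", "2": "etag-of-part-2"}
            // (hypothetical values), which is how the keySet()/getString() loop below interprets it.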
2722
            try {
2723
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2724
                for (String partNo : object.keySet()) {
×
2725
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2726
                }
×
2727
                for (PartETag et : eTagList) {
×
2728
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2729
                }
×
2730
            } catch (JsonException je) {
×
2731
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2732
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2733
            }
×
2734
            try {
2735
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2736
            } catch (IOException io) {
×
2737
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2738
                logger.warning(io.getMessage());
×
2739
                try {
2740
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2741
                } catch (IOException e) {
×
2742
                    logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2743
                    logger.severe(e.getMessage());
×
2744
                }
×
2745

2746
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2747
            }
×
2748
            return ok("Multipart Upload completed");
×
2749
        } catch (WrappedResponse wr) {
×
2750
            return wr.getResponse();
×
2751
        }
2752
    }
2753

2754
    /**
2755
     * Add a File to an existing Dataset
2756
     *
2757
     * @param idSupplied
2758
     * @param jsonData
2759
     * @param fileInputStream
2760
     * @param contentDispositionHeader
2761
     * @param formDataBodyPart
2762
     * @return
2763
     */
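    // Illustrative only (not in the original source): when no file part is uploaded directly, jsonData
    // is assumed to register a pre-staged (direct upload) file via keys such as
    // {"storageIdentifier": "s3://bucket:objectname", "fileName": "data.csv", "mimeType": "text/csv"},
    // assuming the JSON keys mirror the OptionalFileParams accessors used below (example values are hypothetical).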
2764
    @POST
2765
    @AuthRequired
2766
    @Path("{id}/add")
2767
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2768
    @Produces("application/json")
2769
    @Operation(summary = "Uploads a file for a dataset", 
2770
               description = "Uploads a file for a dataset")
2771
    @APIResponse(responseCode = "200",
2772
               description = "File uploaded successfully to dataset")
2773
    @Tag(name = "addFileToDataset", 
2774
         description = "Uploads a file for a dataset")
2775
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
2776
    public Response addFileToDataset(@Context ContainerRequestContext crc,
2777
                    @PathParam("id") String idSupplied,
2778
                    @FormDataParam("jsonData") String jsonData,
2779
                    @FormDataParam("file") InputStream fileInputStream,
2780
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
2781
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
2782
                    ){
2783

2784
        if (!systemConfig.isHTTPUpload()) {
×
2785
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
2786
        }
2787

2788
        // -------------------------------------
2789
        // (1) Get the user from the ContainerRequestContext
2790
        // -------------------------------------
2791
        User authUser;
2792
        authUser = getRequestUser(crc);
×
2793

2794
        // -------------------------------------
2795
        // (2) Get the Dataset Id
2796
        //  
2797
        // -------------------------------------
2798
        Dataset dataset;
2799
        
2800
        try {
2801
            dataset = findDatasetOrDie(idSupplied);
×
2802
        } catch (WrappedResponse wr) {
×
2803
            return wr.getResponse();
×
2804
        }
×
2805
        
2806
        //------------------------------------
2807
        // (2a) Make sure dataset does not have package file
2808
        //
2809
        // --------------------------------------
2810
        
2811
        for (DatasetVersion dv : dataset.getVersions()) {
×
2812
            if (dv.isHasPackageFile()) {
×
2813
                return error(Response.Status.FORBIDDEN,
×
2814
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
2815
                );
2816
            }
2817
        }
×
2818

2819
        // (2b) Load up optional params via JSON
2820
        //---------------------------------------
2821
        OptionalFileParams optionalFileParams = null;
×
2822
        msgt("(api) jsonData: " + jsonData);
×
2823

2824
        try {
2825
            optionalFileParams = new OptionalFileParams(jsonData);
×
2826
        } catch (DataFileTagException ex) {
×
2827
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
2828
        }
2829
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
×
2830
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
×
2831
        }
×
2832
        
2833
        // -------------------------------------
2834
        // (3) Get the file name and content type
2835
        // -------------------------------------
2836
        String newFilename = null;
×
2837
        String newFileContentType = null;
×
2838
        String newStorageIdentifier = null;
×
2839
        if (null == contentDispositionHeader) {
×
2840
            if (optionalFileParams.hasStorageIdentifier()) {
×
2841
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
×
2842
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
×
2843
                
2844
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
×
2845
                    return error(BAD_REQUEST,
×
2846
                            "Dataset store configuration does not allow provided storageIdentifier.");
2847
                }
2848
                if (optionalFileParams.hasFileName()) {
×
2849
                    newFilename = optionalFileParams.getFileName();
×
2850
                    if (optionalFileParams.hasMimetype()) {
×
2851
                        newFileContentType = optionalFileParams.getMimeType();
×
2852
                    }
2853
                }
2854
            } else {
2855
                return error(BAD_REQUEST,
×
2856
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
2857
            }
2858
        } else {
2859
            newFilename = contentDispositionHeader.getFileName();
×
2860
            // Let's see if the form data part has the mime (content) type specified.
2861
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
2862
            // because that defaults to "text/plain" when no "Content-Type:" header is
2863
            // present. Instead we'll go through the headers, and see if "Content-Type:"
2864
            // is there. If not, we'll default to "application/octet-stream" - the generic
2865
            // unknown type. This will prompt the application to run type detection and
2866
            // potentially find something more accurate.
2867
            // newFileContentType = formDataBodyPart.getMediaType().toString();
2868

2869
            for (String header : formDataBodyPart.getHeaders().keySet()) {
×
2870
                if (header.equalsIgnoreCase("Content-Type")) {
×
2871
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
×
2872
                }
2873
            }
×
2874
            if (newFileContentType == null) {
×
2875
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
×
2876
            }
2877
        }
2878

2879

2880
        //-------------------
2881
        // (4) Create the AddReplaceFileHelper object
2882
        //-------------------
2883
        msg("ADD!");
×
2884

2885
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
2886
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
×
2887
                ingestService,
2888
                datasetService,
2889
                fileService,
2890
                permissionSvc,
2891
                commandEngine,
2892
                systemConfig);
2893

2894

2895
        //-------------------
2896
        // (4) Run "runAddFileByDatasetId"
2897
        //-------------------
2898
        addFileHelper.runAddFileByDataset(dataset,
×
2899
                newFilename,
2900
                newFileContentType,
2901
                newStorageIdentifier,
2902
                fileInputStream,
2903
                optionalFileParams);
2904

2905

2906
        if (addFileHelper.hasError()){
×
2907
            //conflict response status added for 8859
2908
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
×
2909
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
×
2910
            }
2911
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
2912
        } else {
2913
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
2914
            try {
2915
                //msgt("as String: " + addFileHelper.getSuccessResult());
2916
                /**
2917
                 * @todo We need a consistent, sane way to communicate a human
2918
                 * readable message to an API client suitable for human
2919
                 * consumption. Imagine if the UI were built in Angular or React
2920
                 * and we want to return a message from the API as-is to the
2921
                 * user. Human readable.
2922
                 */
2923
                logger.fine("successMsg: " + successMsg);
×
2924
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
×
2925
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
×
2926
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2927
                } else {
2928
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2929
                }
2930

2931
                //"Look at that!  You added a file! (hey hey, it may have worked)");
2932
            } catch (NoFilesException ex) {
×
2933
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
2934
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");
×
2935

2936
            }
2937
        }
2938
        
2939
    } // end: addFileToDataset
2940

2941

2942
    /**
2943
     * Clean storage of a Dataset
2944
     *
2945
     * @param idSupplied
2946
     * @return
2947
     */
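    // Illustrative only (not in the original source): passing ?dryrun=true is assumed to list the
    // deletion candidates without removing anything, since doDryRun below is forwarded to
    // StorageIO.cleanUp(filter, doDryRun).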
2948
    @GET
2949
    @AuthRequired
2950
    @Path("{id}/cleanStorage")
2951
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2952
        // get user and dataset
2953
        User authUser = getRequestUser(crc);
×
2954

2955
        Dataset dataset;
2956
        try {
2957
            dataset = findDatasetOrDie(idSupplied);
×
2958
        } catch (WrappedResponse wr) {
×
2959
            return wr.getResponse();
×
2960
        }
×
2961
        
2962
        // check permissions
2963
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
2964
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
2965
        }
2966

2967
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
2968

2969
        // check if no legacy files are present
2970
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
2971
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
2972
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
2973
        }
2974

2975
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
2976
        List<String> deleted;
2977
        try {
2978
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
2979
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
2980
        } catch (IOException ex) {
×
2981
            logger.log(Level.SEVERE, null, ex);
×
2982
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
2983
        }
×
2984

2985
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
2986
        
2987
    }
2988

2989
    private static Set<String> getDatasetFilenames(Dataset dataset) {
2990
        Set<String> files = new HashSet<>();
×
2991
        for (DataFile dataFile: dataset.getFiles()) {
×
2992
            String storageIdentifier = dataFile.getStorageIdentifier();
×
2993
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
2994
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
2995
            files.add(locationParts[locationParts.length-1]);
×
2996
        }
×
2997
        return files;
×
2998
    }
2999

3000
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
3001
        return f -> {
1✔
3002
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
3003
        };
3004
    }
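    // Illustrative sketch of the filter semantics above, with hypothetical file names and
    // assuming dataFilePattern (defined elsewhere in this class) matches the standard
    // <timestamp-hex>-<random-hex> storage-identifier form:
    //
    //   Predicate<String> filter = getToDeleteFilesFilter(Set.of("18b39722140-50eb7d3c5ece"));
    //   filter.test("18b39722140-50eb7d3c5ece.orig");  // false - auxiliary of a registered file, kept
    //   filter.test("1862b7a5b42-05ec9d2d07e5");       // true  - matches the pattern, unregistered, deletable
    //   filter.test("legacy_upload.csv");              // false - legacy name, never auto-deleted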
3005

3006
    private void msg(String m) {
3007
        //System.out.println(m);
3008
        logger.fine(m);
×
3009
    }
×
3010

3011
    private void dashes() {
3012
        msg("----------------");
×
3013
    }
×
3014

3015
    private void msgt(String m) {
3016
        dashes();
×
3017
        msg(m);
×
3018
        dashes();
×
3019
    }
×
3020

3021

3022
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
3023
            throws WrappedResponse {
3024
        switch (versionId) {
×
3025
            case DS_VERSION_LATEST:
3026
                return hdl.handleLatest();
×
3027
            case DS_VERSION_DRAFT:
3028
                return hdl.handleDraft();
×
3029
            case DS_VERSION_LATEST_PUBLISHED:
3030
                return hdl.handleLatestPublished();
×
3031
            default:
3032
                try {
3033
                    String[] versions = versionId.split("\\.");
×
3034
                    switch (versions.length) {
×
3035
                        case 1:
3036
                            return hdl.handleSpecific(Long.parseLong(versions[0]), 0L);
×
3037
                        case 2:
3038
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
3039
                        default:
3040
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3041
                    }
3042
                } catch (NumberFormatException nfe) {
×
3043
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3044
                }
3045
        }
3046
    }
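    // Illustrative sketch of the version identifiers accepted above (the constant values
    // shown are the standard Dataverse ones; the constants themselves are defined
    // elsewhere in this class, so treat the literals here as an assumption):
    //
    //   handleVersion(":latest", hdl)            -> hdl.handleLatest()
    //   handleVersion(":draft", hdl)             -> hdl.handleDraft()
    //   handleVersion(":latest-published", hdl)  -> hdl.handleLatestPublished()
    //   handleVersion("2", hdl)                  -> hdl.handleSpecific(2, 0)   (major version only)
    //   handleVersion("2.1", hdl)                -> hdl.handleSpecific(2, 1)
    //   handleVersion("2.1.3", hdl)              -> 400 Bad Request (illegal identifier)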
3047

3048
    /*
3049
     * includeDeaccessioned and checkPermsWhenDeaccessioned both default to false. Use this variant only when you are sure that you don't need to work with
3050
     * a deaccessioned dataset.
3051
     */
3052
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, 
3053
                                                  String versionNumber, 
3054
                                                  final Dataset ds,
3055
                                                  UriInfo uriInfo, 
3056
                                                  HttpHeaders headers) throws WrappedResponse {
3057
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
3058
        boolean checkFilePerms = false;
×
3059
        boolean includeDeaccessioned = false;
×
3060
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
×
3061
    }
3062
    
3063
    /*
3064
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
3065
     */
3066
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3067
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
3068
        boolean checkPermsWhenDeaccessioned = true;
×
3069
        boolean bypassAccessCheck = false;
×
3070
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3071
    }
3072

3073
    /*
3074
     * checkPermsWhenDeaccessioned defaults to true. Be aware that the version will only be obtainable if the user has edit permissions.
3075
     */
3076
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3077
                                                  UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
3078
        boolean bypassAccessCheck = false;
×
3079
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3080
    }
3081

3082
    /*
3083
     * Allows the caller to define whether permissions should be checked when a deaccessioned dataset is requested. If the user doesn't have edit permissions, the request will result in an error.
3084
     */
3085
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3086
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
3087
            boolean bypassAccessCheck)
3088
            throws WrappedResponse {
3089

3090
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
×
3091

3092
        if (dsv == null || dsv.getId() == null) {
×
3093
            throw new WrappedResponse(
×
3094
                    notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
×
3095
        }
3096
        if (dsv.isReleased()&& uriInfo!=null) {
×
3097
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
×
3098
            mdcLogService.logEntry(entry);
×
3099
        }
3100
        return dsv;
×
3101
    }
3102
 
3103
    @GET
3104
    @Path("{identifier}/locks")
3105
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3106

3107
        Dataset dataset = null;
×
3108
        try {
3109
            dataset = findDatasetOrDie(id);
×
3110
            Set<DatasetLock> locks;
3111
            if (lockType == null) {
×
3112
                locks = dataset.getLocks();
×
3113
            } else {
3114
                // request for a specific type lock:
3115
                DatasetLock lock = dataset.getLockFor(lockType);
×
3116

3117
                locks = new HashSet<>();
×
3118
                if (lock != null) {
×
3119
                    locks.add(lock);
×
3120
                }
3121
            }
3122
            
3123
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3124

3125
        } catch (WrappedResponse wr) {
×
3126
            return wr.getResponse();
×
3127
        }
3128
    }
3129

3130
    @DELETE
3131
    @AuthRequired
3132
    @Path("{identifier}/locks")
3133
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3134

3135
        return response(req -> {
×
3136
            try {
3137
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3138
                if (!user.isSuperuser()) {
×
3139
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3140
                }
3141
                Dataset dataset = findDatasetOrDie(id);
×
3142
                
3143
                if (lockType == null) {
×
3144
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
3145
                    for (DatasetLock lock : dataset.getLocks()) {
×
3146
                        locks.add(lock.getReason());
×
3147
                    }
×
3148
                    if (!locks.isEmpty()) {
×
3149
                        for (DatasetLock.Reason locktype : locks) {
×
3150
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
3151
                            // refresh the dataset:
3152
                            dataset = findDatasetOrDie(id);
×
3153
                        }
×
3154
                        // kick off dataset reindexing, in case the locks removed
3155
                        // affected the search card:
3156
                        indexService.asyncIndexDataset(dataset, true);
×
3157
                        return ok("locks removed");
×
3158
                    }
3159
                    return ok("dataset not locked");
×
3160
                }
3161
                // request for a specific type lock:
3162
                DatasetLock lock = dataset.getLockFor(lockType);
×
3163
                if (lock != null) {
×
3164
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
3165
                    // refresh the dataset:
3166
                    dataset = findDatasetOrDie(id);
×
3167
                    // ... and kick off dataset reindexing, in case the lock removed
3168
                    // affected the search card:
3169
                    indexService.asyncIndexDataset(dataset, true);
×
3170
                    return ok("lock type " + lock.getReason() + " removed");
×
3171
                }
3172
                return ok("no lock type " + lockType + " on the dataset");
×
3173
            } catch (WrappedResponse wr) {
×
3174
                return wr.getResponse();
×
3175
            }
3176

3177
        }, getRequestUser(crc));
×
3178

3179
    }
3180
    
3181
    @POST
3182
    @AuthRequired
3183
    @Path("{identifier}/lock/{type}")
3184
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
3185
        return response(req -> {
×
3186
            try {
3187
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3188
                if (!user.isSuperuser()) {
×
3189
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3190
                }
3191
                Dataset dataset = findDatasetOrDie(id);
×
3192
                DatasetLock lock = dataset.getLockFor(lockType);
×
3193
                if (lock != null) {
×
3194
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
3195
                }
3196
                lock = new DatasetLock(lockType, user);
×
3197
                execCommand(new AddLockCommand(req, dataset, lock));
×
3198
                // refresh the dataset:
3199
                dataset = findDatasetOrDie(id);
×
3200
                // ... and kick off dataset reindexing:
3201
                indexService.asyncIndexDataset(dataset, true);
×
3202

3203
                return ok("dataset locked with lock type " + lockType);
×
3204
            } catch (WrappedResponse wr) {
×
3205
                return wr.getResponse();
×
3206
            }
3207

3208
        }, getRequestUser(crc));
×
3209
    }
3210
    
3211
    @GET
3212
    @AuthRequired
3213
    @Path("locks")
3214
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
3215
        // This API is here, under /datasets, and not under /admin, because we
3216
        // likely want it to be accessible to admin users who may not necessarily 
3217
        // have localhost access, which would be required to get to /api/admin in
3218
        // most installations. It is still reasonable, however, to limit access to
3219
        // this API to admin users only.
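        // Illustrative superuser call (the header and base path follow standard Dataverse
        // API conventions, and the lock type "Ingest" is just one example value of
        // DatasetLock.Reason; both are assumptions of this sketch):
        //
        //   curl -H "X-Dataverse-key:$SUPERUSER_API_TOKEN" \
        //        "$SERVER_URL/api/v1/datasets/locks?type=Ingest&userIdentifier=someUser"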
3220
        AuthenticatedUser apiUser;
3221
        try {
3222
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
3223
        } catch (WrappedResponse ex) {
×
3224
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3225
        }
×
3226
        if (!apiUser.isSuperuser()) {
×
3227
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3228
        }
3229
        
3230
        // Locks can optionally be filtered by type, user, or both.
3231
        DatasetLock.Reason lockTypeValue = null;
×
3232
        AuthenticatedUser user = null; 
×
3233
        
3234
        // For the lock type, we use a QueryParam of type String, instead of 
3235
        // DatasetLock.Reason; that would be less code to write, but this way 
3236
        // we can check if the value passed matches a valid lock type ("reason") 
3237
        // and provide a helpful error message if it doesn't. If you use a 
3238
        // QueryParam of an Enum type, trying to pass an invalid value to it 
3239
        // results in a potentially confusing "404/NOT FOUND - requested 
3240
        // resource is not available".
3241
        if (lockType != null && !lockType.isEmpty()) {
×
3242
            try {
3243
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
3244
            } catch (IllegalArgumentException iax) {
×
3245
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
3246
                for (Reason r: Reason.values()) {
×
3247
                    reasonJoiner.add(r.name());
×
3248
                }
3249
                String errorMessage = "Invalid lock type value: " + lockType + 
×
3250
                        "; valid lock types: " + reasonJoiner.toString();
×
3251
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
3252
            }
×
3253
        }
3254
        
3255
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
3256
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
3257
            if (user == null) {
×
3258
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
3259
            }
3260
        }
3261
        
3262
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
3263
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
3264
                            
3265
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3266
    }   
3267
    
3268
    
3269
    @GET
3270
    @Path("{id}/makeDataCount/citations")
3271
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
3272
        
3273
        try {
3274
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3275
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
3276
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
3277
            for (DatasetExternalCitations citation : externalCitations) {
×
3278
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
3279
                /**
3280
                 * In the future we can imagine storing and presenting more
3281
                 * information about the citation such as the title of the paper
3282
                 * and the names of the authors. For now, we'll at least give
3283
                 * the URL of the citation so people can click and find out more
3284
                 * about the citation.
3285
                 */
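                // Illustrative response shape (wrapped in the standard Dataverse status/data
                // envelope; the DOI URL is a made-up example):
                //
                //   { "status": "OK",
                //     "data": [ { "citationUrl": "https://doi.org/10.xxxx/example" } ] }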
3286
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
3287
                datasetsCitations.add(candidateObj);
×
3288
            }
×
3289
            return ok(datasetsCitations);
×
3290

3291
        } catch (WrappedResponse wr) {
×
3292
            return wr.getResponse();
×
3293
        }
3294

3295
    }
3296

3297
    @GET
3298
    @Path("{id}/makeDataCount/{metric}")
3299
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
3300
        String nullCurrentMonth = null;
×
3301
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
3302
    }
3303

3304
    @GET
3305
    @AuthRequired
3306
    @Path("{identifier}/storagesize")
3307
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
3308
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
3309
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
3310
    }
3311

3312
    @GET
3313
    @AuthRequired
3314
    @Path("{identifier}/versions/{versionId}/downloadsize")
3315
    public Response getDownloadSize(@Context ContainerRequestContext crc,
3316
                                    @PathParam("identifier") String dvIdtf,
3317
                                    @PathParam("versionId") String version,
3318
                                    @QueryParam("contentType") String contentType,
3319
                                    @QueryParam("accessStatus") String accessStatus,
3320
                                    @QueryParam("categoryName") String categoryName,
3321
                                    @QueryParam("tabularTagName") String tabularTagName,
3322
                                    @QueryParam("searchText") String searchText,
3323
                                    @QueryParam("mode") String mode,
3324
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
3325
                                    @Context UriInfo uriInfo,
3326
                                    @Context HttpHeaders headers) {
3327

3328
        return response(req -> {
×
3329
            FileSearchCriteria fileSearchCriteria;
3330
            try {
3331
                fileSearchCriteria = new FileSearchCriteria(
×
3332
                        contentType,
3333
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
3334
                        categoryName,
3335
                        tabularTagName,
3336
                        searchText
3337
                );
3338
            } catch (IllegalArgumentException e) {
×
3339
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
3340
            }
×
3341
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
3342
            try {
3343
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
×
3344
            } catch (IllegalArgumentException e) {
×
3345
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
×
3346
            }
×
3347
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
×
3348
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
×
3349
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
×
3350
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
3351
            jsonObjectBuilder.add("message", message);
×
3352
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
×
3353
            return ok(jsonObjectBuilder);
×
3354
        }, getRequestUser(crc));
×
3355
    }
3356

3357
    @GET
3358
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
3359
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
3360
        try {
3361
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3362
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
3363
            MakeDataCountUtil.MetricType metricType = null;
×
3364
            try {
3365
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
3366
            } catch (IllegalArgumentException ex) {
×
3367
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
3368
            }
×
3369
            String monthYear = null;
×
3370
            if (yyyymm != null) {
×
3371
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
3372
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
3373
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
3374
            }
3375
            if (country != null) {
×
3376
                country = country.toLowerCase();
×
3377
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
3378
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3379
                }
3380
            }
3381
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
3382
            if (datasetMetrics == null) {
×
3383
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3384
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
3385
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3386
            }
3387
            Long viewsTotalRegular = null;
×
3388
            Long viewsUniqueRegular = null;
×
3389
            Long downloadsTotalRegular = null;
×
3390
            Long downloadsUniqueRegular = null;
×
3391
            Long viewsTotalMachine = null;
×
3392
            Long viewsUniqueMachine = null;
×
3393
            Long downloadsTotalMachine = null;
×
3394
            Long downloadsUniqueMachine = null;
×
3395
            Long viewsTotal = null;
×
3396
            Long viewsUnique = null;
×
3397
            Long downloadsTotal = null;
×
3398
            Long downloadsUnique = null;
×
3399
            switch (metricSupplied) {
×
3400
                case "viewsTotal":
3401
                    viewsTotal = datasetMetrics.getViewsTotal();
×
3402
                    break;
×
3403
                case "viewsTotalRegular":
3404
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
3405
                    break;
×
3406
                case "viewsTotalMachine":
3407
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
3408
                    break;
×
3409
                case "viewsUnique":
3410
                    viewsUnique = datasetMetrics.getViewsUnique();
×
3411
                    break;
×
3412
                case "viewsUniqueRegular":
3413
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
3414
                    break;
×
3415
                case "viewsUniqueMachine":
3416
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
3417
                    break;
×
3418
                case "downloadsTotal":
3419
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
3420
                    break;
×
3421
                case "downloadsTotalRegular":
3422
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
3423
                    break;
×
3424
                case "downloadsTotalMachine":
3425
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
3426
                    break;
×
3427
                case "downloadsUnique":
3428
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
3429
                    break;
×
3430
                case "downloadsUniqueRegular":
3431
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
3432
                    break;
×
3433
                case "downloadsUniqueMachine":
3434
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
3435
                    break;
×
3436
                default:
3437
                    break;
3438
            }
3439
            /**
3440
             * TODO: Think more about the JSON output and the API design.
3441
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3442
             * now, by country. We could return multiple metrics (viewsTotal,
3443
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3444
             */
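            // Illustrative call for this endpoint (the base path follows standard Dataverse
            // API conventions and is an assumption; "viewsTotal" is one of the metric names
            // handled in the switch above):
            //
            //   curl "$SERVER_URL/api/v1/datasets/24/makeDataCount/viewsTotal/2018-05?country=us"
            //
            // The response carries the keys added below; the metrics that were not requested
            // are left null (the NullSafeJsonBuilder tolerates the null values).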
3445
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
3446
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
3447
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3448
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3449
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3450
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3451
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3452
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3453
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3454
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3455
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3456
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3457
            return ok(jsonObjectBuilder);
×
3458
        } catch (WrappedResponse wr) {
×
3459
            return wr.getResponse();
×
3460
        } catch (Exception e) {
×
3461
            //bad date - caught in sanitize call
3462
            return error(BAD_REQUEST, e.getMessage());
×
3463
        }
3464
    }
3465
    
3466
    @GET
3467
    @AuthRequired
3468
    @Path("{identifier}/storageDriver")
3469
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3470
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3471
        
3472
        Dataset dataset; 
3473
        
3474
        try {
3475
            dataset = findDatasetOrDie(dvIdtf);
×
3476
        } catch (WrappedResponse ex) {
×
3477
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3478
        }
×
3479
        
3480
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3481
    }
3482
    
3483
    @PUT
3484
    @AuthRequired
3485
    @Path("{identifier}/storageDriver")
3486
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3487
            String storageDriverLabel,
3488
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3489
        
3490
        // Superuser-only:
3491
        AuthenticatedUser user;
3492
        try {
3493
            user = getRequestAuthenticatedUserOrDie(crc);
×
3494
        } catch (WrappedResponse ex) {
×
3495
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3496
        }
×
3497
        if (!user.isSuperuser()) {
×
3498
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3499
        }
3500

3501
        Dataset dataset;
3502

3503
        try {
3504
            dataset = findDatasetOrDie(dvIdtf);
×
3505
        } catch (WrappedResponse ex) {
×
3506
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3507
        }
×
3508
        
3509
        // We don't want to allow setting this to a store id that does not exist: 
3510
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
3511
            if (store.getKey().equals(storageDriverLabel)) {
×
3512
                dataset.setStorageDriverId(store.getValue());
×
3513
                datasetService.merge(dataset);
×
3514
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3515
            }
3516
        }
×
3517
        return error(Response.Status.BAD_REQUEST,
×
3518
                "No Storage Driver found for : " + storageDriverLabel);
3519
    }
3520
    
3521
    @DELETE
3522
    @AuthRequired
3523
    @Path("{identifier}/storageDriver")
3524
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3525
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3526
        
3527
        // Superuser-only:
3528
        AuthenticatedUser user;
3529
        try {
3530
            user = getRequestAuthenticatedUserOrDie(crc);
×
3531
        } catch (WrappedResponse ex) {
×
3532
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3533
        }
×
3534
        if (!user.isSuperuser()) {
×
3535
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3536
        }
3537

3538
        Dataset dataset;
3539

3540
        try {
3541
            dataset = findDatasetOrDie(dvIdtf);
×
3542
        } catch (WrappedResponse ex) {
×
3543
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3544
        }
×
3545
        
3546
        dataset.setStorageDriverId(null);
×
3547
        datasetService.merge(dataset);
×
3548
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3549
    }
3550

3551
    @GET
3552
    @AuthRequired
3553
    @Path("{identifier}/curationLabelSet")
3554
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3555
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3556

3557
        try {
3558
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3559
            if (!user.isSuperuser()) {
×
3560
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3561
            }
3562
        } catch (WrappedResponse wr) {
×
3563
            return wr.getResponse();
×
3564
        }
×
3565

3566
        Dataset dataset;
3567

3568
        try {
3569
            dataset = findDatasetOrDie(dvIdtf);
×
3570
        } catch (WrappedResponse ex) {
×
3571
            return ex.getResponse();
×
3572
        }
×
3573

3574
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3575
    }
3576

3577
    @PUT
3578
    @AuthRequired
3579
    @Path("{identifier}/curationLabelSet")
3580
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3581
                                        @PathParam("identifier") String dvIdtf,
3582
                                        @QueryParam("name") String curationLabelSet,
3583
                                        @Context UriInfo uriInfo,
3584
                                        @Context HttpHeaders headers) throws WrappedResponse {
3585

3586
        // Superuser-only:
3587
        AuthenticatedUser user;
3588
        try {
3589
            user = getRequestAuthenticatedUserOrDie(crc);
×
3590
        } catch (WrappedResponse ex) {
×
3591
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3592
        }
×
3593
        if (!user.isSuperuser()) {
×
3594
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3595
        }
3596

3597
        Dataset dataset;
3598

3599
        try {
3600
            dataset = findDatasetOrDie(dvIdtf);
×
3601
        } catch (WrappedResponse ex) {
×
3602
            return ex.getResponse();
×
3603
        }
×
3604
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3605
            dataset.setCurationLabelSetName(curationLabelSet);
×
3606
            datasetService.merge(dataset);
×
3607
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3608
        } else {
3609
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3610
                if (setName.equals(curationLabelSet)) {
×
3611
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3612
                    datasetService.merge(dataset);
×
3613
                    return ok("Curation Label Set Name set to: " + setName);
×
3614
                }
3615
            }
×
3616
        }
3617
        return error(Response.Status.BAD_REQUEST,
×
3618
            "No Such Curation Label Set");
3619
    }
3620

3621
    @DELETE
3622
    @AuthRequired
3623
    @Path("{identifier}/curationLabelSet")
3624
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3625
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3626

3627
        // Superuser-only:
3628
        AuthenticatedUser user;
3629
        try {
3630
            user = getRequestAuthenticatedUserOrDie(crc);
×
3631
        } catch (WrappedResponse ex) {
×
3632
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3633
        }
×
3634
        if (!user.isSuperuser()) {
×
3635
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3636
        }
3637

3638
        Dataset dataset;
3639

3640
        try {
3641
            dataset = findDatasetOrDie(dvIdtf);
×
3642
        } catch (WrappedResponse ex) {
×
3643
            return ex.getResponse();
×
3644
        }
×
3645

3646
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3647
        datasetService.merge(dataset);
×
3648
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3649
    }
3650

3651
    @GET
3652
    @AuthRequired
3653
    @Path("{identifier}/allowedCurationLabels")
3654
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3655
                                             @PathParam("identifier") String dvIdtf,
3656
                                             @Context UriInfo uriInfo,
3657
                                             @Context HttpHeaders headers) throws WrappedResponse {
3658
        AuthenticatedUser user = null;
×
3659
        try {
3660
            user = getRequestAuthenticatedUserOrDie(crc);
×
3661
        } catch (WrappedResponse wr) {
×
3662
            return wr.getResponse();
×
3663
        }
×
3664

3665
        Dataset dataset;
3666

3667
        try {
3668
            dataset = findDatasetOrDie(dvIdtf);
×
3669
        } catch (WrappedResponse ex) {
×
3670
            return ex.getResponse();
×
3671
        }
×
3672
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3673
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
3674
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3675
        } else {
3676
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3677
        }
3678
    }
3679

3680
    @GET
3681
    @AuthRequired
3682
    @Path("{identifier}/timestamps")
3683
    @Produces(MediaType.APPLICATION_JSON)
3684
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3685

3686
        Dataset dataset = null;
×
3687
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3688
        try {
3689
            dataset = findDatasetOrDie(id);
×
3690
            User u = getRequestUser(crc);
×
3691
            Set<Permission> perms = new HashSet<Permission>();
×
3692
            perms.add(Permission.ViewUnpublishedDataset);
×
3693
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3694
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3695
            logger.fine("CSD: " + canSeeDraft);
×
3696
            logger.fine("IT: " + dataset.getIndexTime());
×
3697
            logger.fine("MT: " + dataset.getModificationTime());
×
3698
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
3699
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3700
            // Basic info if it's released
3701
            if (dataset.isReleased() || canSeeDraft) {
×
3702
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
3703
                if (dataset.getPublicationDate() != null) {
×
3704
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3705
                }
3706

3707
                if (dataset.getLastExportTime() != null) {
×
3708
                    timestamps.add("lastMetadataExportTime",
×
3709
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3710

3711
                }
3712

3713
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
3714
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
3715
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3716
                }
3717
                // If the modification/permissionmodification time is
3718
                // set and the index time is null or is before the mod time, the relevant index is stale
3719
                timestamps.add("hasStaleIndex",
×
3720
                        (dataset.getModificationTime() != null && (dataset.getIndexTime() == null
×
3721
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3722
                                : false);
×
3723
                timestamps.add("hasStalePermissionIndex",
×
3724
                        (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null
×
3725
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3726
                                : false);
×
3727
            }
3728
            // More detail if you can see a draft
3729
            if (canSeeDraft) {
×
3730
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
3731
                if (dataset.getIndexTime() != null) {
×
3732
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3733
                }
3734
                if (dataset.getPermissionModificationTime() != null) {
×
3735
                    timestamps.add("lastPermissionUpdateTime",
×
3736
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3737
                }
3738
                if (dataset.getPermissionIndexTime() != null) {
×
3739
                    timestamps.add("lastPermissionIndexTime",
×
3740
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3741
                }
3742
                if (dataset.getGlobalIdCreateTime() != null) {
×
3743
                    timestamps.add("globalIdCreateTime", formatter
×
3744
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3745
                }
3746

3747
            }
3748
            return ok(timestamps);
×
3749
        } catch (WrappedResponse wr) {
×
3750
            return wr.getResponse();
×
3751
        }
3752
    }
3753

3754

3755
/****************************
3756
 * Globus Support Section:
3757
 * 
3758
 * Globus transfer in (upload) and out (download) involve three basic steps: The
3759
 * app is launched and makes a callback to the
3760
 * globusUploadParameters/globusDownloadParameters method to get all of the info
3761
 * needed to set up its display.
3762
 * 
3763
 * At some point after that, the user will make a selection as to which files to
3764
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
3765
 * to indicate a transfer is about to start. In addition to providing the
3766
 * details of where to transfer the files to/from, Dataverse also grants the
3767
 * Globus principal involved the relevant rw or r permission for the dataset.
3768
 * 
3769
 * Once the transfer is started, the app records the task id and sends it to
3770
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
3771
 * monitors the transfer task and, when it ultimately succeeds or fails, it
3772
 * revokes the principal's permission and, in the transfer-in (upload) case, adds the
3773
 * files to the dataset. (The dataset is locked until the transfer completes.)
3774
 * 
3775
 * (If no transfer is started within a specified timeout, permissions will
3776
 * automatically be revoked - see the GlobusServiceBean for details.)
3777
 *
3778
 * The option to reference a file at a remote endpoint (rather than transfer it)
3779
 * follows the first two steps of the process above but completes with a call to
3780
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
3781
 * files can be added to the dataset immediately.)
3782
 */
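/*
 * Illustrative upload sequence, as described above (endpoint paths are taken from the
 * URL templates used in getGlobusUploadParams below; the base path and authentication
 * conventions are assumptions of this sketch):
 *
 *   1. GET  /api/v1/datasets/{id}/globusUploadParameters?locale=en
 *        -> signed URLs and parameters the dataverse-globus app needs for its display
 *   2. POST /api/v1/datasets/{id}/requestGlobusUploadPaths
 *        -> Dataverse assigns storageIdentifiers and grants the Globus principal rw access
 *   3. (the app starts the Globus transfer and records the task id)
 *   4. POST /api/v1/datasets/{id}/addGlobusFiles
 *        -> Dataverse monitors the task, then revokes access and registers the files
 *
 * For the "reference" (no-transfer) variant, step 4 is replaced by a call to the normal
 * /addFiles endpoint.
 */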
3783

3784
    /**
3785
     * Retrieve the parameters and signed URLs required to perform a globus
3786
     * transfer. This api endpoint is expected to be called as a signed callback
3787
     * after the globus-dataverse app/other app is launched, but it will accept
3788
     * other forms of authentication.
3789
     * 
3790
     * @param crc
3791
     * @param datasetId
3792
     */
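    // Illustrative (abridged) shape of the JSON this method returns, reconstructed from
    // the params and allowedApiCalls built below; key names not built in this method
    // (e.g. the exact layout produced by URLTokenUtil.createPostBody) are assumptions:
    //
    //   { "queryParameters": { "datasetId": 24, "siteUrl": "...", "datasetPid": "doi:...",
    //                          "managed": "true", "endpoint": "<transfer-endpoint-id>",
    //                          "fileSizeLimit": 1000000000 },
    //     "signedUrls": [ { "name": "requestGlobusTransferPaths", "httpMethod": "POST",
    //                       "signedUrl": "...", "timeOut": 600 }, ... ] }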
3793
    @GET
3794
    @AuthRequired
3795
    @Path("{id}/globusUploadParameters")
3796
    @Produces(MediaType.APPLICATION_JSON)
3797
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3798
            @QueryParam(value = "locale") String locale) {
3799
        // -------------------------------------
3800
        // (1) Get the user from the ContainerRequestContext
3801
        // -------------------------------------
3802
        AuthenticatedUser authUser;
3803
        try {
3804
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3805
        } catch (WrappedResponse e) {
×
3806
            return e.getResponse();
×
3807
        }
×
3808
        // -------------------------------------
3809
        // (2) Get the Dataset Id
3810
        // -------------------------------------
3811
        Dataset dataset;
3812

3813
        try {
3814
            dataset = findDatasetOrDie(datasetId);
×
3815
        } catch (WrappedResponse wr) {
×
3816
            return wr.getResponse();
×
3817
        }
×
3818
        String storeId = dataset.getEffectiveStorageDriverId();
×
3819
        // acceptsGlobusTransfers should only be true for an S3 or globus store
3820
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
3821
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
3822
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
3823
        }
3824

3825
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
3826

3827
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
3828
        String transferEndpoint = null;
×
3829
        JsonArray referenceEndpointsWithPaths = null;
×
3830
        if (managed) {
×
3831
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
3832
        } else {
3833
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
×
3834
        }
3835

3836
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
3837
        queryParams.add("queryParameters",
×
3838
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
3839
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
3840
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
3841
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
3842
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
3843
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
3844
        JsonObjectBuilder params = Json.createObjectBuilder();
×
3845
        substitutedParams.keySet().forEach((key) -> {
×
3846
            params.add(key, substitutedParams.get(key));
×
3847
        });
×
3848
        params.add("managed", Boolean.toString(managed));
×
3849
        if (managed) {
×
3850
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
×
3851
            if (maxSize != null) {
×
3852
                params.add("fileSizeLimit", maxSize);
×
3853
            }
3854
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
3855
            if (limit != null) {
×
3856
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
×
3857
            }
3858
        }
3859
        if (transferEndpoint != null) {
×
3860
            params.add("endpoint", transferEndpoint);
×
3861
        } else {
3862
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
×
3863
        }
3864
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
3865
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
3866
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
×
3867
        allowedApiCalls.add(
×
3868
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
×
3869
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
×
3870
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3871
        if(managed) {
×
3872
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
×
3873
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3874
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
×
3875
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3876
        } else {
3877
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
×
3878
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
×
3879
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
×
3880
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
3881
        }
3882
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
3883
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
3884
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
3885
                .add(URLTokenUtil.TIMEOUT, 5));
×
3886
        allowedApiCalls.add(
×
3887
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
3888
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
3889
                        .add(URLTokenUtil.TIMEOUT, 5));
×
3890

3891
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
3892
    }
3893

3894
    /**
3895
     * Provides specific storageIdentifiers to use for each file and requests permission for a given Globus user to upload to the dataset
3896
     * 
3897
     * @param crc
3898
     * @param datasetId
3899
     * @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
3900
     * @return
3901
     * @throws IOException
3902
     * @throws ExecutionException
3903
     * @throws InterruptedException
3904
     */
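    // Illustrative request body for a Dataverse-managed store (the principal value is a
    // placeholder; only the "principal" and "numberOfFiles" keys are read below):
    //
    //   { "principal": "<globus-principal-id>", "numberOfFiles": 2 }
    //
    // For a non-managed (reference) store, the body instead carries a "referencedFiles" array.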
3905
    @POST
3906
    @AuthRequired
3907
    @Path("{id}/requestGlobusUploadPaths")
3908
    @Consumes(MediaType.APPLICATION_JSON)
3909
    @Produces(MediaType.APPLICATION_JSON)
3910
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3911
            String jsonBody) throws IOException, ExecutionException, InterruptedException {
3912

3913
        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
×
3914

3915
        if (!systemConfig.isGlobusUpload()) {
×
3916
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
NEW
3917
                    BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
3918
        }
3919

3920
        // -------------------------------------
3921
        // (1) Get the user from the ContainerRequestContext
3922
        // -------------------------------------
3923
        AuthenticatedUser authUser;
3924
        try {
3925
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3926
        } catch (WrappedResponse e) {
×
3927
            return e.getResponse();
×
3928
        }
×
3929

3930
        // -------------------------------------
3931
        // (2) Get the Dataset Id
3932
        // -------------------------------------
3933
        Dataset dataset;
3934

3935
        try {
3936
            dataset = findDatasetOrDie(datasetId);
×
3937
        } catch (WrappedResponse wr) {
×
3938
            return wr.getResponse();
×
3939
        }
×
3940
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
×
3941
                .canIssue(UpdateDatasetVersionCommand.class)) {
×
3942

3943
            JsonObject params = JsonUtil.getJsonObject(jsonBody);
×
3944
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3945
                try {
3946
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
×
3947
                    if (referencedFiles == null || referencedFiles.size() == 0) {
×
3948
                        return badRequest("No referencedFiles specified");
×
3949
                    }
3950
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
×
3951
                    return (ok(fileMap));
×
3952
                } catch (Exception e) {
×
3953
                    return badRequest(e.getLocalizedMessage());
×
3954
                }
3955
            } else {
3956
                try {
3957
                    String principal = params.getString("principal");
×
3958
                    int numberOfPaths = params.getInt("numberOfFiles");
×
3959
                    if (numberOfPaths <= 0) {
×
3960
                        return badRequest("numberOfFiles must be positive");
×
3961
                    }
3962

3963
                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
×
3964
                    switch (response.getInt("status")) {
×
3965
                    case 201:
3966
                        return ok(response.getJsonObject("paths"));
×
3967
                    case 400:
3968
                        return badRequest("Unable to grant permission");
×
3969
                    case 409:
3970
                        return conflict("Permission already exists");
×
3971
                    default:
3972
                        return error(null, "Unexpected error when granting permission");
×
3973
                    }
3974

3975
                } catch (NullPointerException | ClassCastException e) {
×
3976
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
×
3977

3978
                }
3979
            }
3980
        } else {
3981
            return forbidden("User doesn't have permission to upload to this dataset");
×
3982
        }
3983

3984
    }
3985

3986
    /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor
3987
     * 
3988
     * @param crc
3989
     * @param datasetId
3990
     * @param jsonData - see the /addFiles documentation; an additional "taskIdentifier" key in the main object is required.
3991
     * @param uriInfo
3992
     * @return
3993
     * @throws IOException
3994
     * @throws ExecutionException
3995
     * @throws InterruptedException
3996
     */
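    // Illustrative "jsonData" form value (per-file entries follow the /addFiles format;
    // the storageIdentifier, file names, and checksum value are made up for this sketch,
    // and the extra "taskIdentifier" is the Globus transfer task to monitor):
    //
    //   { "taskIdentifier": "<globus-task-id>",
    //     "files": [ { "description": "my file", "directoryLabel": "data/2024",
    //                  "storageIdentifier": "globus://18b39722140-50eb7d3c5ece",
    //                  "fileName": "file1.txt", "mimeType": "text/plain",
    //                  "checksum": { "@type": "MD5", "@value": "<md5-hex>" } } ] }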
3997
    @POST
3998
    @AuthRequired
3999
    @Path("{id}/addGlobusFiles")
4000
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4001
    @Produces("application/json")
4002
    @Operation(summary = "Uploads a Globus file for a dataset", 
4003
               description = "Uploads a Globus file for a dataset")
4004
    @APIResponse(responseCode = "200",
4005
               description = "Globus file uploaded successfully to dataset")
4006
    @Tag(name = "addGlobusFilesToDataset", 
4007
         description = "Uploads a Globus file for a dataset")
4008
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4009
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
4010
                                            @PathParam("id") String datasetId,
4011
                                            @FormDataParam("jsonData") String jsonData,
4012
                                            @Context UriInfo uriInfo
4013
    ) throws IOException, ExecutionException, InterruptedException {
4014

4015
        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);
×
4016

4017
        // -------------------------------------
4018
        // (1) Get the user from the API key
4019
        // -------------------------------------
4020
        AuthenticatedUser authUser;
4021
        try {
4022
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
4023
        } catch (WrappedResponse ex) {
×
4024
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
4025
            );
4026
        }
×
4027

4028
        // -------------------------------------
4029
        // (2) Get the Dataset Id
4030
        // -------------------------------------
4031
        Dataset dataset;
4032

4033
        try {
4034
            dataset = findDatasetOrDie(datasetId);
×
4035
        } catch (WrappedResponse wr) {
×
4036
            return wr.getResponse();
×
4037
        }
×
4038
        
4039
        // Is Globus upload service available? 
4040
        
4041
        // ... on this Dataverse instance?
NEW
4042
        if (!systemConfig.isGlobusUpload()) {
×
NEW
4043
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
4044
        }
4045

4046
        // ... and on this specific Dataset? 
NEW
4047
        String storeId = dataset.getEffectiveStorageDriverId();
×
4048
        // acceptsGlobusTransfers should only be true for an S3 or globus store
NEW
4049
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
NEW
4050
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
NEW
4051
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
4052
        }
4053
        
4054
        // Check if the dataset is already locked
4055
        // We are reusing the code and logic used by various command to determine 
4056
        // if there are any locks on the dataset that would prevent the current 
4057
        // users from modifying it:
4058
        try {
NEW
4059
            DataverseRequest dataverseRequest = createDataverseRequest(authUser);
×
NEW
4060
            permissionService.checkEditDatasetLock(dataset, dataverseRequest, null); 
×
NEW
4061
        } catch (IllegalCommandException icex) {
×
NEW
4062
            return error(Response.Status.FORBIDDEN, "Dataset " + datasetId + " is locked: " + icex.getLocalizedMessage());
×
NEW
4063
        }
×
4064
        
UNCOV
4065
        JsonObject jsonObject = null;
×
4066
        try {
4067
            jsonObject = JsonUtil.getJsonObject(jsonData);
×
4068
        } catch (Exception ex) {
×
4069
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
×
4070
            return badRequest("Error parsing json body");
×
4071

4072
        }
×
4073

4074
        //------------------------------------
4075
        // (2b) Make sure dataset does not have package file
4076
        // --------------------------------------
4077

4078
        for (DatasetVersion dv : dataset.getVersions()) {
×
4079
            if (dv.isHasPackageFile()) {
×
4080
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4081
                );
4082
            }
4083
        }
×
4084

4085

4086
        String lockInfoMessage = "Globus Upload API started ";
×
4087
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
×
4088
                (authUser).getId(), lockInfoMessage);
×
4089
        if (lock != null) {
×
4090
            dataset.addLock(lock);
×
4091
        } else {
4092
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
4093
        }
4094

UNCOV
4095
        if(uriInfo != null) {
×
4096
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
×
4097
        }
4098

UNCOV
4099
        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
×
4100
        
4101
        // Async Call
4102
        try {
NEW
4103
            globusService.globusUpload(jsonObject, dataset, requestUrl, authUser);
×
NEW
4104
        } catch (IllegalArgumentException ex) {
×
NEW
4105
            return badRequest("Invalid parameters: "+ex.getMessage());
×
NEW
4106
        }
×
4107

4108
        return ok("Async call to Globus Upload started ");
×
4109

4110
    }
4111
    
4112
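    /*
     * Illustrative request sketch for /addGlobusFiles (not taken from this class;
     * values are placeholders). Per the Javadoc above, the multipart "jsonData"
     * part follows the /addFiles conventions, with a top-level "taskIdentifier"
     * naming the Globus transfer task to monitor, e.g. something like:
     *
     *   {
     *     "taskIdentifier": "<globus-task-id>",
     *     "files": [
     *       { "fileName": "example.csv",
     *         "mimeType": "text/csv",
     *         "storageIdentifier": "<store-id>://<generated-identifier>",
     *         "description": "Example file" }
     *     ]
     *   }
     *
     * The exact file-entry fields are those documented for /addFiles; only
     * "taskIdentifier" is specific to this endpoint.
     */
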
    /**
     * Retrieve the parameters and signed URLs required to perform a globus
     * transfer/download. This api endpoint is expected to be called as a signed
     * callback after the globus-dataverse app/other app is launched, but it will
     * accept other forms of authentication.
     * 
     * @param crc
     * @param datasetId
     * @param locale
     * @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL.
     * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
     */
    @GET
    @AuthRequired
    @Path("{id}/globusDownloadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser = null;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            logger.fine("guest user globus download");
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        JsonObject files = globusService.getFilesForDownload(downloadId);
        if (files == null) {
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
        }

        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;

        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
            params.add("endpoint", transferEndpoint);
        }
        params.add("files", files);
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE,
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }

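    /*
     * Sketch of the response built above (the exact envelope comes from
     * URLTokenUtil.createPostBody and is not restated here). The parameter block
     * carries the substituted datasetId, siteUrl, datasetVersion, dvLocale and
     * datasetPid values plus, roughly:
     *
     *   "managed": "true",
     *   "endpoint": "<transfer-endpoint-id>",   // managed stores only
     *   "files": { "<fileId>": { ... } }
     *
     * followed by the callable API entries (monitorGlobusDownload,
     * requestGlobusDownload, getDatasetMetadata, getFileListing) with their HTTP
     * methods, URL templates and timeouts, signed when the request was
     * authenticated.
     */
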
    /**
     * Requests permissions for a given Globus user to download the specified files
     * in the dataset and returns information about the paths to transfer from.
     * 
     * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
     * 
     * @param crc
     * @param datasetId
     * @param jsonData - a JSON object that must include the id of the Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/requestGlobusDownload")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
            throws IOException, ExecutionException, InterruptedException {

        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);

        if (!systemConfig.isGlobusDownload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE,
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User user = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        JsonObject body = null;
        if (jsonBody != null) {
            body = JsonUtil.getJsonObject(jsonBody);
        }
        Set<String> fileIds = null;
        if (downloadId != null) {
            JsonObject files = globusService.getFilesForDownload(downloadId);
            if (files != null) {
                fileIds = files.keySet();
            }
        } else {
            if ((body != null) && body.containsKey("fileIds")) {
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
                fileIds = new HashSet<String>(fileVals.size());
                for (JsonValue fileVal : fileVals) {
                    String id = null;
                    switch (fileVal.getValueType()) {
                    case STRING:
                        id = ((JsonString) fileVal).getString();
                        break;
                    case NUMBER:
                        id = ((JsonNumber) fileVal).toString();
                        break;
                    default:
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
                    }
                    fileIds.add(id);
                }
            } else {
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
            }
        }

        if (fileIds.isEmpty()) {
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
        }
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
        for (String id : fileIds) {
            boolean published = false;
            logger.info("File id: " + id);

            DataFile df = null;
            try {
                df = findDataFileOrDie(id);
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }
            if (!df.getOwner().equals(dataset)) {
                return badRequest("All files must be in the dataset");
            }
            dataFiles.add(df);

            for (FileMetadata fm : df.getFileMetadatas()) {
                if (fm.getDatasetVersion().isPublished()) {
                    published = true;
                    break;
                }
            }

            if (!published) {
                // If the file is not published, the user can still download it if they
                // have permission to view unpublished versions:

                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
                    return forbidden("User doesn't have permission to download file: " + id);
                }
            } else { // published and restricted and/or embargoed
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
                    // This line also handles all three authenticated session user, token user, and
                    // guest cases.
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
                        return forbidden("User doesn't have permission to download file: " + id);
                    }

            }
        }
        // Allowed to download all requested files
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
            // If managed, give the principal read permissions
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
            switch (status) {
            case 201:
                return ok(files);
            case 400:
                return badRequest("Unable to grant permission");
            case 409:
                return conflict("Permission already exists");
            default:
                return error(null, "Unexpected error when granting permission");
            }

        }

        return ok(files);
    }

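    /*
     * Illustrative POST body for /requestGlobusDownload (placeholders only).
     * "principal" is the Globus identity to be granted read permission when the
     * store is Dataverse-managed; "fileIds" is only read when no downloadId query
     * parameter was supplied and may mix numeric ids and PID strings:
     *
     *   { "principal": "<globus-principal-id>",
     *     "fileIds": [ 42, "doi:10.5072/FK2/EXAMPLE" ] }
     */
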
    /**
     * Monitors a globus download and removes permissions on the dir/dataset when
     * the specified transfer task is completed.
     * 
     * @param crc
     * @param datasetId
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
     *                  Globus task to monitor.
     * @return
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    @POST
    @AuthRequired
    @Path("{id}/monitorGlobusDownload")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            String jsonData) throws IOException, ExecutionException, InterruptedException {

        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);

        if (!systemConfig.isGlobusDownload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE,
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        // Async Call
        globusService.globusDownload(jsonData, dataset, authUser);

        return ok("Async call to Globus Download started");

    }

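    /*
     * Illustrative POST body for /monitorGlobusDownload (placeholder value),
     * matching the "taskIdentifier" key described in the Javadoc above:
     *
     *   { "taskIdentifier": "<globus-task-id>" }
     */
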
    /**
     * Add multiple Files to an existing Dataset
     *
     * @param idSupplied
     * @param jsonData
     * @return
     */
    @POST
    @AuthRequired
    @Path("{id}/addFiles")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Uploads a set of files to a dataset",
               description = "Uploads a set of files to a dataset")
    @APIResponse(responseCode = "200",
               description = "Files uploaded successfully to dataset")
    @Tag(name = "addFilesToDataset",
         description = "Uploads a set of files to a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
            @FormDataParam("jsonData") String jsonData) {

        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        dataset.getLocks().forEach(dl -> {
            logger.info(dl.toString());
        });

        //------------------------------------
        // (2a) Make sure dataset does not have package file
        // --------------------------------------

        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN,
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }

        DataverseRequest dvRequest = createDataverseRequest(authUser);

        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
                dvRequest,
                this.ingestService,
                this.datasetService,
                this.fileService,
                this.permissionSvc,
                this.commandEngine,
                this.systemConfig
        );

        return addFileHelper.addFiles(jsonData, dataset, authUser);

    }

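    /*
     * Illustrative "jsonData" form-data part for /addFiles (a sketch based on the
     * native API guide conventions, not restated from this class; values are
     * placeholders). It is a JSON array with one entry per previously uploaded
     * file, roughly:
     *
     *   [
     *     { "description": "Example file",
     *       "directoryLabel": "data/subdir",
     *       "storageIdentifier": "s3://<bucket>:<generated-identifier>",
     *       "fileName": "example.csv",
     *       "mimeType": "text/csv",
     *       "checksum": { "@type": "SHA-1", "@value": "<hex digest>" } }
     *   ]
     */
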
    /**
     * Replace multiple Files in an existing Dataset
     *
     * @param idSupplied
     * @param jsonData
     * @return
     */
    @POST
    @AuthRequired
    @Path("{id}/replaceFiles")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    @Produces("application/json")
    @Operation(summary = "Replace a set of files in a dataset",
               description = "Replace a set of files in a dataset")
    @APIResponse(responseCode = "200",
               description = "Files replaced successfully in dataset")
    @Tag(name = "replaceFilesInDataset",
         description = "Replace a set of files in a dataset")
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
                                          @PathParam("id") String idSupplied,
                                          @FormDataParam("jsonData") String jsonData) {

        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }

        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        User authUser;
        authUser = getRequestUser(crc);

        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        dataset.getLocks().forEach(dl -> {
            logger.info(dl.toString());
        });

        //------------------------------------
        // (2a) Make sure dataset does not have package file
        // --------------------------------------

        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN,
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
                );
            }
        }

        DataverseRequest dvRequest = createDataverseRequest(authUser);

        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
                dvRequest,
                this.ingestService,
                this.datasetService,
                this.fileService,
                this.permissionSvc,
                this.commandEngine,
                this.systemConfig
        );

        return addFileHelper.replaceFiles(jsonData, dataset, authUser);

    }

    /**
     * API to find curation assignments and statuses
     *
     * @return
     * @throws WrappedResponse
     */
    @GET
    @AuthRequired
    @Path("/listCurationStates")
    @Produces("text/csv")
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {

        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }

        List<DataverseRole> allRoles = dataverseRoleService.findAll();
        List<DataverseRole> curationRoles = new ArrayList<DataverseRole>();
        allRoles.forEach(r -> {
            if (r.permissions().contains(Permission.PublishDataset))
                curationRoles.add(r);
        });
        HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>();
        curationRoles.forEach(r -> {
            assignees.put(r.getAlias(), null);
        });

        StringBuilder csvSB = new StringBuilder(String.join(",",
                BundleUtil.getStringFromBundle("dataset"),
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
                String.join(",", assignees.keySet())));
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
            curationRoles.forEach(r -> {
                assignees.put(r.getAlias(), new HashSet<String>());
            });
            for (RoleAssignment ra : ras) {
                if (curationRoles.contains(ra.getRole())) {
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
                }
            }
            DatasetVersion dsv = dataset.getLatestVersion();
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
            String status = dsv.getExternalStatusLabel();
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
            List<String> sList = new ArrayList<String>();
            assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
        }
        csvSB.append("\n");
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
    }

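    /*
     * Illustrative data row of the CSV produced above (header labels come from
     * the resource bundle; the trailing columns are the curation-capable role
     * aliases on this installation): an Excel HYPERLINK formula for the dataset,
     * the creation and modification dates, the external curation status, then the
     * assignees of each curation role joined with ";", e.g.:
     *
     *   "=HYPERLINK(""https://demo.example.edu/dataset.xhtml?persistentId=doi:10.5072/FK2/EXAMPLE"",""Example Dataset"")",2024-09-01,2024-09-05,Under Review,@curator1;@curator2
     */
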
    // APIs to manage archival status

    @GET
    @AuthRequired
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
            DataverseRequest req = createDataverseRequest(au);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
                    headers);

            if (dsv.getArchivalCopyLocation() == null) {
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
            } else {
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
                return ok(status);
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @PUT
    @AuthRequired
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    String newStatus,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        logger.fine(newStatus);
        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);

            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }

            // Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
            JsonObject update = JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));

            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {

                    DataverseRequest req = createDataverseRequest(au);
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
                            uriInfo, headers);

                    if (dsv == null) {
                        return error(Status.NOT_FOUND, "Dataset version not found");
                    }
                    if (isSingleVersionArchiving()) {
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
                                return error(Status.CONFLICT, "Dataset already archived.");
                            }
                        }
                    }

                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
                    dsv = datasetversionService.merge(dsv);
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());

                    return ok("Status updated");
                }
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (JsonException | IllegalStateException ex) {
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
        }
        return error(Status.BAD_REQUEST, "Unacceptable status format");
    }

    @DELETE
    @AuthRequired
    @Produces(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                       @PathParam("id") String datasetId,
                                                       @PathParam("version") String versionNumber,
                                                       @Context UriInfo uriInfo,
                                                       @Context HttpHeaders headers) {

        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }

            DataverseRequest req = createDataverseRequest(au);
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
                    headers);
            if (dsv == null) {
                return error(Status.NOT_FOUND, "Dataset version not found");
            }
            dsv.setArchivalCopyLocation(null);
            dsv = datasetversionService.merge(dsv);

            return ok("Status deleted");

        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    private boolean isSingleVersionArchiving() {
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
        if (className != null) {
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
            try {
                clazz = Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
            } catch (ClassNotFoundException e) {
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
            } catch (ClassCastException cce) {
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
            }
        }
        return false;
    }

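    /*
     * Illustrative PUT body for /{id}/{version}/archivalStatus (a sketch; this
     * assumes the ARCHIVAL_STATUS and ARCHIVAL_STATUS_MESSAGE keys resolve to
     * "status" and "message"). The status must be one of the pending, success or
     * failure values checked above:
     *
     *   { "status": "success", "message": "Archived to <bag location>" }
     */
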
    // This method provides a callback for an external tool to retrieve its
    // parameters/api URLs. If the request is authenticated, e.g. by it being
    // signed, the api URLs will be signed. If a guest request is made, the URLs
    // will be plain/unsigned.
    // This supports the cases where a tool is accessing a restricted resource (e.g.
    // for a draft dataset), or the public case.
    @GET
    @AuthRequired
    @Path("{id}/versions/{version}/toolparams/{tid}")
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
                                            @PathParam("tid") long externalToolId,
                                            @PathParam("id") String datasetId,
                                            @PathParam("version") String version,
                                            @QueryParam(value = "locale") String locale) {
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
            if (target == null) {
                return error(BAD_REQUEST, "DatasetVersion not found.");
            }

            ExternalTool externalTool = externalToolService.findById(externalToolId);
            if (externalTool == null) {
                return error(BAD_REQUEST, "External tool not found.");
            }
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
            }
            ApiToken apiToken = null;
            User u = getRequestUser(crc);
            apiToken = authSvc.getValidApiTokenForUser(u);

            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    @GET
    @Path("summaryFieldNames")
    public Response getDatasetSummaryFieldNames() {
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
        for (String fieldName : fieldNames) {
            fieldNamesArrayBuilder.add(fieldName);
        }
        return ok(fieldNamesArrayBuilder);
    }

    @GET
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
        if (isAnonymizedAccess && anonymizedFieldTypeNames == null) {
            throw new NotAcceptableException("Anonymized Access not enabled");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
        if (dsv == null || dsv.getId() == null) {
            return notFound("Dataset version not found");
        }
        JsonObjectBuilder responseJson;
        if (isAnonymizedAccess) {
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
        } else {
            responseJson = json(dsv, null, true, returnOwners);
        }
        return ok(responseJson);
    }

    @GET
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
        if (privateUrlUser == null) {
            return notFound("Private URL user not found");
        }
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/citation")
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
                                              @PathParam("id") String datasetId,
                                              @PathParam("versionId") String versionId,
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                              @Context UriInfo uriInfo,
                                              @Context HttpHeaders headers) {
        boolean checkFilePerms = false;
        return response(req -> ok(
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers,
                        includeDeaccessioned, checkFilePerms).getCitation(true, false)),
                getRequestUser(crc));
    }

    @POST
    @AuthRequired
    @Path("{id}/versions/{versionId}/deaccession")
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
        }
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            try {
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
                if (deaccessionForwardURL != null) {
                    try {
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
                    } catch (IllegalArgumentException iae) {
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
                    }
                }
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));

                return ok("Dataset " +
                        (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) +
                        " deaccessioned for version " + versionId);
            } catch (JsonParsingException jpe) {
                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
            }
        }, getRequestUser(crc));
    }

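    /*
     * Illustrative POST body for .../deaccession (placeholders only). The keys
     * match those read above; "deaccessionForwardURL" is optional:
     *
     *   { "deaccessionReason": "Superseded by a corrected version",
     *     "deaccessionForwardURL": "https://example.org/new-location" }
     */
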
    @GET
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
        if (gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
        }
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
    }

    @PUT
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                               boolean gbAtRequest,
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
        if (!gbAtRequestOpt.isPresent()) {
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
        }
        String choice = Boolean.valueOf(gbAtRequest).toString();
        dataset.setGuestbookEntryAtRequest(choice);
        datasetService.merge(dataset);
        return ok("Guestbook Entry At Request set to: " + choice);
    }

    @DELETE
    @AuthRequired
    @Path("{identifier}/guestbookEntryAtRequest")
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }

        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
        datasetService.merge(dataset);
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
    }

    @GET
    @AuthRequired
    @Path("{id}/userPermissions")
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
        Dataset dataset;
        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        User requestUser = getRequestUser(crc);
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
        return ok(jsonObjectBuilder);
    }

    @GET
    @AuthRequired
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
                                                 @PathParam("id") String datasetId,
                                                 @PathParam("versionId") String versionId,
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
                                                 @Context UriInfo uriInfo,
                                                 @Context HttpHeaders headers) {
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
        }, getRequestUser(crc));
    }

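    /*
     * Illustrative payload built above for /userPermissions (returned inside the
     * standard OK envelope; boolean values depend on the caller's role
     * assignments):
     *
     *   { "canViewUnpublishedDataset": true,
     *     "canEditDataset": true,
     *     "canPublishDataset": false,
     *     "canManageDatasetPermissions": false,
     *     "canDeleteDatasetDraft": true }
     */
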
    /**
     * Get the PidProvider that will be used for generating new DOIs in this dataset
     *
     * @return - the id of the effective PID generator for the given dataset
     * @throws WrappedResponse
     */
    @GET
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
            @Context HttpHeaders headers) throws WrappedResponse {

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
        if (pidProvider == null) {
            // This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
        }
        String pidGeneratorId = pidProvider.getId();
        return ok(pidGeneratorId);
    }

    @PUT
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
            dataset.setPidGeneratorId(generatorId);
            datasetService.merge(dataset);
            return ok("PID Generator set to: " + generatorId);
        } else {
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the given id");
        }

    }

    @DELETE
    @AuthRequired
    @Path("{identifier}/pidGenerator")
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
            @Context HttpHeaders headers) throws WrappedResponse {

        // Superuser-only:
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        Dataset dataset;

        try {
            dataset = findDatasetOrDie(dvIdtf);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "No such dataset");
        }

        dataset.setPidGenerator(null);
        datasetService.merge(dataset);
        return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
    }

    @GET
    @Path("datasetTypes")
    public Response getDatasetTypes() {
        JsonArrayBuilder jab = Json.createArrayBuilder();
        List<DatasetType> datasetTypes = datasetTypeSvc.listAll();
        for (DatasetType datasetType : datasetTypes) {
            JsonObjectBuilder job = Json.createObjectBuilder();
            job.add("id", datasetType.getId());
            job.add("name", datasetType.getName());
            jab.add(job);
        }
        return ok(jab.build());
    }

    @GET
    @Path("datasetTypes/{idOrName}")
    public Response getDatasetTypes(@PathParam("idOrName") String idOrName) {
        DatasetType datasetType = null;
        if (StringUtils.isNumeric(idOrName)) {
            try {
                long id = Long.parseLong(idOrName);
                datasetType = datasetTypeSvc.getById(id);
            } catch (NumberFormatException ex) {
                return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
            }
        } else {
            datasetType = datasetTypeSvc.getByName(idOrName);
        }
        if (datasetType != null) {
            return ok(datasetType.toJson());
        } else {
            return error(NOT_FOUND, "Could not find a dataset type with name " + idOrName);
        }
    }

    @POST
    @AuthRequired
    @Path("datasetTypes")
    public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) {
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        if (jsonIn == null || jsonIn.isEmpty()) {
            return error(BAD_REQUEST, "JSON input was null or empty!");
        }

        String nameIn = null;
        try {
            JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn);
            nameIn = jsonObject.getString("name", null);
        } catch (JsonParsingException ex) {
            return error(BAD_REQUEST, "Problem parsing supplied JSON: " + ex.getLocalizedMessage());
        }
        if (nameIn == null) {
            return error(BAD_REQUEST, "A name for the dataset type is required");
        }
        if (StringUtils.isNumeric(nameIn)) {
            // getDatasetTypes supports id or name, so we don't want a name that looks like an id
            return error(BAD_REQUEST, "The name of the type cannot be only digits.");
        }

        try {
            DatasetType datasetType = new DatasetType();
            datasetType.setName(nameIn);
            DatasetType saved = datasetTypeSvc.save(datasetType);
            Long typeId = saved.getId();
            String name = saved.getName();
            return ok(saved.toJson());
        } catch (WrappedResponse ex) {
            return error(BAD_REQUEST, ex.getMessage());
        }
    }

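    /*
     * Illustrative POST body for /datasetTypes (superuser only). Only "name" is
     * read above; the value here is a placeholder and must not be all digits:
     *
     *   { "name": "software" }
     */
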
    @DELETE
    @AuthRequired
    @Path("datasetTypes/{id}")
    public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) {
        AuthenticatedUser user;
        try {
            user = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!user.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }

        if (doomed == null || doomed.isEmpty()) {
            throw new IllegalArgumentException("ID is required!");
        }

        long idToDelete;
        try {
            idToDelete = Long.parseLong(doomed);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("ID must be a number");
        }

        DatasetType datasetTypeToDelete = datasetTypeSvc.getById(idToDelete);
        if (datasetTypeToDelete == null) {
            return error(BAD_REQUEST, "Could not find dataset type with id " + idToDelete);
        }

        if (DatasetType.DEFAULT_DATASET_TYPE.equals(datasetTypeToDelete.getName())) {
            return error(Status.FORBIDDEN, "You cannot delete the default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE);
        }

        try {
            int numDeleted = datasetTypeSvc.deleteById(idToDelete);
            if (numDeleted == 1) {
                return ok("deleted");
            } else {
                return error(BAD_REQUEST, "Something went wrong. Number of dataset types deleted: " + numDeleted);
            }
        } catch (WrappedResponse ex) {
            return error(BAD_REQUEST, ex.getMessage());
        }
    }

}