• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

IQSS / dataverse / #23660

05 Nov 2024 05:11PM UTC coverage: 21.206% (-0.02%) from 21.224%
#23660

Pull #11001

github

GPortas
Changed: using JPA criteria instead of code looping for DatasetType query
Pull Request #11001: allow links between dataset types and metadata blocks

1 of 99 new or added lines in 7 files covered. (1.01%)

3 existing lines in 3 files now uncovered.

18316 of 86371 relevant lines covered (21.21%)

0.21 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.16
/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.s3.model.PartETag;
4
import edu.harvard.iq.dataverse.*;
5
import edu.harvard.iq.dataverse.DatasetLock.Reason;
6
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
7
import edu.harvard.iq.dataverse.api.auth.AuthRequired;
8
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
9
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
10
import edu.harvard.iq.dataverse.authorization.DataverseRole;
11
import edu.harvard.iq.dataverse.authorization.Permission;
12
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
13
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
14
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
15
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
16
import edu.harvard.iq.dataverse.authorization.users.User;
17
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
18
import edu.harvard.iq.dataverse.dataaccess.*;
19
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
20
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
21
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
22
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
23
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
24
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
25
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
26
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
27
import edu.harvard.iq.dataverse.engine.command.Command;
28
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
29
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
30
import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException;
31
import edu.harvard.iq.dataverse.engine.command.impl.*;
32
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
33
import edu.harvard.iq.dataverse.export.ExportService;
34
import edu.harvard.iq.dataverse.externaltools.ExternalTool;
35
import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
36
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
37
import edu.harvard.iq.dataverse.globus.GlobusUtil;
38
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
39
import edu.harvard.iq.dataverse.makedatacount.*;
40
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
41
import edu.harvard.iq.dataverse.metrics.MetricsUtil;
42
import edu.harvard.iq.dataverse.pidproviders.PidProvider;
43
import edu.harvard.iq.dataverse.pidproviders.PidUtil;
44
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
45
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
46
import edu.harvard.iq.dataverse.search.IndexServiceBean;
47
import edu.harvard.iq.dataverse.settings.FeatureFlags;
48
import edu.harvard.iq.dataverse.settings.JvmSettings;
49
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
50
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
51
import edu.harvard.iq.dataverse.util.*;
52
import edu.harvard.iq.dataverse.util.bagit.OREMap;
53
import edu.harvard.iq.dataverse.util.json.*;
54
import edu.harvard.iq.dataverse.workflow.Workflow;
55
import edu.harvard.iq.dataverse.workflow.WorkflowContext;
56
import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
57
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
58
import jakarta.ejb.EJB;
59
import jakarta.ejb.EJBException;
60
import jakarta.inject.Inject;
61
import jakarta.json.*;
62
import jakarta.json.stream.JsonParsingException;
63
import jakarta.servlet.http.HttpServletRequest;
64
import jakarta.servlet.http.HttpServletResponse;
65
import jakarta.ws.rs.*;
66
import jakarta.ws.rs.container.ContainerRequestContext;
67
import jakarta.ws.rs.core.*;
68
import jakarta.ws.rs.core.Response.Status;
69
import org.apache.commons.lang3.StringUtils;
70
import org.eclipse.microprofile.openapi.annotations.Operation;
71
import org.eclipse.microprofile.openapi.annotations.media.Content;
72
import org.eclipse.microprofile.openapi.annotations.media.Schema;
73
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
74
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
75
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
76
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
77
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
78
import org.glassfish.jersey.media.multipart.FormDataParam;
79

80
import java.io.IOException;
81
import java.io.InputStream;
82
import java.net.URI;
83
import java.sql.Timestamp;
84
import java.text.MessageFormat;
85
import java.text.SimpleDateFormat;
86
import java.time.LocalDate;
87
import java.time.LocalDateTime;
88
import java.time.ZoneId;
89
import java.time.format.DateTimeFormatter;
90
import java.time.format.DateTimeParseException;
91
import java.util.*;
92
import java.util.Map.Entry;
93
import java.util.concurrent.ExecutionException;
94
import java.util.function.Predicate;
95
import java.util.logging.Level;
96
import java.util.logging.Logger;
97
import java.util.regex.Pattern;
98
import java.util.stream.Collectors;
99

100
import static edu.harvard.iq.dataverse.api.ApiConstants.*;
101
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
102
import edu.harvard.iq.dataverse.dataset.DatasetType;
103
import edu.harvard.iq.dataverse.dataset.DatasetTypeServiceBean;
104
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
105
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
106
import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
107
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
108

109
@Path("datasets")
110
public class Datasets extends AbstractApiBean {
×
111

112
    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
1✔
113
    private static final Pattern dataFilePattern = Pattern.compile("^[0-9a-f]{11}-[0-9a-f]{12}\\.?.*");
1✔
114
    
115
    @Inject DataverseSession session;
116

117
    @EJB
118
    DatasetServiceBean datasetService;
119

120
    @EJB
121
    DataverseServiceBean dataverseService;
122
    
123
    @EJB
124
    GlobusServiceBean globusService;
125

126
    @EJB
127
    UserNotificationServiceBean userNotificationService;
128
    
129
    @EJB
130
    PermissionServiceBean permissionService;
131
    
132
    @EJB
133
    AuthenticationServiceBean authenticationServiceBean;
134
    
135
    @EJB
136
    DDIExportServiceBean ddiExportService;
137

138
    @EJB
139
    MetadataBlockServiceBean metadataBlockService;
140
    
141
    @EJB
142
    DataFileServiceBean fileService;
143

144
    @EJB
145
    IngestServiceBean ingestService;
146

147
    @EJB
148
    EjbDataverseEngine commandEngine;
149
    
150
    @EJB
151
    IndexServiceBean indexService;
152

153
    @EJB
154
    S3PackageImporter s3PackageImporter;
155
     
156
    @EJB
157
    SettingsServiceBean settingsService;
158

159
    // TODO: Move to AbstractApiBean
160
    @EJB
161
    DatasetMetricsServiceBean datasetMetricsSvc;
162
    
163
    @EJB
164
    DatasetExternalCitationsServiceBean datasetExternalCitationsService;
165

166
    @EJB
167
    EmbargoServiceBean embargoService;
168

169
    @EJB
170
    RetentionServiceBean retentionService;
171

172
    @Inject
173
    MakeDataCountLoggingServiceBean mdcLogService;
174
    
175
    @Inject
176
    DataverseRequestServiceBean dvRequestService;
177

178
    @Inject
179
    WorkflowServiceBean wfService;
180
    
181
    @Inject
182
    DataverseRoleServiceBean dataverseRoleService;
183

184
    @EJB
185
    DatasetVersionServiceBean datasetversionService;
186

187
    @Inject
188
    PrivateUrlServiceBean privateUrlService;
189

190
    @Inject
191
    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;
192

193
    @Inject
194
    DatasetTypeServiceBean datasetTypeSvc;
195

196
    /**
     * Consolidates the way dataset version identifiers are parsed and dispatched:
     * callers supply one handler per identifier form ("latest", "draft",
     * latest-published, or an explicit {@code major.minor} number) and receive
     * a single result of type {@code T}.
     *
     * @param <T> the result type produced by each handler
     */
    public interface DsVersionHandler<T> {
        T handleLatest();
        T handleDraft();
        T handleSpecific( long major, long minor );
        T handleLatestPublished();
    }
206
    
207
    @GET
208
    @AuthRequired
209
    @Path("{id}")
210
    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response,  @QueryParam("returnOwners") boolean returnOwners) {
211
        return response( req -> {
×
212
            final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id, true)));
×
213
            final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
214
            final JsonObjectBuilder jsonbuilder = json(retrieved, returnOwners);
×
215
            //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
216
            if((latest != null) && latest.isReleased()) {
×
217
                MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
×
218
                mdcLogService.logEntry(entry);
×
219
            }
220
            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
×
221
        }, getRequestUser(crc));
×
222
    }
223
    
224
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand 
225
    // to obtain the dataset that we are trying to export - which would handle
226
    // Auth in the process... For now, Auth isn't necessary - since export ONLY 
227
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
228
    @GET
229
    @Path("/export")
230
    @Produces({"application/xml", "application/json", "application/html", "application/ld+json", "*/*" })
231
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
232

233
        try {
234
            Dataset dataset = datasetService.findByGlobalId(persistentId);
×
235
            if (dataset == null) {
×
236
                return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
237
            }
238
            
239
            ExportService instance = ExportService.getInstance();
×
240
            
241
            InputStream is = instance.getExport(dataset, exporter);
×
242
           
243
            String mediaType = instance.getMediaType(exporter);
×
244
            //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft 
245
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
×
246
            mdcLogService.logEntry(entry);
×
247
            
248
            return Response.ok()
×
249
                    .entity(is)
×
250
                    .type(mediaType).
×
251
                    build();
×
252
        } catch (Exception wr) {
×
253
            logger.warning(wr.getMessage());
×
254
            return error(Response.Status.FORBIDDEN, "Export Failed");
×
255
        }
256
    }
257

258
    @DELETE
259
    @AuthRequired
260
    @Path("{id}")
261
    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
262
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
263
        // (and there's a comment that says "TODO: remove this command")
264
        // do we need an exposed API call for it? 
265
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, 
266
        // if the dataset only has 1 version... In other words, the functionality 
267
        // currently provided by this API is covered between the "deleteDraftVersion" and
268
        // "destroyDataset" API calls.  
269
        // (The logic below follows the current implementation of the underlying 
270
        // commands!)
271

272
        User u = getRequestUser(crc);
×
273
        return response( req -> {
×
274
            Dataset doomed = findDatasetOrDie(id);
×
275
            DatasetVersion doomedVersion = doomed.getLatestVersion();
×
276
            boolean destroy = false;
×
277
            
278
            if (doomed.getVersions().size() == 1) {
×
279
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
280
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
×
281
                }
282
                destroy = true;
×
283
            } else {
284
                if (!doomedVersion.isDraft()) {
×
285
                    throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
×
286
                }
287
            }
288
            
289
            // Gather the locations of the physical files that will need to be 
290
            // deleted once the destroy command execution has been finalized:
291
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
×
292
            
293
            execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));
×
294
            
295
            // If we have gotten this far, the destroy command has succeeded, 
296
            // so we can finalize it by permanently deleting the physical files:
297
            // (DataFileService will double-check that the datafiles no 
298
            // longer exist in the database, before attempting to delete 
299
            // the physical files)
300
            if (!deleteStorageLocations.isEmpty()) {
×
301
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
302
            }
303
            
304
            return ok("Dataset " + id + " deleted");
×
305
        }, u);
306
    }
307
        
308
    @DELETE
309
    @AuthRequired
310
    @Path("{id}/destroy")
311
    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
312

313
        User u = getRequestUser(crc);
×
314
        return response(req -> {
×
315
            // first check if dataset is released, and if so, if user is a superuser
316
            Dataset doomed = findDatasetOrDie(id);
×
317

318
            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
319
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
×
320
            }
321

322
            // Gather the locations of the physical files that will need to be 
323
            // deleted once the destroy command execution has been finalized:
324
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
325

326
            execCommand(new DestroyDatasetCommand(doomed, req));
×
327

328
            // If we have gotten this far, the destroy command has succeeded, 
329
            // so we can finalize permanently deleting the physical files:
330
            // (DataFileService will double-check that the datafiles no 
331
            // longer exist in the database, before attempting to delete 
332
            // the physical files)
333
            if (!deleteStorageLocations.isEmpty()) {
×
334
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
335
            }
336

337
            return ok("Dataset " + id + " destroyed");
×
338
        }, u);
339
    }
340
    
341
    @DELETE
342
    @AuthRequired
343
    @Path("{id}/versions/{versionId}")
344
    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
345
        if (!DS_VERSION_DRAFT.equals(versionId)) {
×
346
            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
×
347
        }
348

349
        return response( req -> {
×
350
            Dataset dataset = findDatasetOrDie(id);
×
351
            DatasetVersion doomed = dataset.getLatestVersion();
×
352
            
353
            if (!doomed.isDraft()) {
×
354
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
×
355
            }
356
            
357
            // Gather the locations of the physical files that will need to be 
358
            // deleted once the destroy command execution has been finalized:
359
            
360
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
361
            
362
            execCommand( new DeleteDatasetVersionCommand(req, dataset));
×
363
            
364
            // If we have gotten this far, the delete command has succeeded - 
365
            // by either deleting the Draft version of a published dataset, 
366
            // or destroying an unpublished one. 
367
            // This means we can finalize permanently deleting the physical files:
368
            // (DataFileService will double-check that the datafiles no 
369
            // longer exist in the database, before attempting to delete 
370
            // the physical files)
371
            if (!deleteStorageLocations.isEmpty()) {
×
372
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
373
            }
374
            
375
            return ok("Draft version of dataset " + id + " deleted");
×
376
        }, getRequestUser(crc));
×
377
    }
378
        
379
    @DELETE
380
    @AuthRequired
381
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
382
    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
383
                boolean index = true;
×
384
        return response(req -> {
×
385
            execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
386
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
387
        }, getRequestUser(crc));
×
388
    }
389
        
390
    @PUT
391
    @AuthRequired
392
    @Path("{id}/citationdate")
393
    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
394
        return response( req -> {
×
395
            if ( dsfTypeName.trim().isEmpty() ){
×
396
                return badRequest("Please provide a dataset field type in the requst body.");
×
397
            }
398
            DatasetFieldType dsfType = null;
×
399
            if (!":publicationDate".equals(dsfTypeName)) {
×
400
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
401
                if (dsfType == null) {
×
402
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
403
                }
404
            }
405

406
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
407
            return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
408
        }, getRequestUser(crc));
×
409
    }
410
    
411
    @DELETE
412
    @AuthRequired
413
    @Path("{id}/citationdate")
414
    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
415
        return response( req -> {
×
416
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
417
            return ok("Citation Date for dataset " + id + " set to default");
×
418
        }, getRequestUser(crc));
×
419
    }
420
    
421
    @GET
422
    @AuthRequired
423
    @Path("{id}/versions")
424
    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {
425

426
        return response( req -> {
×
427
            Dataset dataset = findDatasetOrDie(id);
×
428
            Boolean deepLookup = excludeFiles == null ? true : !excludeFiles;
×
429

430
            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) )
×
431
                                .stream()
×
432
                                .map( d -> json(d, deepLookup) )
×
433
                                .collect(toJsonArray()));
×
434
        }, getRequestUser(crc));
×
435
    }
436
    
437
    @GET
438
    @AuthRequired
439
    @Path("{id}/versions/{versionId}")
440
    public Response getVersion(@Context ContainerRequestContext crc,
441
                               @PathParam("id") String datasetId,
442
                               @PathParam("versionId") String versionId,
443
                               @QueryParam("excludeFiles") Boolean excludeFiles,
444
                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
445
                               @QueryParam("returnOwners") boolean returnOwners,
446
                               @Context UriInfo uriInfo,
447
                               @Context HttpHeaders headers) {
448
        return response( req -> {
×
449
            
450
            //If excludeFiles is null the default is to provide the files and because of this we need to check permissions. 
451
            boolean checkPerms = excludeFiles == null ? true : !excludeFiles;
×
452
            
453
            Dataset dataset = findDatasetOrDie(datasetId);
×
454
            DatasetVersion requestedDatasetVersion = getDatasetVersionOrDie(req, 
×
455
                                                                            versionId, 
456
                                                                            dataset, 
457
                                                                            uriInfo, 
458
                                                                            headers, 
459
                                                                            includeDeaccessioned,
460
                                                                            checkPerms);
461

462
            if (requestedDatasetVersion == null || requestedDatasetVersion.getId() == null) {
×
463
                return notFound("Dataset version not found");
×
464
            }
465

466
            if (excludeFiles == null ? true : !excludeFiles) {
×
467
                requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId());
×
468
            }
469

470
            JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion,
×
471
                                                 null, 
472
                                                 excludeFiles == null ? true : !excludeFiles, 
×
473
                                                 returnOwners);
474
            return ok(jsonBuilder);
×
475

476
        }, getRequestUser(crc));
×
477
    }
478

479
    @GET
480
    @AuthRequired
481
    @Path("{id}/versions/{versionId}/files")
482
    public Response getVersionFiles(@Context ContainerRequestContext crc,
483
                                    @PathParam("id") String datasetId,
484
                                    @PathParam("versionId") String versionId,
485
                                    @QueryParam("limit") Integer limit,
486
                                    @QueryParam("offset") Integer offset,
487
                                    @QueryParam("contentType") String contentType,
488
                                    @QueryParam("accessStatus") String accessStatus,
489
                                    @QueryParam("categoryName") String categoryName,
490
                                    @QueryParam("tabularTagName") String tabularTagName,
491
                                    @QueryParam("searchText") String searchText,
492
                                    @QueryParam("orderCriteria") String orderCriteria,
493
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
494
                                    @Context UriInfo uriInfo,
495
                                    @Context HttpHeaders headers) {
496
        return response(req -> {
×
497
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId, false), uriInfo, headers, includeDeaccessioned);
×
498
            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
499
            try {
500
                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
×
501
            } catch (IllegalArgumentException e) {
×
502
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
×
503
            }
×
504
            FileSearchCriteria fileSearchCriteria;
505
            try {
506
                fileSearchCriteria = new FileSearchCriteria(
×
507
                        contentType,
508
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
509
                        categoryName,
510
                        tabularTagName,
511
                        searchText
512
                );
513
            } catch (IllegalArgumentException e) {
×
514
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
515
            }
×
516
            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)),
×
517
                    datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
518
        }, getRequestUser(crc));
×
519
    }
520

521
    @GET
522
    @AuthRequired
523
    @Path("{id}/versions/{versionId}/files/counts")
524
    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
525
                                         @PathParam("id") String datasetId,
526
                                         @PathParam("versionId") String versionId,
527
                                         @QueryParam("contentType") String contentType,
528
                                         @QueryParam("accessStatus") String accessStatus,
529
                                         @QueryParam("categoryName") String categoryName,
530
                                         @QueryParam("tabularTagName") String tabularTagName,
531
                                         @QueryParam("searchText") String searchText,
532
                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
533
                                         @Context UriInfo uriInfo,
534
                                         @Context HttpHeaders headers) {
535
        return response(req -> {
×
536
            FileSearchCriteria fileSearchCriteria;
537
            try {
538
                fileSearchCriteria = new FileSearchCriteria(
×
539
                        contentType,
540
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
541
                        categoryName,
542
                        tabularTagName,
543
                        searchText
544
                );
545
            } catch (IllegalArgumentException e) {
×
546
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
547
            }
×
548
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
549
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
550
            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
×
551
            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
×
552
            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
×
553
            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
×
554
            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
×
555
            return ok(jsonObjectBuilder);
×
556
        }, getRequestUser(crc));
×
557
    }
558

559
    @GET
560
    @AuthRequired
561
    @Path("{id}/dirindex")
562
    @Produces("text/html")
563
    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
564

565
        folderName = folderName == null ? "" : folderName;
×
566
        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
×
567
        
568
        DatasetVersion version;
569
        try {
570
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
571
            version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
572
        } catch (WrappedResponse wr) {
×
573
            return wr.getResponse();
×
574
        }
×
575
        
576
        String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);
×
577
        
578
        // return "NOT FOUND" if there is no such folder in the dataset version:
579
        
580
        if ("".equals(output)) {
×
581
            return notFound("Folder " + folderName + " does not exist");
×
582
        }
583
        
584
        
585
        String indexFileName = folderName.equals("") ? ".index.html"
×
586
                : ".index-" + folderName.replace('/', '_') + ".html";
×
587
        response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");
×
588

589
        
590
        return Response.ok()
×
591
                .entity(output)
×
592
                //.type("application/html").
593
                .build();
×
594
    }
595
    
596
    @GET
597
    @AuthRequired
598
    @Path("{id}/versions/{versionId}/metadata")
599
    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
600
        return response( req -> ok(
×
601
                    jsonByBlocks(
×
602
                        getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
×
603
                                .getDatasetFields())), getRequestUser(crc));
×
604
    }
605
    
606
    @GET
607
    @AuthRequired
608
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
609
    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
610
                                            @PathParam("id") String datasetId,
611
                                            @PathParam("versionNumber") String versionNumber,
612
                                            @PathParam("block") String blockName,
613
                                            @Context UriInfo uriInfo,
614
                                            @Context HttpHeaders headers) {
615
        
616
        return response( req -> {
×
617
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
×
618
            
619
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
620
            for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
×
621
                if ( p.getKey().getName().equals(blockName) ) {
×
622
                    return ok(json(p.getKey(), p.getValue()));
×
623
                }
624
            }
×
625
            return notFound("metadata block named " + blockName + " not found");
×
626
        }, getRequestUser(crc));
×
627
    }
628

629
    /**
630
     * Add Signposting
631
     * @param datasetId
632
     * @param versionId
633
     * @param uriInfo
634
     * @param headers
635
     * @return
636
     */
637
    @GET
638
    @AuthRequired
639
    @Path("{id}/versions/{versionId}/linkset")
640
    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, 
641
           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
642
        if (DS_VERSION_DRAFT.equals(versionId)) {
×
643
            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
×
644
        }
645
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
646
        try {
647
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
×
648
            return Response
×
649
                    .ok(Json.createObjectBuilder()
×
650
                            .add("linkset",
×
651
                                    new SignpostingResources(systemConfig, dsv,
652
                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
×
653
                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
×
654
                                                    .getJsonLinkset())
×
655
                            .build())
×
656
                    .type(MediaType.APPLICATION_JSON).build();
×
657
        } catch (WrappedResponse wr) {
×
658
            return wr.getResponse();
×
659
        }
660
    }
661

662
    @POST
663
    @AuthRequired
664
    @Path("{id}/modifyRegistration")
665
    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
666
        return response( req -> {
×
667
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
668
            return ok("Dataset " + id + " target url updated");
×
669
        }, getRequestUser(crc));
×
670
    }
671
    
672
    @POST
673
    @AuthRequired
674
    @Path("/modifyRegistrationAll")
675
    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
676
        return response( req -> {
×
677
            datasetService.findAll().forEach( ds -> {
×
678
                try {
679
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
680
                } catch (WrappedResponse ex) {
×
681
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
682
                }
×
683
            });
×
684
            return ok("Update All Dataset target url completed");
×
685
        }, getRequestUser(crc));
×
686
    }
687
    
688
    @POST
689
    @AuthRequired
690
    @Path("{id}/modifyRegistrationMetadata")
691
    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {
692

693
        try {
694
            Dataset dataset = findDatasetOrDie(id);
×
695
            if (!dataset.isReleased()) {
×
696
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
×
697
            }
698
        } catch (WrappedResponse ex) {
×
699
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
700
        }
×
701

702
        return response(req -> {
×
703
            Dataset dataset = findDatasetOrDie(id);
×
704
            execCommand(new UpdateDvObjectPIDMetadataCommand(dataset, req));
×
705
            List<String> args = Arrays.asList(dataset.getIdentifier());
×
706
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
×
707
        }, getRequestUser(crc));
×
708
    }
709
    
710
    @POST
711
    @AuthRequired
712
    @Path("/modifyRegistrationPIDMetadataAll")
713
    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
714
        return response( req -> {
×
715
            datasetService.findAll().forEach( ds -> {
×
716
                try {
717
                    logger.fine("ReRegistering: " + ds.getId() + " : " + ds.getIdentifier());
×
718
                    if (!ds.isReleased() || (!ds.isIdentifierRegistered() || (ds.getIdentifier() == null))) {
×
719
                        if (ds.isReleased()) {
×
720
                            logger.warning("Dataset id=" + ds.getId() + " is in an inconsistent state (publicationdate but no identifier/identifier not registered");
×
721
                        }
722
                    } else {
723
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
×
724
                    }
725
                } catch (WrappedResponse ex) {
×
726
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
727
                }
×
728
            });
×
729
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
×
730
        }, getRequestUser(crc));
×
731
    }
732
  
733
    /**
     * Replaces the metadata of the dataset's draft version with the supplied
     * JSON dataset-version document. Only the special ":draft" version id is
     * accepted. If a draft already exists it is edited in place; otherwise a
     * new draft version is created from the incoming document. File metadata
     * may not be added through this endpoint.
     *
     * @param jsonBody  a JSON dataset-version document
     * @param id        dataset id or persistent identifier
     * @param versionId must be the draft version token
     * @return 200 with the managed version JSON; 400 on bad input;
     *         409 if the terms of use/access fail validation
     */
    @PUT
    @AuthRequired
    @Path("{id}/versions/{versionId}")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
        if (!DS_VERSION_DRAFT.equals(versionId)) {
            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
        }
        
        try {
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
            
            // clear possibly stale fields from the incoming dataset version.
            // creation and modification dates are updated by the commands.
            incomingVersion.setId(null);
            incomingVersion.setVersionNumber(null);
            incomingVersion.setMinorVersionNumber(null);
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
            incomingVersion.setDataset(ds);
            incomingVersion.setCreateTime(null);
            incomingVersion.setLastUpdateTime(null);
            
            // File metadata must be managed through the files API, not here.
            if (!incomingVersion.getFileMetadatas().isEmpty()){
                return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
            }
            
            // Decide up front whether we are editing an existing draft or
            // creating a new draft version from scratch.
            boolean updateDraft = ds.getLatestVersion().isDraft();
            
            DatasetVersion managedVersion;
            if (updateDraft) {
                // Edit path: copy the incoming fields and terms onto the
                // existing draft, then persist via UpdateDatasetVersionCommand.
                final DatasetVersion editVersion = ds.getOrCreateEditVersion();
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
                editVersion.setTermsOfUseAndAccess(incomingVersion.getTermsOfUseAndAccess());
                // NOTE(review): assumes the incoming version carries a non-null
                // TermsOfUseAndAccess — confirm the parser guarantees this.
                editVersion.getTermsOfUseAndAccess().setDatasetVersion(editVersion);
                // Terms are validated before the command runs so an invalid
                // document never reaches persistence.
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(editVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
                managedVersion = managedDataset.getOrCreateEditVersion();
            } else {
                // Create path: validate terms on the incoming version itself,
                // then create a brand-new draft version from it.
                boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(incomingVersion.getTermsOfUseAndAccess(), null);
                if (!hasValidTerms) {
                    return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
                }
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
            }
            return ok( json(managedVersion, true) );
                    
        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
            return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );
            
        } catch (WrappedResponse ex) {
            return ex.getResponse();
            
        }
    }
794

795
    @GET
796
    @AuthRequired
797
    @Path("{id}/versions/{versionId}/metadata")
798
    @Produces("application/ld+json, application/json-ld")
799
    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
800
        try {
801
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
802
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
×
803
            OREMap ore = new OREMap(dsv,
×
804
                    settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
×
805
            return ok(ore.getOREMapBuilder(true));
×
806

807
        } catch (WrappedResponse ex) {
×
808
            ex.printStackTrace();
×
809
            return ex.getResponse();
×
810
        } catch (Exception jpe) {
×
811
            logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
×
812
            jpe.printStackTrace();
×
813
            return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
×
814
        }
815
    }
816

817
    /**
     * Convenience endpoint: returns the JSON-LD (OAI-ORE) metadata of the
     * latest accessible version of the dataset by delegating to
     * {@link #getVersionJsonLDMetadata}.
     */
    @GET
    @AuthRequired
    @Path("{id}/metadata")
    @Produces("application/ld+json, application/json-ld")
    public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
    }
824

825
    @PUT
826
    @AuthRequired
827
    @Path("{id}/metadata")
828
    @Consumes("application/ld+json, application/json-ld")
829
    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
830

831
        try {
832
            Dataset ds = findDatasetOrDie(id);
×
833
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
834
            //Get draft state as of now
835

836
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
837
            //Get the current draft or create a new version to update
838
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
839
            dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
×
840
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
841
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
×
842
            if (!hasValidTerms) {
×
843
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
844
            }
845
            DatasetVersion managedVersion;
846
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
847
            managedVersion = managedDataset.getLatestVersion();
×
848
            String info = updateDraft ? "Version Updated" : "Version Created";
×
849
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
850

851
        } catch (WrappedResponse ex) {
×
852
            return ex.getResponse();
×
853
        } catch (JsonParsingException jpe) {
×
854
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
855
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
856
        }
857
    }
858

859
    @PUT
860
    @AuthRequired
861
    @Path("{id}/metadata/delete")
862
    @Consumes("application/ld+json, application/json-ld")
863
    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
864
        try {
865
            Dataset ds = findDatasetOrDie(id);
×
866
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
867
            //Get draft state as of now
868

869
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
870
            //Get the current draft or create a new version to update
871
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
872
            dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
×
873
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
874
            DatasetVersion managedVersion;
875
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
876
            managedVersion = managedDataset.getLatestVersion();
×
877
            String info = updateDraft ? "Version Updated" : "Version Created";
×
878
            return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
×
879

880
        } catch (WrappedResponse ex) {
×
881
            ex.printStackTrace();
×
882
            return ex.getResponse();
×
883
        } catch (JsonParsingException jpe) {
×
884
            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
×
885
            jpe.printStackTrace();
×
886
            return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
×
887
        }
888
    }
889

890
    @PUT
891
    @AuthRequired
892
    @Path("{id}/deleteMetadata")
893
    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
894

895
        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
896

897
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
898
    }
899

900
    /**
     * Deletes the field values described in {@code jsonBody} from the
     * dataset's draft version (creating the draft from the latest version if
     * needed). The body is either a single field object or a {"fields": [...]}
     * array. Each requested value must exist in the version; if any value is
     * not found the whole request fails with 400 (values matched before the
     * failing one may already have been removed from the in-memory draft, but
     * nothing is persisted until the final UpdateDatasetVersionCommand).
     *
     * @param jsonBody JSON naming the field values to delete
     * @param id       dataset id or persistent identifier
     * @param req      the Dataverse request (user + source) executing the delete
     * @return 200 with the updated version JSON, 400 if a named value is not
     *         found or the JSON is invalid, or the wrapped error response
     */
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
        try {

            Dataset ds = findDatasetOrDie(id);
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
            //Get the current draft or create a new version to update
            DatasetVersion dsv = ds.getOrCreateEditVersion();
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
            List<DatasetField> fields = new LinkedList<>();
            DatasetField singleField = null;

            // Accept either {"fields":[...]} or a single bare field object.
            JsonArray fieldsJson = json.getJsonArray("fields");
            if (fieldsJson == null) {
                singleField = jsonParser().parseField(json, Boolean.FALSE);
                fields.add(singleField);
            } else {
                fields = jsonParser().parseMultipleFields(json);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            // Deletions are collected into these lists and applied after each
            // scan, to avoid mutating the collections while iterating them.
            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
            List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
            List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();

            // For each field in the delete request, find the matching field in
            // the version (by field type) and remove the requested values.
            // Dispatch: multiple-valued vs single-valued, then controlled
            // vocabulary vs primitive vs compound.
            for (DatasetField updateField : fields) {
                boolean found = false;
                for (DatasetField dsf : dsv.getDatasetFields()) {
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                        if (dsf.getDatasetFieldType().isAllowMultiples()) {
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                // NOTE(review): this inner isAllowMultiples() check
                                // is always true here (the outer branch already
                                // established it), so the else-arm below looks
                                // unreachable — confirm before cleaning up.
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    // NOTE(review): 'found' is not reset between cvv
                                    // iterations, so once one value matches, later
                                    // missing values in the same field won't trigger
                                    // the not-found error — confirm if intentional.
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                        for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
                                            if (existing.getStrValue().equals(cvv.getStrValue())) {
                                                found = true;
                                                controlledVocabularyItemsToRemove.add(existing);
                                            }
                                        }
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                        }
                                    }
                                    for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
                                        dsf.getControlledVocabularyValues().remove(remove);
                                    }

                                } else {
                                    if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
                                        found = true;
                                        dsf.setSingleControlledVocabularyValue(null);
                                    }

                                }
                            } else {
                                if (!updateField.getDatasetFieldType().isCompound()) {
                                    // Primitive (non-compound) field values: match
                                    // on display value.
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                            for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
                                                if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
                                                    found = true;
                                                    // NOTE(review): this adds the incoming
                                                    // dfv, not the matched existing edsfv;
                                                    // removal below relies on equality
                                                    // semantics — confirm it removes the
                                                    // intended element.
                                                    datasetFieldValueItemsToRemove.add(dfv);
                                                }
                                            }
                                            if (!found) {
                                                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                            }
                                        }
                                        datasetFieldValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldValues().remove(remove);
                                        });

                                    } else {
                                        if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
                                            found = true;
                                            dsf.setSingleValue(null);
                                        }

                                    }
                                } else {
                                    // Compound field values: compare the joined
                                    // display string of all child fields.
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                        String deleteVal = getCompoundDisplayValue(dfcv);
                                        for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
                                            String existingString = getCompoundDisplayValue(existing);
                                            if (existingString.equals(deleteVal)) {
                                                found = true;
                                                datasetFieldCompoundValueItemsToRemove.add(existing);
                                            }
                                        }
                                        datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
                                            dsf.getDatasetFieldCompoundValues().remove(remove);
                                        });
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                        }
                                    }
                                }
                            }
                        } else {
                            // Single-valued field: deleting it means blanking both
                            // possible value kinds.
                            found = true;
                            dsf.setSingleValue(null);
                            dsf.setSingleControlledVocabularyValue(null);
                        }
                        // Field types are unique within a version, so stop after
                        // the first type match.
                        break;
                    }
                }
                if (!found){
                    String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
                    logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                    return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                }
            }


            // Persist the edited draft and return the managed version.
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
            return ok(json(managedVersion, true));

        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());

        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
            return ex.getResponse();

        }
    
    }
1031
    
1032
    private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
1033
        String returnString = "";
×
1034
        for (DatasetField dsf : dscv.getChildDatasetFields()) {
×
1035
            for (String value : dsf.getValues()) {
×
1036
                if (!(value == null)) {
×
1037
                    returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
×
1038
                }
1039
            }
×
1040
        }
×
1041
        return returnString;
×
1042
    }
1043
    
1044
    @PUT
1045
    @AuthRequired
1046
    @Path("{id}/editMetadata")
1047
    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
1048

1049
        Boolean replaceData = replace != null;
×
1050
        DataverseRequest req = null;
×
1051
        req = createDataverseRequest(getRequestUser(crc));
×
1052

1053
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
1054
    }
1055
    
1056
    
1057
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
1058
        try {
1059
           
1060
            Dataset ds = findDatasetOrDie(id);
×
1061
            JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1062
            //Get the current draft or create a new version to update
1063
            DatasetVersion dsv = ds.getOrCreateEditVersion();
×
1064
            dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
×
1065
            List<DatasetField> fields = new LinkedList<>();
×
1066
            DatasetField singleField = null;
×
1067
            
1068
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1069
            if (fieldsJson == null) {
×
1070
                singleField = jsonParser().parseField(json, Boolean.FALSE);
×
1071
                fields.add(singleField);
×
1072
            } else {
1073
                fields = jsonParser().parseMultipleFields(json);
×
1074
            }
1075
            
1076

1077
            String valdationErrors = validateDatasetFieldValues(fields);
×
1078

1079
            if (!valdationErrors.isEmpty()) {
×
1080
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
×
1081
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
×
1082
            }
1083

1084
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1085

1086
            //loop through the update fields     
1087
            // and compare to the version fields  
1088
            //if exist add/replace values
1089
            //if not add entire dsf
1090
            for (DatasetField updateField : fields) {
×
1091
                boolean found = false;
×
1092
                for (DatasetField dsf : dsv.getDatasetFields()) {
×
1093
                    if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
×
1094
                        found = true;
×
1095
                        if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1096
                            List priorCVV = new ArrayList<>();
×
1097
                            String cvvDisplay = "";
×
1098

1099
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1100
                                cvvDisplay = dsf.getDisplayValue();
×
1101
                                for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
×
1102
                                    priorCVV.add(cvvOld);
×
1103
                                }
×
1104
                            }
1105

1106
                            if (replaceData) {
×
1107
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1108
                                    dsf.setDatasetFieldCompoundValues(new ArrayList<>());
×
1109
                                    dsf.setDatasetFieldValues(new ArrayList<>());
×
1110
                                    dsf.setControlledVocabularyValues(new ArrayList<>());
×
1111
                                    priorCVV.clear();
×
1112
                                    dsf.getControlledVocabularyValues().clear();
×
1113
                                } else {
1114
                                    dsf.setSingleValue("");
×
1115
                                    dsf.setSingleControlledVocabularyValue(null);
×
1116
                                }
1117
                              cvvDisplay="";
×
1118
                            }
1119
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1120
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1121
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1122
                                        if (!cvvDisplay.contains(cvv.getStrValue())) {
×
1123
                                            priorCVV.add(cvv);
×
1124
                                        }
1125
                                    }
×
1126
                                    dsf.setControlledVocabularyValues(priorCVV);
×
1127
                                } else {
1128
                                    dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1129
                                }
1130
                            } else {
1131
                                if (!updateField.getDatasetFieldType().isCompound()) {
×
1132
                                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
×
1133
                                        for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
×
1134
                                            if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
×
1135
                                                dfv.setDatasetField(dsf);
×
1136
                                                dsf.getDatasetFieldValues().add(dfv);
×
1137
                                            }
1138
                                        }
×
1139
                                    } else {
1140
                                        dsf.setSingleValue(updateField.getValue());
×
1141
                                    }
1142
                                } else {
1143
                                    for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
×
1144
                                        if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
×
1145
                                            dfcv.setParentDatasetField(dsf);
×
1146
                                            dsf.setDatasetVersion(dsv);
×
1147
                                            dsf.getDatasetFieldCompoundValues().add(dfcv);
×
1148
                                        }
1149
                                    }
×
1150
                                }
1151
                            }
1152
                        } else {
×
1153
                            if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
×
1154
                                return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
×
1155
                            }
1156
                        }
1157
                        break;
1158
                    }
1159
                }
×
1160
                if (!found) {
×
1161
                    updateField.setDatasetVersion(dsv);
×
1162
                    dsv.getDatasetFields().add(updateField);
×
1163
                }
1164
            }
×
1165
            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
×
1166

1167
            return ok(json(managedVersion, true));
×
1168

1169
        } catch (JsonParseException ex) {
×
1170
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1171
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1172

1173
        } catch (WrappedResponse ex) {
×
1174
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
1175
            return ex.getResponse();
×
1176

1177
        }
1178
    }
1179
    
1180
    private String validateDatasetFieldValues(List<DatasetField> fields) {
1181
        StringBuilder error = new StringBuilder();
×
1182

1183
        for (DatasetField dsf : fields) {
×
1184
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1185
                    && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
×
1186
                error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1187
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) {
×
1188
                error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" ");
×
1189
            }
1190
        }
×
1191

1192
        if (!error.toString().isEmpty()) {
×
1193
            return (error.toString());
×
1194
        }
1195
        return "";
×
1196
    }
1197
    
1198
    /**
1199
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
1200
     */
1201
    @GET
1202
    @AuthRequired
1203
    @Path("{id}/actions/:publish")
1204
    @Deprecated
1205
    public Response publishDataseUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type ) {
1206
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
×
1207
        return publishDataset(crc, id, type, false);
×
1208
    }
1209

1210
    /**
     * Publishes the dataset identified by {@code id}.
     *
     * The {@code type} query parameter selects the kind of release:
     * "major" or "minor" run the normal PublishDatasetCommand, while
     * "updatecurrent" (superusers only) re-curates the already-published
     * current version in place via CuratePublishedDatasetVersionCommand.
     * When {@code assureIsIndexed} is true, the call is rejected with 409
     * if indexing for the dataset could still be pending.
     *
     * Returns 200 (or 202 when publication was handed to a workflow) with
     * the dataset JSON; 400 for a missing/illegal type; 403 when a
     * non-superuser requests "updatecurrent"; 409 for invalid terms of
     * use/access or pending indexing.
     */
    @POST
    @AuthRequired
    @Path("{id}/actions/:publish")
    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
        try {
            if (type == null) {
                return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
            }
            boolean updateCurrent=false;
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            // Accept any casing of the type parameter.
            type = type.toLowerCase();
            boolean isMinor=false;
            switch (type) {
                case "minor":
                    isMinor = true;
                    break;
                case "major":
                    isMinor = false;
                    break;
                case "updatecurrent":
                    // Re-curating the released version bypasses normal versioning,
                    // so it is restricted to superusers.
                    if (user.isSuperuser()) {
                        updateCurrent = true;
                    } else {
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
                    }
                    break;
                default:
                    return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
            }

            Dataset ds = findDatasetOrDie(id);
            
            // Publication requires valid terms of use and access on the latest version.
            boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(ds.getLatestVersion().getTermsOfUseAndAccess(), null);
            if (!hasValidTerms) {
                return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
            }
            
            if (mustBeIndexed) {
                logger.fine("IT: " + ds.getIndexTime());
                logger.fine("MT: " + ds.getModificationTime());
                logger.fine("PIT: " + ds.getPermissionIndexTime());
                logger.fine("PMT: " + ds.getPermissionModificationTime());
                if (ds.getIndexTime() != null && ds.getModificationTime() != null) {
                    logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0));
                }
                /*
                 * Some calls, such as the /datasets/actions/:import* commands do not set the
                 * modification or permission modification times. The checks here are trying to
                 * see if indexing or permissionindexing could be pending, so they check to see
                 * if the relevant modification time is set and if so, whether the index is also
                 * set and if so, if it after the modification time. If the modification time is
                 * set and the index time is null or is before the mod time, the 409/conflict
                 * error is returned.
                 *
                 */
                if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) ||
                        (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) {
                    return error(Response.Status.CONFLICT, "Dataset is awaiting indexing");
                }
            }
            if (updateCurrent) {
                /*
                 * Note: The code here mirrors that in the
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
                 * to the core logic (i.e. beyond updating the messaging about results) should
                 * be applied to the code there as well.
                 */
                String errorMsg = null;
                String successMsg = null;
                try {
                    CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
                    ds = commandEngine.submit(cmd);
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");

                    // If configured, update archive copy as well
                    String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString());
                    DatasetVersion updateVersion = ds.getLatestVersion();
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion);
                    if (archiveCommand != null) {
                        // Delete the record of any existing copy since it is now out of date/incorrect
                        updateVersion.setArchivalCopyLocation(null);
                        /*
                         * Then try to generate and submit an archival copy. Note that running this
                         * command within the CuratePublishedDatasetVersionCommand was causing an error:
                         * "The attribute [id] of class
                         * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary
                         * key column in the database. Updates are not allowed." To avoid that, and to
                         * simplify reporting back to the GUI whether this optional step succeeded, I've
                         * pulled this out as a separate submit().
                         */
                        try {
                            updateVersion = commandEngine.submit(archiveCommand);
                            if (!updateVersion.getArchivalCopyLocationStatus().equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)) {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success");
                            } else {
                                successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure");
                            }
                        } catch (CommandException ex) {
                            // Archiving is best-effort: an archive failure is reported in the
                            // success message rather than failing the whole request.
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
                            logger.severe(ex.getMessage());
                        }
                    }
                } catch (CommandException ex) {
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                    logger.severe(ex.getMessage());
                }
                if (errorMsg != null) {
                    return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
                } else {
                    // Hand-built response so that status_details can carry the
                    // (possibly archive-related) success message.
                    return Response.ok(Json.createObjectBuilder()
                            .add("status", ApiConstants.STATUS_OK)
                            .add("status_details", successMsg)
                            .add("data", json(ds)).build())
                            .type(MediaType.APPLICATION_JSON)
                            .build();
                }
            } else {
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds,
                        createDataverseRequest(user),
                        isMinor));
                // 202 Accepted when a workflow picked up the publication, 200 otherwise.
                return res.isWorkflow() ? accepted(json(res.getDataset())) : ok(json(res.getDataset()));
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1336

1337
    /**
     * Releases a dataset that was migrated into this installation (superusers
     * only). The JSON-LD body may supply schema.org datePublished, which is
     * used as the version release time (and as the dataset publication date
     * for a first release). Missing version numbers are assigned here. The
     * actual publication either starts the configured PrePublishDataset
     * workflow (202 Accepted) or runs FinalizeDatasetPublicationCommand
     * directly (200 OK).
     *
     * @param jsonldBody          JSON-LD metadata; only datePublished is read here
     * @param id                  dataset id or persistent identifier
     * @param contactPIDProvider  when true, the PID provider is updated during
     *                            finalization (note the inversion: the command
     *                            receives !contactPIDProvider)
     */
    @POST
    @AuthRequired
    @Path("{id}/actions/:releasemigrated")
    @Consumes("application/ld+json, application/json-ld")
    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
        try {
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
            if (!user.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
            }

            Dataset ds = findDatasetOrDie(id);
            try {
                JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody);
                String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl());
                logger.fine("Submitted date: " + pubDate);
                LocalDateTime dateTime = null;
                if(!StringUtils.isEmpty(pubDate)) {
                    dateTime = JSONLDUtil.getDateTimeFrom(pubDate);
                    final Timestamp time = Timestamp.valueOf(dateTime);
                    //Set version release date
                    ds.getLatestVersion().setReleaseTime(new Date(time.getTime()));
                }
                // dataset.getPublicationDateFormattedYYYYMMDD())
                // Assign a version number if not set
                if (ds.getLatestVersion().getVersionNumber() == null) {

                    if (ds.getVersions().size() == 1) {
                        // First Release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    } else if (ds.getLatestVersion().isMinorUpdate()) {
                        // Minor bump: keep the major number, increment the minor.
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber()));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1));
                    } else {
                        // major, non-first release
                        ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1));
                        ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0));
                    }
                }
                if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) {
                    //Also set publication date if this is the first
                    if(dateTime != null) {
                      ds.setPublicationDate(Timestamp.valueOf(dateTime));
                    }
                    // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
                    // is null, so set it here.
                    ds.setReleaseUser((AuthenticatedUser) user);
                }
            } catch (Exception e) {
                // Any failure while interpreting datePublished is treated as a bad request.
                logger.fine(e.getMessage());
                throw new BadRequestException("Unable to set publication date ("
                        + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
            }
            /*
             * Note: The code here mirrors that in the
             * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
             * to the core logic (i.e. beyond updating the messaging about results) should
             * be applied to the code there as well.
             */
            String errorMsg = null;
            Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);

            try {
                // ToDo - should this be in onSuccess()? May relate to todo above
                if (prePubWf.isPresent()) {
                    // Start the workflow, the workflow will call FinalizeDatasetPublication later
                    wfService.start(prePubWf.get(),
                            new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider),
                            false);
                } else {
                    // No workflow configured: finalize publication synchronously.
                    FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds,
                            createDataverseRequest(user), !contactPIDProvider);
                    ds = commandEngine.submit(cmd);
                }
            } catch (CommandException ex) {
                errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                logger.severe(ex.getMessage());
            }

            if (errorMsg != null) {
                return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
            } else {
                // 202 when a workflow will complete publication later, 200 when done now.
                return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
            }

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1427

1428
    @POST
1429
    @AuthRequired
1430
    @Path("{id}/move/{targetDataverseAlias}")
1431
    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
1432
        try {
1433
            User u = getRequestUser(crc);
×
1434
            Dataset ds = findDatasetOrDie(id);
×
1435
            Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
×
1436
            if (target == null) {
×
1437
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
×
1438
            }
1439
            //Command requires Super user - it will be tested by the command
1440
            execCommand(new MoveDatasetCommand(
×
1441
                    createDataverseRequest(u), ds, target, force
×
1442
            ));
1443
            return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
×
1444
        } catch (WrappedResponse ex) {
×
1445
            if (ex.getCause() instanceof UnforcedCommandException) {
×
1446
                return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
×
1447
            } else {
1448
                return ex.getResponse();
×
1449
            }
1450
        }
1451
    }
1452

1453
    @POST
1454
    @AuthRequired
1455
    @Path("{id}/files/actions/:set-embargo")
1456
    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1457

1458
        // user is authenticated
1459
        AuthenticatedUser authenticatedUser = null;
×
1460
        try {
1461
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1462
        } catch (WrappedResponse ex) {
×
1463
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1464
        }
×
1465

1466
        Dataset dataset;
1467
        try {
1468
            dataset = findDatasetOrDie(id);
×
1469
        } catch (WrappedResponse ex) {
×
1470
            return ex.getResponse();
×
1471
        }
×
1472
        
1473
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);
×
1474
        
1475
        if (!hasValidTerms){
×
1476
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
×
1477
        }
1478

1479
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1480
        /*
1481
         * This is only a pre-test - if there's no draft version, there are clearly no
1482
         * files that a normal user can change. The converse is not true. A draft
1483
         * version could contain only files that have already been released. Further, we
1484
         * haven't checked the file list yet so the user could still be trying to change
1485
         * released files even if there are some unreleased/draft-only files. Doing this
1486
         * check here does avoid having to do further parsing for some error cases. It
1487
         * also checks the user can edit this dataset, so we don't have to make that
1488
         * check later.
1489
         */
1490

1491
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1492
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1493
        }
1494

1495
        // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
1496
        long maxEmbargoDurationInMonths = 0;
×
1497
        try {
1498
            maxEmbargoDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1499
        } catch (NumberFormatException nfe){
×
1500
            if (nfe.getMessage().contains("null")) {
×
1501
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1502
            }
1503
        }
×
1504
        if (maxEmbargoDurationInMonths == 0){
×
1505
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1506
        }
1507

1508
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1509

1510
        Embargo embargo = new Embargo();
×
1511

1512

1513
        LocalDate currentDateTime = LocalDate.now();
×
1514
        LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));
×
1515

1516
        // check :MaxEmbargoDurationInMonths if -1
1517
        LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
×
1518
        // dateAvailable is not in the past
1519
        if (dateAvailable.isAfter(currentDateTime)){
×
1520
            embargo.setDateAvailable(dateAvailable);
×
1521
        } else {
1522
            return error(Status.BAD_REQUEST, "Date available can not be in the past");
×
1523
        }
1524

1525
        // dateAvailable is within limits
1526
        if (maxEmbargoDateTime != null){
×
1527
            if (dateAvailable.isAfter(maxEmbargoDateTime)){
×
1528
                return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
×
1529
            }
1530
        }
1531

1532
        embargo.setReason(json.getString("reason"));
×
1533

1534
        List<DataFile> datasetFiles = dataset.getFiles();
×
1535
        List<DataFile> filesToEmbargo = new LinkedList<>();
×
1536

1537
        // extract fileIds from json, find datafiles and add to list
1538
        if (json.containsKey("fileIds")){
×
1539
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1540
            for (JsonValue jsv : fileIds) {
×
1541
                try {
1542
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1543
                    filesToEmbargo.add(dataFile);
×
1544
                } catch (WrappedResponse ex) {
×
1545
                    return ex.getResponse();
×
1546
                }
×
1547
            }
×
1548
        }
1549

1550
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1551
        // check if files belong to dataset
1552
        if (datasetFiles.containsAll(filesToEmbargo)) {
×
1553
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1554
            boolean badFiles = false;
×
1555
            for (DataFile datafile : filesToEmbargo) {
×
1556
                // superuser can overrule an existing embargo, even on released files
1557
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
×
1558
                    restrictedFiles.add(datafile.getId());
×
1559
                    badFiles = true;
×
1560
                }
1561
            }
×
1562
            if (badFiles) {
×
1563
                return Response.status(Status.FORBIDDEN)
×
1564
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1565
                                .add("message", "You do not have permission to embargo the following files")
×
1566
                                .add("files", restrictedFiles).build())
×
1567
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1568
            }
1569
            embargo=embargoService.merge(embargo);
×
1570
            // Good request, so add the embargo. Track any existing embargoes so we can
1571
            // delete them if there are no files left that reference them.
1572
            for (DataFile datafile : filesToEmbargo) {
×
1573
                Embargo emb = datafile.getEmbargo();
×
1574
                if (emb != null) {
×
1575
                    emb.getDataFiles().remove(datafile);
×
1576
                    if (emb.getDataFiles().isEmpty()) {
×
1577
                        orphanedEmbargoes.add(emb);
×
1578
                    }
1579
                }
1580
                // Save merges the datafile with an embargo into the context
1581
                datafile.setEmbargo(embargo);
×
1582
                fileService.save(datafile);
×
1583
            }
×
1584
            //Call service to get action logged
1585
            long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
×
1586
            if (orphanedEmbargoes.size() > 0) {
×
1587
                for (Embargo emb : orphanedEmbargoes) {
×
1588
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1589
                }
×
1590
            }
1591
            //If superuser, report changes to any released files
1592
            if (authenticatedUser.isSuperuser()) {
×
1593
                String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
×
1594
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
×
1595
                if (!releasedFiles.isBlank()) {
×
1596
                    actionLogSvc
×
1597
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
×
1598
                                    .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) "
×
1599
                                            + releasedFiles + ".")
1600
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
×
1601
                }
1602
            }
1603
            return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
×
1604
        } else {
1605
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1606
        }
1607
    }
1608

1609
    @POST
1610
    @AuthRequired
1611
    @Path("{id}/files/actions/:unset-embargo")
1612
    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
1613

1614
        // user is authenticated
1615
        AuthenticatedUser authenticatedUser = null;
×
1616
        try {
1617
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
1618
        } catch (WrappedResponse ex) {
×
1619
            return error(Status.UNAUTHORIZED, "Authentication is required.");
×
1620
        }
×
1621

1622
        Dataset dataset;
1623
        try {
1624
            dataset = findDatasetOrDie(id);
×
1625
        } catch (WrappedResponse ex) {
×
1626
            return ex.getResponse();
×
1627
        }
×
1628

1629
        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
1630
        // check if files are unreleased(DRAFT?)
1631
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
1632
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
×
1633
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
×
1634
        }
1635

1636
        // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
1637
        //Todo - is 400 right for embargoes not enabled
1638
        //Todo - handle getting Long for duration in one place (settings getLong method? or is that only in wrapper (view scoped)?
1639
        int maxEmbargoDurationInMonths = 0;
×
1640
        try {
1641
            maxEmbargoDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
×
1642
        } catch (NumberFormatException nfe){
×
1643
            if (nfe.getMessage().contains("null")) {
×
1644
                return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1645
            }
1646
        }
×
1647
        if (maxEmbargoDurationInMonths == 0){
×
1648
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
×
1649
        }
1650

1651
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
1652

1653
        List<DataFile> datasetFiles = dataset.getFiles();
×
1654
        List<DataFile> embargoFilesToUnset = new LinkedList<>();
×
1655

1656
        // extract fileIds from json, find datafiles and add to list
1657
        if (json.containsKey("fileIds")){
×
1658
            JsonArray fileIds = json.getJsonArray("fileIds");
×
1659
            for (JsonValue jsv : fileIds) {
×
1660
                try {
1661
                    DataFile dataFile = findDataFileOrDie(jsv.toString());
×
1662
                    embargoFilesToUnset.add(dataFile);
×
1663
                } catch (WrappedResponse ex) {
×
1664
                    return ex.getResponse();
×
1665
                }
×
1666
            }
×
1667
        }
1668

1669
        List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
×
1670
        // check if files belong to dataset
1671
        if (datasetFiles.containsAll(embargoFilesToUnset)) {
×
1672
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
×
1673
            boolean badFiles = false;
×
1674
            for (DataFile datafile : embargoFilesToUnset) {
×
1675
                // superuser can overrule an existing embargo, even on released files
1676
                if (datafile.getEmbargo()==null || ((datafile.isReleased() && datafile.getEmbargo() != null) && !authenticatedUser.isSuperuser())) {
×
1677
                    restrictedFiles.add(datafile.getId());
×
1678
                    badFiles = true;
×
1679
                }
1680
            }
×
1681
            if (badFiles) {
×
1682
                return Response.status(Status.FORBIDDEN)
×
1683
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
×
1684
                                .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
×
1685
                                .add("files", restrictedFiles).build())
×
1686
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
×
1687
            }
1688
            // Good request, so remove the embargo from the files. Track any existing embargoes so we can
1689
            // delete them if there are no files left that reference them.
1690
            for (DataFile datafile : embargoFilesToUnset) {
×
1691
                Embargo emb = datafile.getEmbargo();
×
1692
                if (emb != null) {
×
1693
                    emb.getDataFiles().remove(datafile);
×
1694
                    if (emb.getDataFiles().isEmpty()) {
×
1695
                        orphanedEmbargoes.add(emb);
×
1696
                    }
1697
                }
1698
                // Save merges the datafile with an embargo into the context
1699
                datafile.setEmbargo(null);
×
1700
                fileService.save(datafile);
×
1701
            }
×
1702
            if (orphanedEmbargoes.size() > 0) {
×
1703
                for (Embargo emb : orphanedEmbargoes) {
×
1704
                    embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
×
1705
                }
×
1706
            }
1707
            String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
×
1708
            if(!releasedFiles.isBlank()) {
×
1709
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
×
1710
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
×
1711
                actionLogSvc.log(removeRecord);
×
1712
            }
1713
            return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
×
1714
        } else {
1715
            return error(BAD_REQUEST, "Not all files belong to dataset");
×
1716
        }
1717
    }
1718

1719
    /**
     * API endpoint: sets (or replaces) a retention period on a set of files in a dataset.
     *
     * Expects a JSON body of the form
     * {@code {"dateUnavailable": "YYYY-MM-DD", "reason": "...", "fileIds": [ ... ]}}
     * where {@code reason} is optional. The caller must be authenticated and must
     * either be a superuser or hold EditDataset permission on a draft dataset version.
     * Retention periods must be enabled via the :MinRetentionDurationInMonths setting.
     *
     * @param crc      request context carrying the authenticated user
     * @param id       dataset id or persistent identifier
     * @param jsonBody raw JSON request body (parsed below)
     * @return 200 on success; 400/401/403/409 with a message on validation failures
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:set-retention")
    public Response createFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        // Reject the request if the latest version's Terms of Use/Access are invalid.
        boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dataset.getLatestVersion().getTermsOfUseAndAccess(), null);

        if (!hasValidTerms){
            return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        // check if files are unreleased(DRAFT?)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if retentions are allowed(:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set(null) return 400
        long minRetentionDurationInMonths = 0;
        try {
            minRetentionDurationInMonths  = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // NOTE(review): relies on Long.parseLong(null) producing a message containing
            // "null"; a non-numeric (but non-null) setting falls through with the value
            // still 0 and is caught by the check below — confirm this is intended.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
            }
        }
        if (minRetentionDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
        }

        JsonObject json;
        try {
            json = JsonUtil.getJsonObject(jsonBody);
        } catch (JsonException ex) {
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
        }

        Retention retention = new Retention();


        LocalDate currentDateTime = LocalDate.now();

        // Extract the dateUnavailable - check if specified and valid
        String dateUnavailableStr = "";
        LocalDate dateUnavailable;
        try {
            dateUnavailableStr = json.getString("dateUnavailable");
            dateUnavailable = LocalDate.parse(dateUnavailableStr);
        } catch (NullPointerException npex) {
            // JsonObject.getString throws NPE when the key is absent
            return error(Status.BAD_REQUEST, "Invalid retention period; no dateUnavailable specified");
        } catch (ClassCastException ccex) {
            // key present but not a JSON string
            return error(Status.BAD_REQUEST, "Invalid retention period; dateUnavailable must be a string");
        } catch (DateTimeParseException dtpex) {
            return error(Status.BAD_REQUEST, "Invalid date format for dateUnavailable: " + dateUnavailableStr);
        }

        // check :MinRetentionDurationInMonths if -1
        // -1 is treated as "no minimum duration enforced"
        LocalDate minRetentionDateTime = minRetentionDurationInMonths != -1 ? LocalDate.now().plusMonths(minRetentionDurationInMonths) : null;
        // dateUnavailable is not in the past
        if (dateUnavailable.isAfter(currentDateTime)){
            retention.setDateUnavailable(dateUnavailable);
        } else {
            return error(Status.BAD_REQUEST, "Date unavailable can not be in the past");
        }

        // dateAvailable is within limits
        if (minRetentionDateTime != null){
            if (dateUnavailable.isBefore(minRetentionDateTime)){
                return error(Status.BAD_REQUEST, "Date unavailable can not be earlier than MinRetentionDurationInMonths: "+minRetentionDurationInMonths + " from now");
            }
        }
        
        try {
            String reason = json.getString("reason");
            retention.setReason(reason);
        } catch (NullPointerException npex) {
            // ignoring; no reason specified is OK, it is optional
        } catch (ClassCastException ccex) {
            return error(Status.BAD_REQUEST, "Invalid retention period; reason must be a string");
        }


        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> filesToRetention = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            try {
                JsonArray fileIds = json.getJsonArray("fileIds");
                for (JsonValue jsv : fileIds) {
                    try {
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
                        filesToRetention.add(dataFile);
                    } catch (WrappedResponse ex) {
                        return ex.getResponse();
                    }
                }
            } catch (ClassCastException ccex) {
                return error(Status.BAD_REQUEST, "Invalid retention period; fileIds must be an array of id strings");
            } catch (NullPointerException npex) {
                return error(Status.BAD_REQUEST, "Invalid retention period; no fileIds specified");
            }
        } else {
            return error(Status.BAD_REQUEST, "No fileIds specified");
        }

        List<Retention> orphanedRetentions = new ArrayList<Retention>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(filesToRetention)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            // Collect all offending files before failing so the error lists every one.
            for (DataFile datafile : filesToRetention) {
                // superuser can overrule an existing retention, even on released files
                if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "You do not have permission to set a retention period for the following files")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Persist the new retention first so files can reference the managed entity.
            retention=retentionService.merge(retention);
            // Good request, so add the retention. Track any existing retentions so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : filesToRetention) {
                Retention ret = datafile.getRetention();
                if (ret != null) {
                    ret.getDataFiles().remove(datafile);
                    if (ret.getDataFiles().isEmpty()) {
                        orphanedRetentions.add(ret);
                    }
                }
                // Save merges the datafile with an retention into the context
                datafile.setRetention(retention);
                fileService.save(datafile);
            }
            //Call service to get action logged
            // NOTE(review): retentionId is unused beyond this call; the call is kept for
            // its action-logging side effect.
            long retentionId = retentionService.save(retention, authenticatedUser.getIdentifier());
            if (orphanedRetentions.size() > 0) {
                for (Retention ret : orphanedRetentions) {
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
                }
            }
            //If superuser, report changes to any released files
            if (authenticatedUser.isSuperuser()) {
                String releasedFiles = filesToRetention.stream().filter(d -> d.isReleased())
                        .map(d -> d.getId().toString()).collect(Collectors.joining(","));
                if (!releasedFiles.isBlank()) {
                    actionLogSvc
                            .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionAddedTo")
                                    .setInfo("Retention id: " + retention.getId() + " added for released file(s), id(s) "
                                            + releasedFiles + ".")
                                    .setUserIdentifier(authenticatedUser.getIdentifier()));
                }
            }
            return ok(Json.createObjectBuilder().add("message", "File(s) retention period has been set or updated"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
1898

1899
    /**
     * API endpoint: removes the retention period from a set of files in a dataset.
     *
     * Expects a JSON body of the form {@code {"fileIds": [ ... ]}}. The caller must be
     * authenticated and must either be a superuser or hold EditDataset permission on a
     * draft dataset version. Retention entities left with no referencing files are
     * deleted; removals from released files are recorded in the action log.
     *
     * @param crc      request context carrying the authenticated user
     * @param id       dataset id or persistent identifier
     * @param jsonBody raw JSON request body (parsed below)
     * @return 200 on success; 400/401/403 with a message on validation failures
     */
    @POST
    @AuthRequired
    @Path("{id}/files/actions/:unset-retention")
    public Response removeFileRetention(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){

        // user is authenticated
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse ex) {
            return error(Status.UNAUTHORIZED, "Authentication is required.");
        }

        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }

        // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
        // check if files are unreleased(DRAFT?)
        //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
        if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
            return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
        }

        // check if retentions are allowed(:MinRetentionDurationInMonths), gets the :MinRetentionDurationInMonths setting variable, if 0 or not set(null) return 400
        int minRetentionDurationInMonths = 0;
        try {
            minRetentionDurationInMonths  = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MinRetentionDurationInMonths.toString()));
        } catch (NumberFormatException nfe){
            // NOTE(review): relies on Integer.parseInt(null) producing a message containing
            // "null"; a non-numeric (but non-null) setting falls through with the value
            // still 0 and is caught by the check below — confirm this is intended.
            if (nfe.getMessage().contains("null")) {
                return error(Status.BAD_REQUEST, "No Retention periods allowed");
            }
        }
        if (minRetentionDurationInMonths == 0){
            return error(Status.BAD_REQUEST, "No Retention periods allowed");
        }

        JsonObject json;
        try {
            json = JsonUtil.getJsonObject(jsonBody);
        } catch (JsonException ex) {
            return error(Status.BAD_REQUEST, "Invalid JSON; error message: " + ex.getMessage());
        }

        List<DataFile> datasetFiles = dataset.getFiles();
        List<DataFile> retentionFilesToUnset = new LinkedList<>();

        // extract fileIds from json, find datafiles and add to list
        if (json.containsKey("fileIds")){
            try {
                JsonArray fileIds = json.getJsonArray("fileIds");
                for (JsonValue jsv : fileIds) {
                    try {
                        DataFile dataFile = findDataFileOrDie(jsv.toString());
                        retentionFilesToUnset.add(dataFile);
                    } catch (WrappedResponse ex) {
                        return ex.getResponse();
                    }
                }
            } catch (ClassCastException ccex) {
                // "fileIds" present but not a JSON array
                return error(Status.BAD_REQUEST, "fileIds must be an array of id strings");
            } catch (NullPointerException npex) {
                return error(Status.BAD_REQUEST, "No fileIds specified");
            }
        } else {
            return error(Status.BAD_REQUEST, "No fileIds specified");
        }

        List<Retention> orphanedRetentions = new ArrayList<Retention>();
        // check if files belong to dataset
        if (datasetFiles.containsAll(retentionFilesToUnset)) {
            JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
            boolean badFiles = false;
            // A file is rejected when it has no retention to remove, or it is released
            // and the caller is not a superuser.
            for (DataFile datafile : retentionFilesToUnset) {
                // superuser can overrule an existing retention, even on released files
                if (datafile.getRetention()==null || ((datafile.isReleased() && datafile.getRetention() != null) && !authenticatedUser.isSuperuser())) {
                    restrictedFiles.add(datafile.getId());
                    badFiles = true;
                }
            }
            if (badFiles) {
                return Response.status(Status.FORBIDDEN)
                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                .add("message", "The following files do not have retention periods or you do not have permission to remove their retention periods")
                                .add("files", restrictedFiles).build())
                        .type(MediaType.APPLICATION_JSON_TYPE).build();
            }
            // Good request, so remove the retention from the files. Track any existing retentions so we can
            // delete them if there are no files left that reference them.
            for (DataFile datafile : retentionFilesToUnset) {
                Retention ret = datafile.getRetention();
                if (ret != null) {
                    ret.getDataFiles().remove(datafile);
                    if (ret.getDataFiles().isEmpty()) {
                        orphanedRetentions.add(ret);
                    }
                }
                // Save merges the datafile with an retention into the context
                datafile.setRetention(null);
                fileService.save(datafile);
            }
            if (orphanedRetentions.size() > 0) {
                for (Retention ret : orphanedRetentions) {
                    retentionService.delete(ret, authenticatedUser.getIdentifier());
                }
            }
            // Audit-log removals that touched released files.
            String releasedFiles = retentionFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
            if(!releasedFiles.isBlank()) {
                ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "retentionRemovedFrom").setInfo("Retention removed from released file(s), id(s) " + releasedFiles + ".");
                removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
                actionLogSvc.log(removeRecord);
            }
            return ok(Json.createObjectBuilder().add("message", "Retention periods were removed from file(s)"));
        } else {
            return error(BAD_REQUEST, "Not all files belong to dataset");
        }
    }
2019

2020
    @PUT
2021
    @AuthRequired
2022
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
2023
    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
2024
        try {
2025
            User u = getRequestUser(crc);
×
2026
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
2027
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
2028
            if (linked == null){
×
2029
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
2030
            }
2031
            if (linking == null) {
×
2032
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
2033
            }
2034
            execCommand(new LinkDatasetCommand(
×
2035
                    createDataverseRequest(u), linking, linked
×
2036
            ));
2037
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
2038
        } catch (WrappedResponse ex) {
×
2039
            return ex.getResponse();
×
2040
        }
2041
    }
2042

2043
    @GET
2044
    @Path("{id}/versions/{versionId}/customlicense")
2045
    public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versionId") String versionId,
2046
            @Context UriInfo uriInfo, @Context HttpHeaders headers) {
2047
        User user = session.getUser();
×
2048
        String persistentId;
2049
        try {
2050
            if (DatasetUtil.getLicense(getDatasetVersionOrDie(createDataverseRequest(user), versionId, findDatasetOrDie(id), uriInfo, headers)) != null) {
×
2051
                return error(Status.NOT_FOUND, "This Dataset has no custom license");
×
2052
            }
2053
            persistentId = getRequestParameter(":persistentId".substring(1));
×
2054
            if (versionId.equals(DS_VERSION_DRAFT)) {
×
2055
                versionId = "DRAFT";
×
2056
            }
2057
        } catch (WrappedResponse wrappedResponse) {
×
2058
            return wrappedResponse.getResponse();
×
2059
        }
×
2060
        return Response.seeOther(URI.create(systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId="
×
2061
                + persistentId + "&version=" + versionId + "&selectTab=termsTab")).build();
×
2062
    }
2063

2064

2065
    @GET
2066
    @AuthRequired
2067
    @Path("{id}/links")
2068
    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
2069
        try {
2070
            User u = getRequestUser(crc);
×
2071
            if (!u.isSuperuser()) {
×
2072
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
2073
            }
2074
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2075

2076
            long datasetId = dataset.getId();
×
2077
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
2078
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
2079
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
2080
                JsonObjectBuilder datasetBuilder = Json.createObjectBuilder();
×
2081
                datasetBuilder.add("id", dataverse.getId());
×
2082
                datasetBuilder.add("alias", dataverse.getAlias());
×
2083
                datasetBuilder.add("displayName", dataverse.getDisplayName());
×
2084
                dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build());
×
2085
            }
×
2086
            JsonObjectBuilder response = Json.createObjectBuilder();
×
2087
            response.add("id", datasetId);
×
2088
            response.add("identifier", dataset.getIdentifier());
×
2089
            response.add("linked-dataverses", dataversesThatLinkToThisDatasetIdBuilder);
×
2090
            return ok(response);
×
2091
        } catch (WrappedResponse wr) {
×
2092
            return wr.getResponse();
×
2093
        }
2094
    }
2095

2096
    /**
2097
     * Add a given assignment to a given user or group
2098
     * @param ra     role assignment DTO
2099
     * @param id     dataset id
2100
     * @param apiKey
2101
     */
2102
    @POST
2103
    @AuthRequired
2104
    @Path("{identifier}/assignments")
2105
    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
2106
        try {
2107
            Dataset dataset = findDatasetOrDie(id);
×
2108
            
2109
            RoleAssignee assignee = findAssignee(ra.getAssignee());
×
2110
            if (assignee == null) {
×
2111
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
×
2112
            }
2113
            
2114
            DataverseRole theRole;
2115
            Dataverse dv = dataset.getOwner();
×
2116
            theRole = null;
×
2117
            while ((theRole == null) && (dv != null)) {
×
2118
                for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
×
2119
                    if (aRole.getAlias().equals(ra.getRole())) {
×
2120
                        theRole = aRole;
×
2121
                        break;
×
2122
                    }
2123
                }
×
2124
                dv = dv.getOwner();
×
2125
            }
2126
            if (theRole == null) {
×
2127
                List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
×
2128
                return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
×
2129
            }
2130

2131
            String privateUrlToken = null;
×
2132
            return ok(
×
2133
                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
×
2134
        } catch (WrappedResponse ex) {
×
2135
            List<String> args = Arrays.asList(ex.getMessage());
×
2136
            logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
×
2137
            return ex.getResponse();
×
2138
        }
2139

2140
    }
2141
    
2142
    @DELETE
2143
    @AuthRequired
2144
    @Path("{identifier}/assignments/{id}")
2145
    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
2146
        RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
×
2147
        if (ra != null) {
×
2148
            try {
2149
                findDatasetOrDie(dsId);
×
2150
                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
×
2151
                List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
×
2152
                return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
×
2153
            } catch (WrappedResponse ex) {
×
2154
                return ex.getResponse();
×
2155
            }
2156
        } else {
2157
            List<String> args = Arrays.asList(Long.toString(assignmentId));
×
2158
            return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
×
2159
        }
2160
    }
2161

2162
    @GET
2163
    @AuthRequired
2164
    @Path("{identifier}/assignments")
2165
    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2166
        return response(req ->
×
2167
                ok(execCommand(
×
2168
                        new ListRoleAssignments(req, findDatasetOrDie(id)))
×
2169
                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
×
2170
    }
2171

2172
    @GET
2173
    @AuthRequired
2174
    @Path("{id}/privateUrl")
2175
    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2176
        return response( req -> {
×
2177
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
2178
            return (privateUrl != null) ? ok(json(privateUrl))
×
2179
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
2180
        }, getRequestUser(crc));
×
2181
    }
2182

2183
    @POST
2184
    @AuthRequired
2185
    @Path("{id}/privateUrl")
2186
    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
2187
        if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
×
2188
            throw new NotAcceptableException("Anonymized Access not enabled");
×
2189
        }
2190
        return response(req ->
×
2191
                ok(json(execCommand(
×
2192
                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
×
2193
    }
2194

2195
    @DELETE
2196
    @AuthRequired
2197
    @Path("{id}/privateUrl")
2198
    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2199
        return response( req -> {
×
2200
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2201
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
2202
            if (privateUrl != null) {
×
2203
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
2204
                return ok("Private URL deleted.");
×
2205
            } else {
2206
                return notFound("No Private URL to delete.");
×
2207
            }
2208
        }, getRequestUser(crc));
×
2209
    }
2210

2211
    @GET
2212
    @AuthRequired
2213
    @Path("{id}/thumbnail/candidates")
2214
    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2215
        try {
2216
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2217
            boolean canUpdateThumbnail = false;
×
2218
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
×
2219
            if (!canUpdateThumbnail) {
×
2220
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
×
2221
            }
2222
            JsonArrayBuilder data = Json.createArrayBuilder();
×
2223
            boolean considerDatasetLogoAsCandidate = true;
×
2224
            for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
×
2225
                JsonObjectBuilder candidate = Json.createObjectBuilder();
×
2226
                String base64image = datasetThumbnail.getBase64image();
×
2227
                if (base64image != null) {
×
2228
                    logger.fine("found a candidate!");
×
2229
                    candidate.add("base64image", base64image);
×
2230
                }
2231
                DataFile dataFile = datasetThumbnail.getDataFile();
×
2232
                if (dataFile != null) {
×
2233
                    candidate.add("dataFileId", dataFile.getId());
×
2234
                }
2235
                data.add(candidate);
×
2236
            }
×
2237
            return ok(data);
×
2238
        } catch (WrappedResponse ex) {
×
2239
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
×
2240
        }
2241
    }
2242

2243
    @GET
2244
    @Produces({"image/png"})
2245
    @Path("{id}/thumbnail")
2246
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
2247
        try {
2248
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2249
            InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
×
2250
            if(is == null) {
×
2251
                return notFound("Thumbnail not available");
×
2252
            }
2253
            return Response.ok(is).build();
×
2254
        } catch (WrappedResponse wr) {
×
2255
            return notFound("Thumbnail not available");
×
2256
        }
2257
    }
2258

2259
    @GET
2260
    @Produces({ "image/png" })
2261
    @Path("{id}/logo")
2262
    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
2263
        try {
2264
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2265
            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
×
2266
            if (is == null) {
×
2267
                return notFound("Logo not available");
×
2268
            }
2269
            return Response.ok(is).build();
×
2270
        } catch (WrappedResponse wr) {
×
2271
            return notFound("Logo not available");
×
2272
        }
2273
    }
2274

2275
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
2276
    @POST
2277
    @AuthRequired
2278
    @Path("{id}/thumbnail/{dataFileId}")
2279
    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
2280
        try {
2281
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
2282
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
2283
        } catch (WrappedResponse wr) {
×
2284
            return wr.getResponse();
×
2285
        }
2286
    }
2287

2288
    @POST
2289
    @AuthRequired
2290
    @Path("{id}/thumbnail")
2291
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2292
    @Produces("application/json")
2293
    @Operation(summary = "Uploads a logo for a dataset", 
2294
               description = "Uploads a logo for a dataset")
2295
    @APIResponse(responseCode = "200",
2296
               description = "Dataset logo uploaded successfully")
2297
    @Tag(name = "uploadDatasetLogo", 
2298
         description = "Uploads a logo for a dataset")
2299
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))          
2300
    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
2301
        try {
2302
            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
×
2303
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
2304
        } catch (WrappedResponse wr) {
×
2305
            return wr.getResponse();
×
2306
        }
2307
    }
2308

2309
    @DELETE
2310
    @AuthRequired
2311
    @Path("{id}/thumbnail")
2312
    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2313
        try {
2314
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
×
2315
            return ok("Dataset thumbnail removed.");
×
2316
        } catch (WrappedResponse wr) {
×
2317
            return wr.getResponse();
×
2318
        }
2319
    }
2320

2321
    @Deprecated(forRemoval = true, since = "2024-07-07")
2322
    @GET
2323
    @AuthRequired
2324
    @Path("{identifier}/dataCaptureModule/rsync")
2325
    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
2326
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
2327
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
×
2328
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
×
2329
        }
2330
        Dataset dataset = null;
×
2331
        try {
2332
            dataset = findDatasetOrDie(id);
×
2333
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
2334
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
×
2335
            
2336
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
×
2337
            if (lock == null) {
×
2338
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
2339
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
×
2340
            }
2341
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
×
2342
        } catch (WrappedResponse wr) {
×
2343
            return wr.getResponse();
×
2344
        } catch (EJBException ex) {
×
2345
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
×
2346
        }
2347
    }
2348
    
2349
    /**
2350
     * This api endpoint triggers the creation of a "package" file in a dataset
2351
     * after that package has been moved onto the same filesystem via the Data Capture Module.
2352
     * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file.
2353
     * The "package" can be downloaded over RSAL.
2354
     *
2355
     * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly.
2356
     *
2357
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
2358
     * But due to the possibly immense number of files (millions) the package approach was taken.
2359
     * This is relevant because the posix ("file") code contains many remnants of that development work.
2360
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
2361
     * -MAD 4.9.1
2362
     */
2363
    @POST
2364
    @AuthRequired
2365
    @Path("{identifier}/dataCaptureModule/checksumValidation")
2366
    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
2367
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
2368
        AuthenticatedUser authenticatedUser = null;
×
2369
        try {
2370
            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2371
        } catch (WrappedResponse ex) {
×
2372
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
2373
        }
×
2374
        if (!authenticatedUser.isSuperuser()) {
×
2375
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
2376
        }
2377
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
2378
        try {
2379
            Dataset dataset = findDatasetOrDie(id);
×
2380
            if ("validation passed".equals(statusMessageFromDcm)) {
×
2381
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
2382

2383
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
×
2384
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
2385
                int totalSize = jsonFromDcm.getInt("totalSize");
×
2386
                String storageDriverType = System.getProperty("dataverse.file." + storageDriver + ".type");
×
2387
                
2388
                if (storageDriverType.equals("file")) {
×
2389
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
2390

2391
                    ImportMode importMode = ImportMode.MERGE;
×
2392
                    try {
2393
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, new Long(totalSize), importMode));
×
2394
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
2395
                        String message = jsonFromImportJobKickoff.getString("message");
×
2396
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2397
                        job.add("jobId", jobId);
×
2398
                        job.add("message", message);
×
2399
                        return ok(job);
×
2400
                    } catch (WrappedResponse wr) {
×
2401
                        String message = wr.getMessage();
×
2402
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
×
2403
                    }
2404
                } else if(storageDriverType.equals(DataAccess.S3)) {
×
2405
                    
2406
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
2407
                    try {
2408
                        
2409
                        //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package
2410
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
2411
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize));
×
2412
                        
2413
                        if (packageFile == null) {
×
2414
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
2415
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
2416
                        }
2417
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
2418
                        if (dcmLock == null) {
×
2419
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
2420
                        } else {
2421
                            datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
2422
                            dataset.removeLock(dcmLock);
×
2423
                        }
2424
                        
2425
                        // update version using the command engine to enforce user permissions and constraints
2426
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
2427
                            try {
2428
                                Command<Dataset> cmd;
2429
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
×
2430
                                commandEngine.submit(cmd);
×
2431
                            } catch (CommandException ex) {
×
2432
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
2433
                            }
×
2434
                        } else {
2435
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
×
2436
                                    + "single version in draft mode.";
2437
                            logger.log(Level.SEVERE, constraintError);
×
2438
                        }
2439

2440
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
2441
                        return ok(job);
×
2442
                        
2443
                    } catch (IOException e) {
×
2444
                        String message = e.getMessage();
×
2445
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
×
2446
                    }
2447
                } else {
2448
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
×
2449
                }
2450
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
2451
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
×
2452
                distinctAuthors.values().forEach((value) -> {
×
2453
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2454
                });
×
2455
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
2456
                if (superUsers != null && !superUsers.isEmpty()) {
×
2457
                    superUsers.forEach((au) -> {
×
2458
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
×
2459
                    });
×
2460
                }
2461
                return ok("User notified about checksum validation failure.");
×
2462
            } else {
2463
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
×
2464
            }
2465
        } catch (WrappedResponse ex) {
×
2466
            return ex.getResponse();
×
2467
        }
2468
    }
2469
    
2470

2471
    @POST
2472
    @AuthRequired
2473
    @Path("{id}/submitForReview")
2474
    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2475
        try {
2476
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
×
2477
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2478
            
2479
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
×
2480
            
2481
            result.add("inReview", inReview);
×
2482
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
2483
            return ok(result);
×
2484
        } catch (WrappedResponse wr) {
×
2485
            return wr.getResponse();
×
2486
        }
2487
    }
2488

2489
    @POST
2490
    @AuthRequired
2491
    @Path("{id}/returnToAuthor")
2492
    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
2493

2494
        if (jsonBody == null || jsonBody.isEmpty()) {
×
2495
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
×
2496
        }
2497
        JsonObject json = JsonUtil.getJsonObject(jsonBody);
×
2498
        try {
2499
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2500
            String reasonForReturn = null;
×
2501
            reasonForReturn = json.getString("reasonForReturn");
×
2502
            if ((reasonForReturn == null || reasonForReturn.isEmpty())
×
2503
                    && !FeatureFlags.DISABLE_RETURN_TO_AUTHOR_REASON.enabled()) {
×
2504
                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"));
×
2505
            }
2506
            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
×
2507
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
×
2508

2509
            JsonObjectBuilder result = Json.createObjectBuilder();
×
2510
            result.add("inReview", false);
×
2511
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
2512
            return ok(result);
×
2513
        } catch (WrappedResponse wr) {
×
2514
            return wr.getResponse();
×
2515
        }
2516
    }
2517

2518
    @GET
2519
    @AuthRequired
2520
    @Path("{id}/curationStatus")
2521
    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2522
        try {
2523
            Dataset ds = findDatasetOrDie(idSupplied);
×
2524
            DatasetVersion dsv = ds.getLatestVersion();
×
2525
            User user = getRequestUser(crc);
×
2526
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
×
2527
                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
×
2528
            } else {
2529
                return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
×
2530
            }
2531
        } catch (WrappedResponse wr) {
×
2532
            return wr.getResponse();
×
2533
        }
2534
    }
2535

2536
    @PUT
2537
    @AuthRequired
2538
    @Path("{id}/curationStatus")
2539
    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
2540
        Dataset ds = null;
×
2541
        User u = null;
×
2542
        try {
2543
            ds = findDatasetOrDie(idSupplied);
×
2544
            u = getRequestUser(crc);
×
2545
        } catch (WrappedResponse wr) {
×
2546
            return wr.getResponse();
×
2547
        }
×
2548
        try {
2549
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label));
×
2550
            return ok("Curation Status updated");
×
2551
        } catch (WrappedResponse wr) {
×
2552
            // Just change to Bad Request and send
2553
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2554
        }
2555
    }
2556

2557
    @DELETE
2558
    @AuthRequired
2559
    @Path("{id}/curationStatus")
2560
    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
2561
        Dataset ds = null;
×
2562
        User u = null;
×
2563
        try {
2564
            ds = findDatasetOrDie(idSupplied);
×
2565
            u = getRequestUser(crc);
×
2566
        } catch (WrappedResponse wr) {
×
2567
            return wr.getResponse();
×
2568
        }
×
2569
        try {
2570
            execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null));
×
2571
            return ok("Curation Status deleted");
×
2572
        } catch (WrappedResponse wr) {
×
2573
            //Just change to Bad Request and send
2574
            return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build();
×
2575
        }
2576
    }
2577

2578
    @GET
2579
    @AuthRequired
2580
    @Path("{id}/uploadurls")
2581
    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
2582
        try {
2583
            Dataset dataset = findDatasetOrDie(idSupplied);
×
2584

2585
            boolean canUpdateDataset = false;
×
2586
            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
×
2587
                    .canIssue(UpdateDatasetVersionCommand.class);
×
2588
            if (!canUpdateDataset) {
×
2589
                return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
×
2590
            }
2591
            S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset);
×
2592
            if (s3io == null) {
×
2593
                return error(Response.Status.NOT_FOUND,
×
2594
                        "Direct upload not supported for files in this dataset: " + dataset.getId());
×
2595
            }
2596
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
×
2597
            if (maxSize != null) {
×
2598
                if(fileSize > maxSize) {
×
2599
                    return error(Response.Status.BAD_REQUEST,
×
2600
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2601
                                    "The maximum allowed file size is " + maxSize + " bytes.");
2602
                }
2603
            }
2604
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
×
2605
            if (limit != null) {
×
2606
                if(fileSize > limit.getRemainingQuotaInBytes()) {
×
2607
                    return error(Response.Status.BAD_REQUEST,
×
2608
                            "The file you are trying to upload is too large to be uploaded to this dataset. " +
2609
                                    "The remaing file size quota is " + limit.getRemainingQuotaInBytes() + " bytes.");
×
2610
                }
2611
            }
2612
            JsonObjectBuilder response = null;
×
2613
            String storageIdentifier = null;
×
2614
            try {
2615
                storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation());
×
2616
                response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), storageIdentifier, fileSize);
×
2617

2618
            } catch (IOException io) {
×
2619
                logger.warning(io.getMessage());
×
2620
                throw new WrappedResponse(io,
×
2621
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request"));
×
2622
            }
×
2623

2624
            response.add("storageIdentifier", storageIdentifier);
×
2625
            return ok(response);
×
2626
        } catch (WrappedResponse wr) {
×
2627
            return wr.getResponse();
×
2628
        }
2629
    }
2630

2631
    @DELETE
2632
    @AuthRequired
2633
    @Path("mpupload")
2634
    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2635
        try {
2636
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2637
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2638
            User user = session.getUser();
×
2639
            if (!user.isAuthenticated()) {
×
2640
                try {
2641
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2642
                } catch (WrappedResponse ex) {
×
2643
                    logger.info(
×
2644
                            "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
2645
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2646
                    throw ex;
×
2647
                }
×
2648
            }
2649
            boolean allowed = false;
×
2650
            if (dataset != null) {
×
2651
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2652
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2653
            } else {
2654
                /*
2655
                 * The only legitimate case where a global id won't correspond to a dataset is
2656
                 * for uploads during creation. Given that this call will still fail unless all
2657
                 * three parameters correspond to an active multipart upload, it should be safe
2658
                 * to allow the attempt for an authenticated user. If there are concerns about
2659
                 * permissions, one could check with the current design that the user is allowed
2660
                 * to create datasets in some dataverse that is configured to use the storage
2661
                 * provider specified in the storageidentifier, but testing for the ability to
2662
                 * create a dataset in a specific dataverse would requiring changing the design
2663
                 * somehow (e.g. adding the ownerId to this call).
2664
                 */
2665
                allowed = true;
×
2666
            }
2667
            if (!allowed) {
×
2668
                return error(Response.Status.FORBIDDEN,
×
2669
                        "You are not permitted to abort file uploads with the supplied parameters.");
2670
            }
2671
            try {
2672
                S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2673
            } catch (IOException io) {
×
2674
                logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier="
×
2675
                        + storageidentifier + " dataset Id: " + dataset.getId());
×
2676
                logger.warning(io.getMessage());
×
2677
                throw new WrappedResponse(io,
×
2678
                        error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload"));
×
2679
            }
×
2680
            return Response.noContent().build();
×
2681
        } catch (WrappedResponse wr) {
×
2682
            return wr.getResponse();
×
2683
        }
2684
    }
2685

2686
    @PUT
2687
    @AuthRequired
2688
    @Path("mpupload")
2689
    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
2690
        try {
2691
            Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
×
2692
            //Allow the API to be used within a session (e.g. for direct upload in the UI)
2693
            User user = session.getUser();
×
2694
            if (!user.isAuthenticated()) {
×
2695
                try {
2696
                    user = getRequestAuthenticatedUserOrDie(crc);
×
2697
                } catch (WrappedResponse ex) {
×
2698
                    logger.info(
×
2699
                            "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
2700
                                    + dataset.getId() + ": " + ex.getLocalizedMessage());
×
2701
                    throw ex;
×
2702
                }
×
2703
            }
2704
            boolean allowed = false;
×
2705
            if (dataset != null) {
×
2706
                allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset)
×
2707
                        .canIssue(UpdateDatasetVersionCommand.class);
×
2708
            } else {
2709
                /*
2710
                 * The only legitimate case where a global id won't correspond to a dataset is
2711
                 * for uploads during creation. Given that this call will still fail unless all
2712
                 * three parameters correspond to an active multipart upload, it should be safe
2713
                 * to allow the attempt for an authenticated user. If there are concerns about
2714
                 * permissions, one could check with the current design that the user is allowed
2715
                 * to create datasets in some dataverse that is configured to use the storage
2716
                 * provider specified in the storageidentifier, but testing for the ability to
2717
                 * create a dataset in a specific dataverse would requiring changing the design
2718
                 * somehow (e.g. adding the ownerId to this call).
2719
                 */
2720
                allowed = true;
×
2721
            }
2722
            if (!allowed) {
×
2723
                return error(Response.Status.FORBIDDEN,
×
2724
                        "You are not permitted to complete file uploads with the supplied parameters.");
2725
            }
2726
            List<PartETag> eTagList = new ArrayList<PartETag>();
×
2727
            logger.info("Etags: " + partETagBody);
×
2728
            try {
2729
                JsonObject object = JsonUtil.getJsonObject(partETagBody);
×
2730
                for (String partNo : object.keySet()) {
×
2731
                    eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
×
2732
                }
×
2733
                for (PartETag et : eTagList) {
×
2734
                    logger.info("Part: " + et.getPartNumber() + " : " + et.getETag());
×
2735
                }
×
2736
            } catch (JsonException je) {
×
2737
                logger.info("Unable to parse eTags from: " + partETagBody);
×
2738
                throw new WrappedResponse(je, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2739
            }
×
2740
            try {
2741
                S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList);
×
2742
            } catch (IOException io) {
×
2743
                logger.warning("Multipart upload completion failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2744
                logger.warning(io.getMessage());
×
2745
                try {
2746
                    S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId);
×
2747
                } catch (IOException e) {
×
2748
                    logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " globalId: " + idSupplied);
×
2749
                    logger.severe(io.getMessage());
×
2750
                }
×
2751

2752
                throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload"));
×
2753
            }
×
2754
            return ok("Multipart Upload completed");
×
2755
        } catch (WrappedResponse wr) {
×
2756
            return wr.getResponse();
×
2757
        }
2758
    }
2759

2760
    /**
2761
     * Add a File to an existing Dataset
2762
     *
2763
     * @param idSupplied
2764
     * @param jsonData
2765
     * @param fileInputStream
2766
     * @param contentDispositionHeader
2767
     * @param formDataBodyPart
2768
     * @return
2769
     */
2770
    @POST
2771
    @AuthRequired
2772
    @Path("{id}/add")
2773
    @Consumes(MediaType.MULTIPART_FORM_DATA)
2774
    @Produces("application/json")
2775
    @Operation(summary = "Uploads a file for a dataset", 
2776
               description = "Uploads a file for a dataset")
2777
    @APIResponse(responseCode = "200",
2778
               description = "File uploaded successfully to dataset")
2779
    @Tag(name = "addFileToDataset", 
2780
         description = "Uploads a file for a dataset")
2781
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
2782
    public Response addFileToDataset(@Context ContainerRequestContext crc,
2783
                    @PathParam("id") String idSupplied,
2784
                    @FormDataParam("jsonData") String jsonData,
2785
                    @FormDataParam("file") InputStream fileInputStream,
2786
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
2787
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
2788
                    ){
2789

2790
        if (!systemConfig.isHTTPUpload()) {
×
2791
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
2792
        }
2793

2794
        // -------------------------------------
2795
        // (1) Get the user from the ContainerRequestContext
2796
        // -------------------------------------
2797
        User authUser;
2798
        authUser = getRequestUser(crc);
×
2799

2800
        // -------------------------------------
2801
        // (2) Get the Dataset Id
2802
        //  
2803
        // -------------------------------------
2804
        Dataset dataset;
2805
        
2806
        try {
2807
            dataset = findDatasetOrDie(idSupplied);
×
2808
        } catch (WrappedResponse wr) {
×
2809
            return wr.getResponse();
×
2810
        }
×
2811
        
2812
        //------------------------------------
2813
        // (2a) Make sure dataset does not have package file
2814
        //
2815
        // --------------------------------------
2816
        
2817
        for (DatasetVersion dv : dataset.getVersions()) {
×
2818
            if (dv.isHasPackageFile()) {
×
2819
                return error(Response.Status.FORBIDDEN,
×
2820
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
2821
                );
2822
            }
2823
        }
×
2824

2825
        // (2a) Load up optional params via JSON
2826
        //---------------------------------------
2827
        OptionalFileParams optionalFileParams = null;
×
2828
        msgt("(api) jsonData: " + jsonData);
×
2829

2830
        try {
2831
            optionalFileParams = new OptionalFileParams(jsonData);
×
2832
        } catch (DataFileTagException ex) {
×
2833
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
2834
        }
2835
        catch (ClassCastException | com.google.gson.JsonParseException ex) {
×
2836
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
×
2837
        }
×
2838
        
2839
        // -------------------------------------
2840
        // (3) Get the file name and content type
2841
        // -------------------------------------
2842
        String newFilename = null;
×
2843
        String newFileContentType = null;
×
2844
        String newStorageIdentifier = null;
×
2845
        if (null == contentDispositionHeader) {
×
2846
            if (optionalFileParams.hasStorageIdentifier()) {
×
2847
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
×
2848
                newStorageIdentifier = DataAccess.expandStorageIdentifierIfNeeded(newStorageIdentifier);
×
2849
                
2850
                if(!DataAccess.uploadToDatasetAllowed(dataset,  newStorageIdentifier)) {
×
2851
                    return error(BAD_REQUEST,
×
2852
                            "Dataset store configuration does not allow provided storageIdentifier.");
2853
                }
2854
                if (optionalFileParams.hasFileName()) {
×
2855
                    newFilename = optionalFileParams.getFileName();
×
2856
                    if (optionalFileParams.hasMimetype()) {
×
2857
                        newFileContentType = optionalFileParams.getMimeType();
×
2858
                    }
2859
                }
2860
            } else {
2861
                return error(BAD_REQUEST,
×
2862
                        "You must upload a file or provide a valid storageidentifier, filename, and mimetype.");
2863
            }
2864
        } else {
2865
            newFilename = contentDispositionHeader.getFileName();
×
2866
            // Let's see if the form data part has the mime (content) type specified.
2867
            // Note that we don't want to rely on formDataBodyPart.getMediaType() -
2868
            // because that defaults to "text/plain" when no "Content-Type:" header is
2869
            // present. Instead we'll go through the headers, and see if "Content-Type:"
2870
            // is there. If not, we'll default to "application/octet-stream" - the generic
2871
            // unknown type. This will prompt the application to run type detection and
2872
            // potentially find something more accurate.
2873
            // newFileContentType = formDataBodyPart.getMediaType().toString();
2874

2875
            for (String header : formDataBodyPart.getHeaders().keySet()) {
×
2876
                if (header.equalsIgnoreCase("Content-Type")) {
×
2877
                    newFileContentType = formDataBodyPart.getHeaders().get(header).get(0);
×
2878
                }
2879
            }
×
2880
            if (newFileContentType == null) {
×
2881
                newFileContentType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
×
2882
            }
2883
        }
2884

2885

2886
        //-------------------
2887
        // (3) Create the AddReplaceFileHelper object
2888
        //-------------------
2889
        msg("ADD!");
×
2890

2891
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
2892
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
×
2893
                ingestService,
2894
                datasetService,
2895
                fileService,
2896
                permissionSvc,
2897
                commandEngine,
2898
                systemConfig);
2899

2900

2901
        //-------------------
2902
        // (4) Run "runAddFileByDatasetId"
2903
        //-------------------
2904
        addFileHelper.runAddFileByDataset(dataset,
×
2905
                newFilename,
2906
                newFileContentType,
2907
                newStorageIdentifier,
2908
                fileInputStream,
2909
                optionalFileParams);
2910

2911

2912
        if (addFileHelper.hasError()){
×
2913
            //conflict response status added for 8859
2914
            if (Response.Status.CONFLICT.equals(addFileHelper.getHttpErrorCode())){
×
2915
                return conflict(addFileHelper.getErrorMessagesAsString("\n"));
×
2916
            }
2917
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
2918
        } else {
2919
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
2920
            try {
2921
                //msgt("as String: " + addFileHelper.getSuccessResult());
2922
                /**
2923
                 * @todo We need a consistent, sane way to communicate a human
2924
                 * readable message to an API client suitable for human
2925
                 * consumption. Imagine if the UI were built in Angular or React
2926
                 * and we want to return a message from the API as-is to the
2927
                 * user. Human readable.
2928
                 */
2929
                logger.fine("successMsg: " + successMsg);
×
2930
                String duplicateWarning = addFileHelper.getDuplicateFileWarning();
×
2931
                if (duplicateWarning != null && !duplicateWarning.isEmpty()) {
×
2932
                    return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2933
                } else {
2934
                    return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
×
2935
                }
2936

2937
                //"Look at that!  You added a file! (hey hey, it may have worked)");
2938
            } catch (NoFilesException ex) {
×
2939
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
2940
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");
×
2941

2942
            }
2943
        }
2944
        
2945
    } // end: addFileToDataset
2946

2947

2948
    /**
2949
     * Clean storage of a Dataset
2950
     *
2951
     * @param idSupplied
2952
     * @return
2953
     */
2954
    @GET
2955
    @AuthRequired
2956
    @Path("{id}/cleanStorage")
2957
    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
2958
        // get user and dataset
2959
        User authUser = getRequestUser(crc);
×
2960

2961
        Dataset dataset;
2962
        try {
2963
            dataset = findDatasetOrDie(idSupplied);
×
2964
        } catch (WrappedResponse wr) {
×
2965
            return wr.getResponse();
×
2966
        }
×
2967
        
2968
        // check permissions
2969
        if (!permissionSvc.permissionsFor(createDataverseRequest(authUser), dataset).contains(Permission.EditDataset)) {
×
2970
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Access denied!");
×
2971
        }
2972

2973
        boolean doDryRun = dryrun != null && dryrun.booleanValue();
×
2974

2975
        // check if no legacy files are present
2976
        Set<String> datasetFilenames = getDatasetFilenames(dataset);
×
2977
        if (datasetFilenames.stream().anyMatch(x -> !dataFilePattern.matcher(x).matches())) {
×
2978
            logger.log(Level.WARNING, "Dataset contains legacy files not matching the naming pattern!");
×
2979
        }
2980

2981
        Predicate<String> filter = getToDeleteFilesFilter(datasetFilenames);
×
2982
        List<String> deleted;
2983
        try {
2984
            StorageIO<DvObject> datasetIO = DataAccess.getStorageIO(dataset);
×
2985
            deleted = datasetIO.cleanUp(filter, doDryRun);
×
2986
        } catch (IOException ex) {
×
2987
            logger.log(Level.SEVERE, null, ex);
×
2988
            return error(Response.Status.INTERNAL_SERVER_ERROR, "IOException! Serious Error! See administrator!");
×
2989
        }
×
2990

2991
        return ok("Found: " + datasetFilenames.stream().collect(Collectors.joining(", ")) + "\n" + "Deleted: " + deleted.stream().collect(Collectors.joining(", ")));
×
2992
        
2993
    }
2994

2995
    private static Set<String> getDatasetFilenames(Dataset dataset) {
2996
        Set<String> files = new HashSet<>();
×
2997
        for (DataFile dataFile: dataset.getFiles()) {
×
2998
            String storageIdentifier = dataFile.getStorageIdentifier();
×
2999
            String location = storageIdentifier.substring(storageIdentifier.indexOf("://") + 3);
×
3000
            String[] locationParts = location.split(":");//separate bucket, swift container, etc. from fileName
×
3001
            files.add(locationParts[locationParts.length-1]);
×
3002
        }
×
3003
        return files;
×
3004
    }
3005

3006
    public static Predicate<String> getToDeleteFilesFilter(Set<String> datasetFilenames) {
3007
        return f -> {
1✔
3008
            return dataFilePattern.matcher(f).matches() && datasetFilenames.stream().noneMatch(x -> f.startsWith(x));
1✔
3009
        };
3010
    }
3011

3012
    private void msg(String m) {
3013
        //System.out.println(m);
3014
        logger.fine(m);
×
3015
    }
×
3016

3017
    private void dashes() {
3018
        msg("----------------");
×
3019
    }
×
3020

3021
    private void msgt(String m) {
3022
        dashes();
×
3023
        msg(m);
×
3024
        dashes();
×
3025
    }
×
3026

3027

3028
    public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
3029
            throws WrappedResponse {
3030
        switch (versionId) {
×
3031
            case DS_VERSION_LATEST:
3032
                return hdl.handleLatest();
×
3033
            case DS_VERSION_DRAFT:
3034
                return hdl.handleDraft();
×
3035
            case DS_VERSION_LATEST_PUBLISHED:
3036
                return hdl.handleLatestPublished();
×
3037
            default:
3038
                try {
3039
                    String[] versions = versionId.split("\\.");
×
3040
                    switch (versions.length) {
×
3041
                        case 1:
3042
                            return hdl.handleSpecific(Long.parseLong(versions[0]), (long) 0.0);
×
3043
                        case 2:
3044
                            return hdl.handleSpecific(Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
3045
                        default:
3046
                            throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3047
                    }
3048
                } catch (NumberFormatException nfe) {
×
3049
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
3050
                }
3051
        }
3052
    }
3053

3054
    /*
3055
     * includeDeaccessioned default to false and checkPermsWhenDeaccessioned to false. Use it only when you are sure that the you don't need to work with
3056
     * a deaccessioned dataset.
3057
     */
3058
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, 
3059
                                                  String versionNumber, 
3060
                                                  final Dataset ds,
3061
                                                  UriInfo uriInfo, 
3062
                                                  HttpHeaders headers) throws WrappedResponse {
3063
        //The checkPerms was added to check the permissions ONLY when the dataset is deaccessioned.
3064
        boolean checkFilePerms = false;
×
3065
        boolean includeDeaccessioned = false;
×
3066
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkFilePerms);
×
3067
    }
3068
    
3069
    /*
3070
     * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
3071
     */
3072
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3073
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
3074
        boolean checkPermsWhenDeaccessioned = true;
×
3075
        boolean bypassAccessCheck = false;
×
3076
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3077
    }
3078

3079
    /*
3080
     * checkPermsWhenDeaccessioned default to true. Be aware that the version will be only be obtainable if the user has edit permissions.
3081
     */
3082
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3083
                                                  UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned) throws WrappedResponse {
3084
        boolean bypassAccessCheck = false;
×
3085
        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, includeDeaccessioned, checkPermsWhenDeaccessioned, bypassAccessCheck);
×
3086
    }
3087

3088
    /*
3089
     * Will allow to define when the permissions should be checked when a deaccesioned dataset is requested. If the user doesn't have edit permissions will result in an error.
3090
     */
3091
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds,
3092
            UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned, boolean checkPermsWhenDeaccessioned,
3093
            boolean bypassAccessCheck)
3094
            throws WrappedResponse {
3095

3096
        DatasetVersion dsv = findDatasetVersionOrDie(req, versionNumber, ds, includeDeaccessioned, checkPermsWhenDeaccessioned);
×
3097

3098
        if (dsv == null || dsv.getId() == null) {
×
3099
            throw new WrappedResponse(
×
3100
                    notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found"));
×
3101
        }
3102
        if (dsv.isReleased()&& uriInfo!=null) {
×
3103
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
×
3104
            mdcLogService.logEntry(entry);
×
3105
        }
3106
        return dsv;
×
3107
    }
3108
 
3109
    @GET
3110
    @Path("{identifier}/locks")
3111
    public Response getLocksForDataset(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3112

3113
        Dataset dataset = null;
×
3114
        try {
3115
            dataset = findDatasetOrDie(id);
×
3116
            Set<DatasetLock> locks;
3117
            if (lockType == null) {
×
3118
                locks = dataset.getLocks();
×
3119
            } else {
3120
                // request for a specific type lock:
3121
                DatasetLock lock = dataset.getLockFor(lockType);
×
3122

3123
                locks = new HashSet<>();
×
3124
                if (lock != null) {
×
3125
                    locks.add(lock);
×
3126
                }
3127
            }
3128
            
3129
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3130

3131
        } catch (WrappedResponse wr) {
×
3132
            return wr.getResponse();
×
3133
        }
3134
    }
3135

3136
    @DELETE
3137
    @AuthRequired
3138
    @Path("{identifier}/locks")
3139
    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
3140

3141
        return response(req -> {
×
3142
            try {
3143
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3144
                if (!user.isSuperuser()) {
×
3145
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3146
                }
3147
                Dataset dataset = findDatasetOrDie(id);
×
3148
                
3149
                if (lockType == null) {
×
3150
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
3151
                    for (DatasetLock lock : dataset.getLocks()) {
×
3152
                        locks.add(lock.getReason());
×
3153
                    }
×
3154
                    if (!locks.isEmpty()) {
×
3155
                        for (DatasetLock.Reason locktype : locks) {
×
3156
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
3157
                            // refresh the dataset:
3158
                            dataset = findDatasetOrDie(id);
×
3159
                        }
×
3160
                        // kick of dataset reindexing, in case the locks removed 
3161
                        // affected the search card:
3162
                        indexService.asyncIndexDataset(dataset, true);
×
3163
                        return ok("locks removed");
×
3164
                    }
3165
                    return ok("dataset not locked");
×
3166
                }
3167
                // request for a specific type lock:
3168
                DatasetLock lock = dataset.getLockFor(lockType);
×
3169
                if (lock != null) {
×
3170
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
3171
                    // refresh the dataset:
3172
                    dataset = findDatasetOrDie(id);
×
3173
                    // ... and kick of dataset reindexing, in case the lock removed 
3174
                    // affected the search card:
3175
                    indexService.asyncIndexDataset(dataset, true);
×
3176
                    return ok("lock type " + lock.getReason() + " removed");
×
3177
                }
3178
                return ok("no lock type " + lockType + " on the dataset");
×
3179
            } catch (WrappedResponse wr) {
×
3180
                return wr.getResponse();
×
3181
            }
3182

3183
        }, getRequestUser(crc));
×
3184

3185
    }
3186
    
3187
    @POST
3188
    @AuthRequired
3189
    @Path("{identifier}/lock/{type}")
3190
    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
3191
        return response(req -> {
×
3192
            try {
3193
                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3194
                if (!user.isSuperuser()) {
×
3195
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
3196
                }
3197
                Dataset dataset = findDatasetOrDie(id);
×
3198
                DatasetLock lock = dataset.getLockFor(lockType);
×
3199
                if (lock != null) {
×
3200
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
3201
                }
3202
                lock = new DatasetLock(lockType, user);
×
3203
                execCommand(new AddLockCommand(req, dataset, lock));
×
3204
                // refresh the dataset:
3205
                dataset = findDatasetOrDie(id);
×
3206
                // ... and kick of dataset reindexing:
3207
                indexService.asyncIndexDataset(dataset, true);
×
3208

3209
                return ok("dataset locked with lock type " + lockType);
×
3210
            } catch (WrappedResponse wr) {
×
3211
                return wr.getResponse();
×
3212
            }
3213

3214
        }, getRequestUser(crc));
×
3215
    }
3216
    
3217
    @GET
3218
    @AuthRequired
3219
    @Path("locks")
3220
    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
3221
        // This API is here, under /datasets, and not under /admin, because we
3222
        // likely want it to be accessible to admin users who may not necessarily 
3223
        // have localhost access, that would be required to get to /api/admin in 
3224
        // most installations. It is still reasonable however to limit access to
3225
        // this api to admin users only.
3226
        AuthenticatedUser apiUser;
3227
        try {
3228
            apiUser = getRequestAuthenticatedUserOrDie(crc);
×
3229
        } catch (WrappedResponse ex) {
×
3230
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3231
        }
×
3232
        if (!apiUser.isSuperuser()) {
×
3233
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3234
        }
3235
        
3236
        // Locks can be optinally filtered by type, user or both.
3237
        DatasetLock.Reason lockTypeValue = null;
×
3238
        AuthenticatedUser user = null; 
×
3239
        
3240
        // For the lock type, we use a QueryParam of type String, instead of 
3241
        // DatasetLock.Reason; that would be less code to write, but this way 
3242
        // we can check if the value passed matches a valid lock type ("reason") 
3243
        // and provide a helpful error message if it doesn't. If you use a 
3244
        // QueryParam of an Enum type, trying to pass an invalid value to it 
3245
        // results in a potentially confusing "404/NOT FOUND - requested 
3246
        // resource is not available".
3247
        if (lockType != null && !lockType.isEmpty()) {
×
3248
            try {
3249
                lockTypeValue = DatasetLock.Reason.valueOf(lockType);
×
3250
            } catch (IllegalArgumentException iax) {
×
3251
                StringJoiner reasonJoiner = new StringJoiner(", ");
×
3252
                for (Reason r: Reason.values()) {
×
3253
                    reasonJoiner.add(r.name());
×
3254
                };
3255
                String errorMessage = "Invalid lock type value: " + lockType + 
×
3256
                        "; valid lock types: " + reasonJoiner.toString();
×
3257
                return error(Response.Status.BAD_REQUEST, errorMessage);
×
3258
            }
×
3259
        }
3260
        
3261
        if (userIdentifier != null && !userIdentifier.isEmpty()) {
×
3262
            user = authSvc.getAuthenticatedUser(userIdentifier);
×
3263
            if (user == null) {
×
3264
                return error(Response.Status.BAD_REQUEST, "Unknown user identifier: "+userIdentifier);
×
3265
            }
3266
        }
3267
        
3268
        //List<DatasetLock> locks = datasetService.getDatasetLocksByType(lockType);
3269
        List<DatasetLock> locks = datasetService.listLocks(lockTypeValue, user);
×
3270
                            
3271
        return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
×
3272
    }   
3273
    
3274
    
3275
    @GET
3276
    @Path("{id}/makeDataCount/citations")
3277
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
3278
        
3279
        try {
3280
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3281
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
×
3282
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
×
3283
            for (DatasetExternalCitations citation : externalCitations) {
×
3284
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
×
3285
                /**
3286
                 * In the future we can imagine storing and presenting more
3287
                 * information about the citation such as the title of the paper
3288
                 * and the names of the authors. For now, we'll at least give
3289
                 * the URL of the citation so people can click and find out more
3290
                 * about the citation.
3291
                 */
3292
                candidateObj.add("citationUrl", citation.getCitedByUrl());
×
3293
                datasetsCitations.add(candidateObj);
×
3294
            }
×
3295
            return ok(datasetsCitations);
×
3296

3297
        } catch (WrappedResponse wr) {
×
3298
            return wr.getResponse();
×
3299
        }
3300

3301
    }
3302

3303
    @GET
3304
    @Path("{id}/makeDataCount/{metric}")
3305
    public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) {
3306
        String nullCurrentMonth = null;
×
3307
        return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
×
3308
    }
3309

3310
    @GET
3311
    @AuthRequired
3312
    @Path("{identifier}/storagesize")
3313
    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
3314
        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
×
3315
                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
×
3316
    }
3317

3318
    @GET
3319
    @AuthRequired
3320
    @Path("{identifier}/versions/{versionId}/downloadsize")
3321
    public Response getDownloadSize(@Context ContainerRequestContext crc,
3322
                                    @PathParam("identifier") String dvIdtf,
3323
                                    @PathParam("versionId") String version,
3324
                                    @QueryParam("contentType") String contentType,
3325
                                    @QueryParam("accessStatus") String accessStatus,
3326
                                    @QueryParam("categoryName") String categoryName,
3327
                                    @QueryParam("tabularTagName") String tabularTagName,
3328
                                    @QueryParam("searchText") String searchText,
3329
                                    @QueryParam("mode") String mode,
3330
                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
3331
                                    @Context UriInfo uriInfo,
3332
                                    @Context HttpHeaders headers) {
3333

3334
        return response(req -> {
×
3335
            FileSearchCriteria fileSearchCriteria;
3336
            try {
3337
                fileSearchCriteria = new FileSearchCriteria(
×
3338
                        contentType,
3339
                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
×
3340
                        categoryName,
3341
                        tabularTagName,
3342
                        searchText
3343
                );
3344
            } catch (IllegalArgumentException e) {
×
3345
                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
×
3346
            }
×
3347
            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
3348
            try {
3349
                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
×
3350
            } catch (IllegalArgumentException e) {
×
3351
                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
×
3352
            }
×
3353
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
×
3354
            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
×
3355
            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
×
3356
            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
3357
            jsonObjectBuilder.add("message", message);
×
3358
            jsonObjectBuilder.add("storageSize", datasetStorageSize);
×
3359
            return ok(jsonObjectBuilder);
×
3360
        }, getRequestUser(crc));
×
3361
    }
3362

3363
    @GET
3364
    @Path("{id}/makeDataCount/{metric}/{yyyymm}")
3365
    public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) {
3366
        try {
3367
            Dataset dataset = findDatasetOrDie(idSupplied);
×
3368
            NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder();
×
3369
            MakeDataCountUtil.MetricType metricType = null;
×
3370
            try {
3371
                metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
×
3372
            } catch (IllegalArgumentException ex) {
×
3373
                return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
3374
            }
×
3375
            String monthYear = null;
×
3376
            if (yyyymm != null) {
×
3377
                // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
3378
                // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
3379
                monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
×
3380
            }
3381
            if (country != null) {
×
3382
                country = country.toLowerCase();
×
3383
                if (!MakeDataCountUtil.isValidCountryCode(country)) {
×
3384
                    return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
×
3385
                }
3386
            }
3387
            DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
×
3388
            if (datasetMetrics == null) {
×
3389
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3390
            } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
×
3391
                return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
×
3392
            }
3393
            Long viewsTotalRegular = null;
×
3394
            Long viewsUniqueRegular = null;
×
3395
            Long downloadsTotalRegular = null;
×
3396
            Long downloadsUniqueRegular = null;
×
3397
            Long viewsTotalMachine = null;
×
3398
            Long viewsUniqueMachine = null;
×
3399
            Long downloadsTotalMachine = null;
×
3400
            Long downloadsUniqueMachine = null;
×
3401
            Long viewsTotal = null;
×
3402
            Long viewsUnique = null;
×
3403
            Long downloadsTotal = null;
×
3404
            Long downloadsUnique = null;
×
3405
            switch (metricSupplied) {
×
3406
                case "viewsTotal":
3407
                    viewsTotal = datasetMetrics.getViewsTotal();
×
3408
                    break;
×
3409
                case "viewsTotalRegular":
3410
                    viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
×
3411
                    break;
×
3412
                case "viewsTotalMachine":
3413
                    viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
×
3414
                    break;
×
3415
                case "viewsUnique":
3416
                    viewsUnique = datasetMetrics.getViewsUnique();
×
3417
                    break;
×
3418
                case "viewsUniqueRegular":
3419
                    viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular();
×
3420
                    break;
×
3421
                case "viewsUniqueMachine":
3422
                    viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine();
×
3423
                    break;
×
3424
                case "downloadsTotal":
3425
                    downloadsTotal = datasetMetrics.getDownloadsTotal();
×
3426
                    break;
×
3427
                case "downloadsTotalRegular":
3428
                    downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular();
×
3429
                    break;
×
3430
                case "downloadsTotalMachine":
3431
                    downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine();
×
3432
                    break;
×
3433
                case "downloadsUnique":
3434
                    downloadsUnique = datasetMetrics.getDownloadsUnique();
×
3435
                    break;
×
3436
                case "downloadsUniqueRegular":
3437
                    downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular();
×
3438
                    break;
×
3439
                case "downloadsUniqueMachine":
3440
                    downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine();
×
3441
                    break;
×
3442
                default:
3443
                    break;
3444
            }
3445
            /**
3446
             * TODO: Think more about the JSON output and the API design.
3447
             * getDatasetMetricsByDatasetMonthCountry returns a single row right
3448
             * now, by country. We could return multiple metrics (viewsTotal,
3449
             * viewsUnique, downloadsTotal, and downloadsUnique) by country.
3450
             */
3451
            jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular);
×
3452
            jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular);
×
3453
            jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular);
×
3454
            jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular);
×
3455
            jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine);
×
3456
            jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine);
×
3457
            jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine);
×
3458
            jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine);
×
3459
            jsonObjectBuilder.add("viewsTotal", viewsTotal);
×
3460
            jsonObjectBuilder.add("viewsUnique", viewsUnique);
×
3461
            jsonObjectBuilder.add("downloadsTotal", downloadsTotal);
×
3462
            jsonObjectBuilder.add("downloadsUnique", downloadsUnique);
×
3463
            return ok(jsonObjectBuilder);
×
3464
        } catch (WrappedResponse wr) {
×
3465
            return wr.getResponse();
×
3466
        } catch (Exception e) {
×
3467
            //bad date - caught in sanitize call
3468
            return error(BAD_REQUEST, e.getMessage());
×
3469
        }
3470
    }
3471
    
3472
    @GET
3473
    @AuthRequired
3474
    @Path("{identifier}/storageDriver")
3475
    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3476
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
3477
        
3478
        Dataset dataset; 
3479
        
3480
        try {
3481
            dataset = findDatasetOrDie(dvIdtf);
×
3482
        } catch (WrappedResponse ex) {
×
3483
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3484
        }
×
3485
        
3486
        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
×
3487
    }
3488
    
3489
    @PUT
3490
    @AuthRequired
3491
    @Path("{identifier}/storageDriver")
3492
    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3493
            String storageDriverLabel,
3494
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3495
        
3496
        // Superuser-only:
3497
        AuthenticatedUser user;
3498
        try {
3499
            user = getRequestAuthenticatedUserOrDie(crc);
×
3500
        } catch (WrappedResponse ex) {
×
3501
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3502
        }
×
3503
        if (!user.isSuperuser()) {
×
3504
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3505
        }
3506

3507
        Dataset dataset;
3508

3509
        try {
3510
            dataset = findDatasetOrDie(dvIdtf);
×
3511
        } catch (WrappedResponse ex) {
×
3512
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3513
        }
×
3514
        
3515
        // We don't want to allow setting this to a store id that does not exist: 
3516
        for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) {
×
3517
            if (store.getKey().equals(storageDriverLabel)) {
×
3518
                dataset.setStorageDriverId(store.getValue());
×
3519
                datasetService.merge(dataset);
×
3520
                return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue());
×
3521
            }
3522
        }
×
3523
        return error(Response.Status.BAD_REQUEST,
×
3524
                "No Storage Driver found for : " + storageDriverLabel);
3525
    }
3526
    
3527
    @DELETE
3528
    @AuthRequired
3529
    @Path("{identifier}/storageDriver")
3530
    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3531
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3532
        
3533
        // Superuser-only:
3534
        AuthenticatedUser user;
3535
        try {
3536
            user = getRequestAuthenticatedUserOrDie(crc);
×
3537
        } catch (WrappedResponse ex) {
×
3538
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3539
        }
×
3540
        if (!user.isSuperuser()) {
×
3541
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3542
        }
3543

3544
        Dataset dataset;
3545

3546
        try {
3547
            dataset = findDatasetOrDie(dvIdtf);
×
3548
        } catch (WrappedResponse ex) {
×
3549
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
3550
        }
×
3551
        
3552
        dataset.setStorageDriverId(null);
×
3553
        datasetService.merge(dataset);
×
3554
        return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
×
3555
    }
3556

3557
    @GET
3558
    @AuthRequired
3559
    @Path("{identifier}/curationLabelSet")
3560
    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3561
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3562

3563
        try {
3564
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
3565
            if (!user.isSuperuser()) {
×
3566
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3567
            }
3568
        } catch (WrappedResponse wr) {
×
3569
            return wr.getResponse();
×
3570
        }
×
3571

3572
        Dataset dataset;
3573

3574
        try {
3575
            dataset = findDatasetOrDie(dvIdtf);
×
3576
        } catch (WrappedResponse ex) {
×
3577
            return ex.getResponse();
×
3578
        }
×
3579

3580
        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
×
3581
    }
3582

3583
    @PUT
3584
    @AuthRequired
3585
    @Path("{identifier}/curationLabelSet")
3586
    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
3587
                                        @PathParam("identifier") String dvIdtf,
3588
                                        @QueryParam("name") String curationLabelSet,
3589
                                        @Context UriInfo uriInfo,
3590
                                        @Context HttpHeaders headers) throws WrappedResponse {
3591

3592
        // Superuser-only:
3593
        AuthenticatedUser user;
3594
        try {
3595
            user = getRequestAuthenticatedUserOrDie(crc);
×
3596
        } catch (WrappedResponse ex) {
×
3597
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
3598
        }
×
3599
        if (!user.isSuperuser()) {
×
3600
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3601
        }
3602

3603
        Dataset dataset;
3604

3605
        try {
3606
            dataset = findDatasetOrDie(dvIdtf);
×
3607
        } catch (WrappedResponse ex) {
×
3608
            return ex.getResponse();
×
3609
        }
×
3610
        if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) {
×
3611
            dataset.setCurationLabelSetName(curationLabelSet);
×
3612
            datasetService.merge(dataset);
×
3613
            return ok("Curation Label Set Name set to: " + curationLabelSet);
×
3614
        } else {
3615
            for (String setName : systemConfig.getCurationLabels().keySet()) {
×
3616
                if (setName.equals(curationLabelSet)) {
×
3617
                    dataset.setCurationLabelSetName(curationLabelSet);
×
3618
                    datasetService.merge(dataset);
×
3619
                    return ok("Curation Label Set Name set to: " + setName);
×
3620
                }
3621
            }
×
3622
        }
3623
        return error(Response.Status.BAD_REQUEST,
×
3624
            "No Such Curation Label Set");
3625
    }
3626

3627
    @DELETE
3628
    @AuthRequired
3629
    @Path("{identifier}/curationLabelSet")
3630
    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
3631
            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
3632

3633
        // Superuser-only:
3634
        AuthenticatedUser user;
3635
        try {
3636
            user = getRequestAuthenticatedUserOrDie(crc);
×
3637
        } catch (WrappedResponse ex) {
×
3638
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
3639
        }
×
3640
        if (!user.isSuperuser()) {
×
3641
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
3642
        }
3643

3644
        Dataset dataset;
3645

3646
        try {
3647
            dataset = findDatasetOrDie(dvIdtf);
×
3648
        } catch (WrappedResponse ex) {
×
3649
            return ex.getResponse();
×
3650
        }
×
3651

3652
        dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET);
×
3653
        datasetService.merge(dataset);
×
3654
        return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET);
×
3655
    }
3656

3657
    @GET
3658
    @AuthRequired
3659
    @Path("{identifier}/allowedCurationLabels")
3660
    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
3661
                                             @PathParam("identifier") String dvIdtf,
3662
                                             @Context UriInfo uriInfo,
3663
                                             @Context HttpHeaders headers) throws WrappedResponse {
3664
        AuthenticatedUser user = null;
×
3665
        try {
3666
            user = getRequestAuthenticatedUserOrDie(crc);
×
3667
        } catch (WrappedResponse wr) {
×
3668
            return wr.getResponse();
×
3669
        }
×
3670

3671
        Dataset dataset;
3672

3673
        try {
3674
            dataset = findDatasetOrDie(dvIdtf);
×
3675
        } catch (WrappedResponse ex) {
×
3676
            return ex.getResponse();
×
3677
        }
×
3678
        if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
×
3679
            String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
×
3680
            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
×
3681
        } else {
3682
            return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
×
3683
        }
3684
    }
3685

3686
    @GET
3687
    @AuthRequired
3688
    @Path("{identifier}/timestamps")
3689
    @Produces(MediaType.APPLICATION_JSON)
3690
    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
3691

3692
        Dataset dataset = null;
×
3693
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
×
3694
        try {
3695
            dataset = findDatasetOrDie(id);
×
3696
            User u = getRequestUser(crc);
×
3697
            Set<Permission> perms = new HashSet<Permission>();
×
3698
            perms.add(Permission.ViewUnpublishedDataset);
×
3699
            boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
×
3700
            JsonObjectBuilder timestamps = Json.createObjectBuilder();
×
3701
            logger.fine("CSD: " + canSeeDraft);
×
3702
            logger.fine("IT: " + dataset.getIndexTime());
×
3703
            logger.fine("MT: " + dataset.getModificationTime());
×
3704
            logger.fine("PIT: " + dataset.getPermissionIndexTime());
×
3705
            logger.fine("PMT: " + dataset.getPermissionModificationTime());
×
3706
            // Basic info if it's released
3707
            if (dataset.isReleased() || canSeeDraft) {
×
3708
                timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime()));
×
3709
                if (dataset.getPublicationDate() != null) {
×
3710
                    timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime()));
×
3711
                }
3712

3713
                if (dataset.getLastExportTime() != null) {
×
3714
                    timestamps.add("lastMetadataExportTime",
×
3715
                            formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault())));
×
3716

3717
                }
3718

3719
                if (dataset.getMostRecentMajorVersionReleaseDate() != null) {
×
3720
                    timestamps.add("lastMajorVersionReleaseTime", formatter.format(
×
3721
                            dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault())));
×
3722
                }
3723
                // If the modification/permissionmodification time is
3724
                // set and the index time is null or is before the mod time, the relevant index is stale
3725
                timestamps.add("hasStaleIndex",
×
3726
                        (dataset.getModificationTime() != null && (dataset.getIndexTime() == null
×
3727
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3728
                                : false);
×
3729
                timestamps.add("hasStalePermissionIndex",
×
3730
                        (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null
×
3731
                                || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true
×
3732
                                : false);
×
3733
            }
3734
            // More detail if you can see a draft
3735
            if (canSeeDraft) {
×
3736
                timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime()));
×
3737
                if (dataset.getIndexTime() != null) {
×
3738
                    timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime()));
×
3739
                }
3740
                if (dataset.getPermissionModificationTime() != null) {
×
3741
                    timestamps.add("lastPermissionUpdateTime",
×
3742
                            formatter.format(dataset.getPermissionModificationTime().toLocalDateTime()));
×
3743
                }
3744
                if (dataset.getPermissionIndexTime() != null) {
×
3745
                    timestamps.add("lastPermissionIndexTime",
×
3746
                            formatter.format(dataset.getPermissionIndexTime().toLocalDateTime()));
×
3747
                }
3748
                if (dataset.getGlobalIdCreateTime() != null) {
×
3749
                    timestamps.add("globalIdCreateTime", formatter
×
3750
                            .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault())));
×
3751
                }
3752

3753
            }
3754
            return ok(timestamps);
×
3755
        } catch (WrappedResponse wr) {
×
3756
            return wr.getResponse();
×
3757
        }
3758
    }
3759

3760

3761
/****************************
3762
 * Globus Support Section:
3763
 * 
3764
 * Globus transfer in (upload) and out (download) involve three basic steps: The
 * app is launched and makes a callback to the
 * globusUploadParameters/globusDownloadParameters method to get all of the info
 * needed to set up its display.
3768
 * 
3769
 * At some point after that, the user will make a selection as to which files to
3770
 * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
3771
 * to indicate a transfer is about to start. In addition to providing the
3772
 * details of where to transfer the files to/from, Dataverse also grants the
3773
 * Globus principal involved the relevant rw or r permission for the dataset.
3774
 * 
3775
 * Once the transfer is started, the app records the task id and sends it to
 * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
 * monitors the transfer task and when it ultimately succeeds or fails it
3778
 * revokes the principal's permission and, for the transfer in case, adds the
3779
 * files to the dataset. (The dataset is locked until the transfer completes.)
3780
 * 
3781
 * (If no transfer is started within a specified timeout, permissions will
3782
 * automatically be revoked - see the GlobusServiceBean for details.)
3783
 *
3784
 * The option to reference a file at a remote endpoint (rather than transfer it)
3785
 * follows the first two steps of the process above but completes with a call to
3786
 * the normal /addFiles endpoint (as there is no transfer to monitor and the
3787
 * files can be added to the dataset immediately.)
3788
 */
3789

3790
    /**
     * Retrieve the parameters and signed URLs required to perform a globus
     * transfer. This api endpoint is expected to be called as a signed callback
     * after the globus-dataverse app/other app is launched, but it will accept
     * other forms of authentication.
     * 
     * @param crc       request context carrying the caller's credentials
     * @param datasetId dataset id or persistent identifier
     * @param locale    optional locale code used when building tokenized URLs
     * @return 200 with a JSON body of substituted parameters and signed
     *         "allowedApiCalls" entries; 400 if the dataset's store does not
     *         accept Globus transfers or references
     */
    @GET
    @AuthRequired
    @Path("{id}/globusUploadParameters")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
            @QueryParam(value = "locale") String locale) {
        // -------------------------------------
        // (1) Get the user from the ContainerRequestContext
        // -------------------------------------
        AuthenticatedUser authUser;
        try {
            authUser = getRequestAuthenticatedUserOrDie(crc);
        } catch (WrappedResponse e) {
            return e.getResponse();
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;

        try {
            dataset = findDatasetOrDie(datasetId);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        String storeId = dataset.getEffectiveStorageDriverId();
        // acceptsGlobusTransfers should only be true for an S3 or globus store
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
        }

        // Token util performs the {placeholder} substitution for this user/dataset.
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);

        // "managed" stores transfer to a single Dataverse-controlled endpoint;
        // unmanaged stores instead expose reference endpoints the app can point at.
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
        String transferEndpoint = null;
        JsonArray referenceEndpointsWithPaths = null;
        if (managed) {
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
        } else {
            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
        }

        // Build the {placeholder} query parameters the app needs, then let the
        // token util substitute real values for this request.
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
        queryParams.add("queryParameters",
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
        JsonObjectBuilder params = Json.createObjectBuilder();
        substitutedParams.keySet().forEach((key) -> {
            params.add(key, substitutedParams.get(key));
        });
        params.add("managed", Boolean.toString(managed));
        if (managed) {
            // Only managed stores enforce per-file size and quota limits here.
            Long maxSize = systemConfig.getMaxFileUploadSizeForStore(storeId);
            if (maxSize != null) {
                params.add("fileSizeLimit", maxSize);
            }
            UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
            if (limit != null) {
                params.add("remainingQuota", limit.getRemainingQuotaInBytes());
            }
        }
        if (transferEndpoint != null) {
            params.add("endpoint", transferEndpoint);
        } else {
            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
        }
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
        // Signed API calls the app is allowed to make back to Dataverse.
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        if(managed) {
        // Managed transfers complete via addGlobusFiles (Dataverse monitors the
        // transfer task); reference-only stores complete via the plain /addFiles.
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
                .add(URLTokenUtil.HTTP_METHOD, "POST")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        } else {
            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
                    .add(URLTokenUtil.HTTP_METHOD, "POST")
                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
        }
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
                .add(URLTokenUtil.HTTP_METHOD, "GET")
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
                .add(URLTokenUtil.TIMEOUT, 5));
        allowedApiCalls.add(
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
                        .add(URLTokenUtil.TIMEOUT, 5));

        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
    }
3899

3900
    /**
3901
     * Provides specific storageIdentifiers to use for each file amd requests permissions for a given globus user to upload to the dataset
3902
     * 
3903
     * @param crc
3904
     * @param datasetId
3905
     * @param jsonData - an object that must include the id of the globus "principal" involved and the "numberOfFiles" that will be transferred.
3906
     * @return
3907
     * @throws IOException
3908
     * @throws ExecutionException
3909
     * @throws InterruptedException
3910
     */
3911
    @POST
3912
    @AuthRequired
3913
    @Path("{id}/requestGlobusUploadPaths")
3914
    @Consumes(MediaType.APPLICATION_JSON)
3915
    @Produces(MediaType.APPLICATION_JSON)
3916
    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
3917
            String jsonBody) throws IOException, ExecutionException, InterruptedException {
3918

3919
        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
×
3920

3921
        if (!systemConfig.isGlobusUpload()) {
×
3922
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
3923
                    BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
3924
        }
3925

3926
        // -------------------------------------
3927
        // (1) Get the user from the ContainerRequestContext
3928
        // -------------------------------------
3929
        AuthenticatedUser authUser;
3930
        try {
3931
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
3932
        } catch (WrappedResponse e) {
×
3933
            return e.getResponse();
×
3934
        }
×
3935

3936
        // -------------------------------------
3937
        // (2) Get the Dataset Id
3938
        // -------------------------------------
3939
        Dataset dataset;
3940

3941
        try {
3942
            dataset = findDatasetOrDie(datasetId);
×
3943
        } catch (WrappedResponse wr) {
×
3944
            return wr.getResponse();
×
3945
        }
×
3946
        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
×
3947
                .canIssue(UpdateDatasetVersionCommand.class)) {
×
3948

3949
            JsonObject params = JsonUtil.getJsonObject(jsonBody);
×
3950
            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
3951
                try {
3952
                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
×
3953
                    if (referencedFiles == null || referencedFiles.size() == 0) {
×
3954
                        return badRequest("No referencedFiles specified");
×
3955
                    }
3956
                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
×
3957
                    return (ok(fileMap));
×
3958
                } catch (Exception e) {
×
3959
                    return badRequest(e.getLocalizedMessage());
×
3960
                }
3961
            } else {
3962
                try {
3963
                    String principal = params.getString("principal");
×
3964
                    int numberOfPaths = params.getInt("numberOfFiles");
×
3965
                    if (numberOfPaths <= 0) {
×
3966
                        return badRequest("numberOfFiles must be positive");
×
3967
                    }
3968

3969
                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
×
3970
                    switch (response.getInt("status")) {
×
3971
                    case 201:
3972
                        return ok(response.getJsonObject("paths"));
×
3973
                    case 400:
3974
                        return badRequest("Unable to grant permission");
×
3975
                    case 409:
3976
                        return conflict("Permission already exists");
×
3977
                    default:
3978
                        return error(null, "Unexpected error when granting permission");
×
3979
                    }
3980

3981
                } catch (NullPointerException | ClassCastException e) {
×
3982
                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
×
3983

3984
                }
3985
            }
3986
        } else {
3987
            return forbidden("User doesn't have permission to upload to this dataset");
×
3988
        }
3989

3990
    }
3991

3992
    /** A method analogous to /addFiles that must also include the taskIdentifier of the transfer-in-progress to monitor
3993
     * 
3994
     * @param crc
3995
     * @param datasetId
3996
     * @param jsonData - see /addFiles documentation, aditional "taskIdentifier" key in the main object is required.
3997
     * @param uriInfo
3998
     * @return
3999
     * @throws IOException
4000
     * @throws ExecutionException
4001
     * @throws InterruptedException
4002
     */
4003
    @POST
4004
    @AuthRequired
4005
    @Path("{id}/addGlobusFiles")
4006
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4007
    @Produces("application/json")
4008
    @Operation(summary = "Uploads a Globus file for a dataset", 
4009
               description = "Uploads a Globus file for a dataset")
4010
    @APIResponse(responseCode = "200",
4011
               description = "Globus file uploaded successfully to dataset")
4012
    @Tag(name = "addGlobusFilesToDataset", 
4013
         description = "Uploads a Globus file for a dataset")
4014
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4015
    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
4016
                                            @PathParam("id") String datasetId,
4017
                                            @FormDataParam("jsonData") String jsonData,
4018
                                            @Context UriInfo uriInfo
4019
    ) throws IOException, ExecutionException, InterruptedException {
4020

4021
        logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);
×
4022

4023
        // -------------------------------------
4024
        // (1) Get the user from the API key
4025
        // -------------------------------------
4026
        AuthenticatedUser authUser;
4027
        try {
4028
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
4029
        } catch (WrappedResponse ex) {
×
4030
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
4031
            );
4032
        }
×
4033

4034
        // -------------------------------------
4035
        // (2) Get the Dataset Id
4036
        // -------------------------------------
4037
        Dataset dataset;
4038

4039
        try {
4040
            dataset = findDatasetOrDie(datasetId);
×
4041
        } catch (WrappedResponse wr) {
×
4042
            return wr.getResponse();
×
4043
        }
×
4044
        
4045
        // Is Globus upload service available? 
4046
        
4047
        // ... on this Dataverse instance?
4048
        if (!systemConfig.isGlobusUpload()) {
×
4049
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.globusUploadDisabled"));
×
4050
        }
4051

4052
        // ... and on this specific Dataset? 
4053
        String storeId = dataset.getEffectiveStorageDriverId();
×
4054
        // acceptsGlobusTransfers should only be true for an S3 or globus store
4055
        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
4056
                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
×
4057
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
×
4058
        }
4059
        
4060
        // Check if the dataset is already locked
4061
        // We are reusing the code and logic used by various command to determine 
4062
        // if there are any locks on the dataset that would prevent the current 
4063
        // users from modifying it:
4064
        try {
4065
            DataverseRequest dataverseRequest = createDataverseRequest(authUser);
×
4066
            permissionService.checkEditDatasetLock(dataset, dataverseRequest, null); 
×
4067
        } catch (IllegalCommandException icex) {
×
4068
            return error(Response.Status.FORBIDDEN, "Dataset " + datasetId + " is locked: " + icex.getLocalizedMessage());
×
4069
        }
×
4070
        
4071
        JsonObject jsonObject = null;
×
4072
        try {
4073
            jsonObject = JsonUtil.getJsonObject(jsonData);
×
4074
        } catch (Exception ex) {
×
4075
            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
×
4076
            return badRequest("Error parsing json body");
×
4077

4078
        }
×
4079

4080
        //------------------------------------
4081
        // (2b) Make sure dataset does not have package file
4082
        // --------------------------------------
4083

4084
        for (DatasetVersion dv : dataset.getVersions()) {
×
4085
            if (dv.isHasPackageFile()) {
×
4086
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4087
                );
4088
            }
4089
        }
×
4090

4091

4092
        String lockInfoMessage = "Globus Upload API started ";
×
4093
        DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.GlobusUpload,
×
4094
                (authUser).getId(), lockInfoMessage);
×
4095
        if (lock != null) {
×
4096
            dataset.addLock(lock);
×
4097
        } else {
4098
            logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
4099
        }
4100

4101
        if(uriInfo != null) {
×
4102
            logger.info(" ====  (api uriInfo.getRequestUri()) jsonData   ====== " + uriInfo.getRequestUri().toString());
×
4103
        }
4104

4105
        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
×
4106
        
4107
        // Async Call
4108
        try {
4109
            globusService.globusUpload(jsonObject, dataset, requestUrl, authUser);
×
4110
        } catch (IllegalArgumentException ex) {
×
4111
            return badRequest("Invalid parameters: "+ex.getMessage());
×
4112
        }
×
4113

4114
        return ok("Async call to Globus Upload started ");
×
4115

4116
    }
4117
    
4118
/**
4119
 * Retrieve the parameters and signed URLs required to perform a globus
4120
 * transfer/download. This api endpoint is expected to be called as a signed
4121
 * callback after the globus-dataverse app/other app is launched, but it will
4122
 * accept other forms of authentication.
4123
 * 
4124
 * @param crc
4125
 * @param datasetId
4126
 * @param locale
4127
 * @param downloadId - an id to a cached object listing the files involved. This is generated via Dataverse and provided to the dataverse-globus app in a signedURL.
4128
 * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools. 
4129
 */
4130
    @GET
4131
    @AuthRequired
4132
    @Path("{id}/globusDownloadParameters")
4133
    @Produces(MediaType.APPLICATION_JSON)
4134
    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4135
            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
4136
        // -------------------------------------
4137
        // (1) Get the user from the ContainerRequestContext
4138
        // -------------------------------------
4139
        AuthenticatedUser authUser = null;
×
4140
        try {
4141
            authUser = getRequestAuthenticatedUserOrDie(crc);
×
4142
        } catch (WrappedResponse e) {
×
4143
            logger.fine("guest user globus download");
×
4144
        }
×
4145
        // -------------------------------------
4146
        // (2) Get the Dataset Id
4147
        // -------------------------------------
4148
        Dataset dataset;
4149

4150
        try {
4151
            dataset = findDatasetOrDie(datasetId);
×
4152
        } catch (WrappedResponse wr) {
×
4153
            return wr.getResponse();
×
4154
        }
×
4155
        String storeId = dataset.getEffectiveStorageDriverId();
×
4156
        // acceptsGlobusTransfers should only be true for an S3 or globus store
4157
        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
×
4158
                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
×
4159
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4160
        }
4161

4162
        JsonObject files = globusService.getFilesForDownload(downloadId);
×
4163
        if (files == null) {
×
4164
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
4165
        }
4166

4167
        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
×
4168

4169
        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
×
4170
        String transferEndpoint = null;
×
4171

4172
        JsonObjectBuilder queryParams = Json.createObjectBuilder();
×
4173
        queryParams.add("queryParameters",
×
4174
                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
×
4175
                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
×
4176
                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
×
4177
                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
×
4178
                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
×
4179
        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
×
4180
        JsonObjectBuilder params = Json.createObjectBuilder();
×
4181
        substitutedParams.keySet().forEach((key) -> {
×
4182
            params.add(key, substitutedParams.get(key));
×
4183
        });
×
4184
        params.add("managed", Boolean.toString(managed));
×
4185
        if (managed) {
×
4186
            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
×
4187
            params.add("endpoint", transferEndpoint);
×
4188
        }
4189
        params.add("files", files);
×
4190
        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
×
4191
        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
×
4192
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
×
4193
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
4194
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
×
4195
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
4196
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
×
4197
                .add(URLTokenUtil.HTTP_METHOD, "POST")
×
4198
                .add(URLTokenUtil.URL_TEMPLATE,
×
4199
                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
4200
                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
×
4201
        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
×
4202
                .add(URLTokenUtil.HTTP_METHOD, "GET")
×
4203
                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
×
4204
                .add(URLTokenUtil.TIMEOUT, 5));
×
4205
        allowedApiCalls.add(
×
4206
                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
×
4207
                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
×
4208
                        .add(URLTokenUtil.TIMEOUT, 5));
×
4209

4210
        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
×
4211
    }
4212

4213
    /**
4214
     * Requests permissions for a given globus user to download the specified files
4215
     * the dataset and returns information about the paths to transfer from.
4216
     * 
4217
     * When called directly rather than in response to being given a downloadId, the jsonData can include a "fileIds" key with an array of file ids to transfer.
4218
     * 
4219
     * @param crc
4220
     * @param datasetId
4221
     * @param jsonData - a JSON object that must include the id of the  Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
4222
     * @return - a JSON object containing a map of file ids to Globus endpoint/path
4223
     * @throws IOException
4224
     * @throws ExecutionException
4225
     * @throws InterruptedException
4226
     */
4227
    @POST
4228
    @AuthRequired
4229
    @Path("{id}/requestGlobusDownload")
4230
    @Consumes(MediaType.APPLICATION_JSON)
4231
    @Produces(MediaType.APPLICATION_JSON)
4232
    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4233
            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
4234
            throws IOException, ExecutionException, InterruptedException {
4235

4236
        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);
×
4237

4238
        if (!systemConfig.isGlobusDownload()) {
×
4239
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
4240
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4241
        }
4242

4243
        // -------------------------------------
4244
        // (1) Get the user from the ContainerRequestContext
4245
        // -------------------------------------
4246
        User user = getRequestUser(crc);
×
4247

4248
        // -------------------------------------
4249
        // (2) Get the Dataset Id
4250
        // -------------------------------------
4251
        Dataset dataset;
4252

4253
        try {
4254
            dataset = findDatasetOrDie(datasetId);
×
4255
        } catch (WrappedResponse wr) {
×
4256
            return wr.getResponse();
×
4257
        }
×
4258
        JsonObject body = null;
×
4259
        if (jsonBody != null) {
×
4260
            body = JsonUtil.getJsonObject(jsonBody);
×
4261
        }
4262
        Set<String> fileIds = null;
×
4263
        if (downloadId != null) {
×
4264
            JsonObject files = globusService.getFilesForDownload(downloadId);
×
4265
            if (files != null) {
×
4266
                fileIds = files.keySet();
×
4267
            }
4268
        } else {
×
4269
            if ((body!=null) && body.containsKey("fileIds")) {
×
4270
                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
×
4271
                fileIds = new HashSet<String>(fileVals.size());
×
4272
                for (JsonValue fileVal : fileVals) {
×
4273
                    String id = null;
×
4274
                    switch (fileVal.getValueType()) {
×
4275
                    case STRING:
4276
                        id = ((JsonString) fileVal).getString();
×
4277
                        break;
×
4278
                    case NUMBER:
4279
                        id = ((JsonNumber) fileVal).toString();
×
4280
                        break;
×
4281
                    default:
4282
                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
×
4283
                    }
4284
                    ;
4285
                    fileIds.add(id);
×
4286
                }
×
4287
            } else {
×
4288
                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
×
4289
            }
4290
        }
4291

4292
        if (fileIds.isEmpty()) {
×
4293
            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
×
4294
        }
4295
        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
×
4296
        for (String id : fileIds) {
×
4297
            boolean published = false;
×
4298
            logger.info("File id: " + id);
×
4299

4300
            DataFile df = null;
×
4301
            try {
4302
                df = findDataFileOrDie(id);
×
4303
            } catch (WrappedResponse wr) {
×
4304
                return wr.getResponse();
×
4305
            }
×
4306
            if (!df.getOwner().equals(dataset)) {
×
4307
                return badRequest("All files must be in the dataset");
×
4308
            }
4309
            dataFiles.add(df);
×
4310

4311
            for (FileMetadata fm : df.getFileMetadatas()) {
×
4312
                if (fm.getDatasetVersion().isPublished()) {
×
4313
                    published = true;
×
4314
                    break;
×
4315
                }
4316
            }
×
4317

4318
            if (!published) {
×
4319
                // If the file is not published, they can still download the file, if the user
4320
                // has the permission to view unpublished versions:
4321

4322
                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
×
4323
                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
×
4324
                    return forbidden("User doesn't have permission to download file: " + id);
×
4325
                }
4326
            } else { // published and restricted and/or embargoed
4327
                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
×
4328
                    // This line also handles all three authenticated session user, token user, and
4329
                    // guest cases.
4330
                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
×
4331
                        return forbidden("User doesn't have permission to download file: " + id);
×
4332
                    }
4333

4334
            }
4335
        }
×
4336
        // Allowed to download all requested files
4337
        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
×
4338
        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
×
4339
            // If managed, give the principal read permissions
4340
            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
×
4341
            switch (status) {
×
4342
            case 201:
4343
                return ok(files);
×
4344
            case 400:
4345
                return badRequest("Unable to grant permission");
×
4346
            case 409:
4347
                return conflict("Permission already exists");
×
4348
            default:
4349
                return error(null, "Unexpected error when granting permission");
×
4350
            }
4351

4352
        }
4353

4354
        return ok(files);
×
4355
    }
4356

4357
    /**
4358
     * Monitors a globus download and removes permissions on the dir/dataset when
4359
     * the specified transfer task is completed.
4360
     * 
4361
     * @param crc
4362
     * @param datasetId
4363
     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
4364
     *                  Globus task to monitor.
4365
     * @return
4366
     * @throws IOException
4367
     * @throws ExecutionException
4368
     * @throws InterruptedException
4369
     */
4370
    @POST
4371
    @AuthRequired
4372
    @Path("{id}/monitorGlobusDownload")
4373
    @Consumes(MediaType.APPLICATION_JSON)
4374
    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
4375
            String jsonData) throws IOException, ExecutionException, InterruptedException {
4376

4377
        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
×
4378

4379
        if (!systemConfig.isGlobusDownload()) {
×
4380
            return error(Response.Status.SERVICE_UNAVAILABLE,
×
4381
                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
×
4382
        }
4383

4384
        // -------------------------------------
4385
        // (1) Get the user from the ContainerRequestContext
4386
        // -------------------------------------
4387
        User authUser;
4388
        authUser = getRequestUser(crc);
×
4389

4390
        // -------------------------------------
4391
        // (2) Get the Dataset Id
4392
        // -------------------------------------
4393
        Dataset dataset;
4394

4395
        try {
4396
            dataset = findDatasetOrDie(datasetId);
×
4397
        } catch (WrappedResponse wr) {
×
4398
            return wr.getResponse();
×
4399
        }
×
4400

4401
        // Async Call
4402
        globusService.globusDownload(jsonData, dataset, authUser);
×
4403

4404
        return ok("Async call to Globus Download started");
×
4405

4406
    }
4407

4408
    /**
4409
     * Add multiple Files to an existing Dataset
4410
     *
4411
     * @param idSupplied
4412
     * @param jsonData
4413
     * @return
4414
     */
4415
    @POST
4416
    @AuthRequired
4417
    @Path("{id}/addFiles")
4418
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4419
    @Produces("application/json")
4420
    @Operation(summary = "Uploads a set of files to a dataset", 
4421
               description = "Uploads a set of files to a dataset")
4422
    @APIResponse(responseCode = "200",
4423
               description = "Files uploaded successfully to dataset")
4424
    @Tag(name = "addFilesToDataset", 
4425
         description = "Uploads a set of files to a dataset")
4426
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))  
4427
    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
4428
            @FormDataParam("jsonData") String jsonData) {
4429

4430
        if (!systemConfig.isHTTPUpload()) {
×
4431
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4432
        }
4433

4434
        // -------------------------------------
4435
        // (1) Get the user from the ContainerRequestContext
4436
        // -------------------------------------
4437
        User authUser;
4438
        authUser = getRequestUser(crc);
×
4439

4440
        // -------------------------------------
4441
        // (2) Get the Dataset Id
4442
        // -------------------------------------
4443
        Dataset dataset;
4444

4445
        try {
4446
            dataset = findDatasetOrDie(idSupplied);
×
4447
        } catch (WrappedResponse wr) {
×
4448
            return wr.getResponse();
×
4449
        }
×
4450

4451
        dataset.getLocks().forEach(dl -> {
×
4452
            logger.info(dl.toString());
×
4453
        });
×
4454

4455
        //------------------------------------
4456
        // (2a) Make sure dataset does not have package file
4457
        // --------------------------------------
4458

4459
        for (DatasetVersion dv : dataset.getVersions()) {
×
4460
            if (dv.isHasPackageFile()) {
×
4461
                return error(Response.Status.FORBIDDEN,
×
4462
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4463
                );
4464
            }
4465
        }
×
4466

4467
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4468

4469
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4470
                dvRequest,
4471
                this.ingestService,
4472
                this.datasetService,
4473
                this.fileService,
4474
                this.permissionSvc,
4475
                this.commandEngine,
4476
                this.systemConfig
4477
        );
4478

4479
        return addFileHelper.addFiles(jsonData, dataset, authUser);
×
4480

4481
    }
4482

4483
    /**
4484
     * Replace multiple Files to an existing Dataset
4485
     *
4486
     * @param idSupplied
4487
     * @param jsonData
4488
     * @return
4489
     */
4490
    @POST
4491
    @AuthRequired
4492
    @Path("{id}/replaceFiles")
4493
    @Consumes(MediaType.MULTIPART_FORM_DATA)
4494
    @Produces("application/json")
4495
    @Operation(summary = "Replace a set of files to a dataset", 
4496
               description = "Replace a set of files to a dataset")
4497
    @APIResponse(responseCode = "200",
4498
               description = "Files replaced successfully to dataset")
4499
    @Tag(name = "replaceFilesInDataset", 
4500
         description = "Replace a set of files to a dataset")
4501
    @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA)) 
4502
    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
4503
                                          @PathParam("id") String idSupplied,
4504
                                          @FormDataParam("jsonData") String jsonData) {
4505

4506
        if (!systemConfig.isHTTPUpload()) {
×
4507
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
4508
        }
4509

4510
        // -------------------------------------
4511
        // (1) Get the user from the ContainerRequestContext
4512
        // -------------------------------------
4513
        User authUser;
4514
        authUser = getRequestUser(crc);
×
4515

4516
        // -------------------------------------
4517
        // (2) Get the Dataset Id
4518
        // -------------------------------------
4519
        Dataset dataset;
4520

4521
        try {
4522
            dataset = findDatasetOrDie(idSupplied);
×
4523
        } catch (WrappedResponse wr) {
×
4524
            return wr.getResponse();
×
4525
        }
×
4526

4527
        dataset.getLocks().forEach(dl -> {
×
4528
            logger.info(dl.toString());
×
4529
        });
×
4530

4531
        //------------------------------------
4532
        // (2a) Make sure dataset does not have package file
4533
        // --------------------------------------
4534

4535
        for (DatasetVersion dv : dataset.getVersions()) {
×
4536
            if (dv.isHasPackageFile()) {
×
4537
                return error(Response.Status.FORBIDDEN,
×
4538
                        BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")
×
4539
                );
4540
            }
4541
        }
×
4542

4543
        DataverseRequest dvRequest = createDataverseRequest(authUser);
×
4544

4545
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(
×
4546
                dvRequest,
4547
                this.ingestService,
4548
                this.datasetService,
4549
                this.fileService,
4550
                this.permissionSvc,
4551
                this.commandEngine,
4552
                this.systemConfig
4553
        );
4554

4555
        return addFileHelper.replaceFiles(jsonData, dataset, authUser);
×
4556

4557
    }
4558

4559
    /**
4560
     * API to find curation assignments and statuses
4561
     *
4562
     * @return
4563
     * @throws WrappedResponse
4564
     */
4565
    @GET
4566
    @AuthRequired
4567
    @Path("/listCurationStates")
4568
    @Produces("text/csv")
4569
    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {
4570

4571
        try {
4572
            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
×
4573
            if (!user.isSuperuser()) {
×
4574
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4575
            }
4576
        } catch (WrappedResponse wr) {
×
4577
            return wr.getResponse();
×
4578
        }
×
4579

4580
        List<DataverseRole> allRoles = dataverseRoleService.findAll();
×
4581
        List<DataverseRole> curationRoles = new ArrayList<DataverseRole>();
×
4582
        allRoles.forEach(r -> {
×
4583
            if (r.permissions().contains(Permission.PublishDataset))
×
4584
                curationRoles.add(r);
×
4585
        });
×
4586
        HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>();
×
4587
        curationRoles.forEach(r -> {
×
4588
            assignees.put(r.getAlias(), null);
×
4589
        });
×
4590

4591
        StringBuilder csvSB = new StringBuilder(String.join(",",
×
4592
                BundleUtil.getStringFromBundle("dataset"),
×
4593
                BundleUtil.getStringFromBundle("datasets.api.creationdate"),
×
4594
                BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
×
4595
                BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
×
4596
                String.join(",", assignees.keySet())));
×
4597
        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
×
4598
            List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
×
4599
            curationRoles.forEach(r -> {
×
4600
                assignees.put(r.getAlias(), new HashSet<String>());
×
4601
            });
×
4602
            for (RoleAssignment ra : ras) {
×
4603
                if (curationRoles.contains(ra.getRole())) {
×
4604
                    assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
×
4605
                }
4606
            }
×
4607
            DatasetVersion dsv = dataset.getLatestVersion();
×
4608
            String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
×
4609
            String status = dsv.getExternalStatusLabel();
×
4610
            String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
×
4611
            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
×
4612
            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
×
4613
            String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
×
4614
            List<String> sList = new ArrayList<String>();
×
4615
            assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
×
4616
            csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList)));
×
4617
        }
×
4618
        csvSB.append("\n");
×
4619
        return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv");
×
4620
    }
4621

4622
    // APIs to manage archival status
4623

4624
    @GET
4625
    @AuthRequired
4626
    @Produces(MediaType.APPLICATION_JSON)
4627
    @Path("/{id}/{version}/archivalStatus")
4628
    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4629
                                                    @PathParam("id") String datasetId,
4630
                                                    @PathParam("version") String versionNumber,
4631
                                                    @Context UriInfo uriInfo,
4632
                                                    @Context HttpHeaders headers) {
4633

4634
        try {
4635
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4636
            if (!au.isSuperuser()) {
×
4637
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4638
            }
4639
            DataverseRequest req = createDataverseRequest(au);
×
4640
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4641
                    headers);
4642

4643
            if (dsv.getArchivalCopyLocation() == null) {
×
4644
                return error(Status.NOT_FOUND, "This dataset version has not been archived");
×
4645
            } else {
4646
                JsonObject status = JsonUtil.getJsonObject(dsv.getArchivalCopyLocation());
×
4647
                return ok(status);
×
4648
            }
4649
        } catch (WrappedResponse wr) {
×
4650
            return wr.getResponse();
×
4651
        }
4652
    }
4653

4654
    /**
     * Superuser-only: sets the archival status of a dataset version from a JSON
     * body that must contain both a status and a message key. Only the statuses
     * "pending", "failure", and "success" (per the DatasetVersion constants) are
     * accepted; anything else falls through to a 400 "Unacceptable status format".
     */
    @PUT
    @AuthRequired
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/{id}/{version}/archivalStatus")
    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
                                                    @PathParam("id") String datasetId,
                                                    @PathParam("version") String versionNumber,
                                                    String newStatus,
                                                    @Context UriInfo uriInfo,
                                                    @Context HttpHeaders headers) {

        logger.fine(newStatus);
        try {
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);

            if (!au.isSuperuser()) {
                return error(Response.Status.FORBIDDEN, "Superusers only.");
            }
            
            //Verify we have valid json after removing any HTML tags (the status gets displayed in the UI, so we want plain text).
            JsonObject update= JsonUtil.getJsonObject(MarkupChecker.stripAllTags(newStatus));
            
            if (update.containsKey(DatasetVersion.ARCHIVAL_STATUS) && update.containsKey(DatasetVersion.ARCHIVAL_STATUS_MESSAGE)) {
                String status = update.getString(DatasetVersion.ARCHIVAL_STATUS);
                if (status.equals(DatasetVersion.ARCHIVAL_STATUS_PENDING) || status.equals(DatasetVersion.ARCHIVAL_STATUS_FAILURE)
                        || status.equals(DatasetVersion.ARCHIVAL_STATUS_SUCCESS)) {

                    DataverseRequest req = createDataverseRequest(au);
                    DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId),
                            uriInfo, headers);

                    if (dsv == null) {
                        return error(Status.NOT_FOUND, "Dataset version not found");
                    }
                    // When the configured archiver only supports one archived version per
                    // dataset, refuse if any OTHER version already has an archival copy.
                    if (isSingleVersionArchiving()) {
                        for (DatasetVersion version : dsv.getDataset().getVersions()) {
                            if ((!dsv.equals(version)) && (version.getArchivalCopyLocation() != null)) {
                                return error(Status.CONFLICT, "Dataset already archived.");
                            }
                        }
                    }

                    // Persist the sanitized JSON verbatim (pretty-printed) as the new status.
                    dsv.setArchivalCopyLocation(JsonUtil.prettyPrint(update));
                    dsv = datasetversionService.merge(dsv);
                    logger.fine("status now: " + dsv.getArchivalCopyLocationStatus());
                    logger.fine("message now: " + dsv.getArchivalCopyLocationMessage());

                    return ok("Status updated");
                }
            }
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (JsonException| IllegalStateException ex) {
            // Thrown by JsonUtil.getJsonObject when the (stripped) body is not valid JSON.
            return error(Status.BAD_REQUEST, "Unable to parse provided JSON");
        }
        // Reached when required keys are missing or the status value is not one of the three allowed.
        return error(Status.BAD_REQUEST, "Unacceptable status format");
    }
4711
    
4712
    @DELETE
4713
    @AuthRequired
4714
    @Produces(MediaType.APPLICATION_JSON)
4715
    @Path("/{id}/{version}/archivalStatus")
4716
    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
4717
                                                       @PathParam("id") String datasetId,
4718
                                                       @PathParam("version") String versionNumber,
4719
                                                       @Context UriInfo uriInfo,
4720
                                                       @Context HttpHeaders headers) {
4721

4722
        try {
4723
            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
×
4724
            if (!au.isSuperuser()) {
×
4725
                return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4726
            }
4727

4728
            DataverseRequest req = createDataverseRequest(au);
×
4729
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo,
×
4730
                    headers);
4731
            if (dsv == null) {
×
4732
                return error(Status.NOT_FOUND, "Dataset version not found");
×
4733
            }
4734
            dsv.setArchivalCopyLocation(null);
×
4735
            dsv = datasetversionService.merge(dsv);
×
4736

4737
            return ok("Status deleted");
×
4738

4739
        } catch (WrappedResponse wr) {
×
4740
            return wr.getResponse();
×
4741
        }
4742
    }
4743
    
4744
    private boolean isSingleVersionArchiving() {
4745
        String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName, null);
×
4746
        if (className != null) {
×
4747
            Class<? extends AbstractSubmitToArchiveCommand> clazz;
4748
            try {
4749
                clazz =  Class.forName(className).asSubclass(AbstractSubmitToArchiveCommand.class);
×
4750
                return ArchiverUtil.onlySingleVersionArchiving(clazz, settingsService);
×
4751
            } catch (ClassNotFoundException e) {
×
4752
                logger.warning(":ArchiverClassName does not refer to a known Archiver");
×
4753
            } catch (ClassCastException cce) {
×
4754
                logger.warning(":ArchiverClassName does not refer to an Archiver class");
×
4755
            }
×
4756
        }
4757
        return false;
×
4758
    }
4759
    
4760
    // This method provides a callback for an external tool to retrieve it's
4761
    // parameters/api URLs. If the request is authenticated, e.g. by it being
4762
    // signed, the api URLs will be signed. If a guest request is made, the URLs
4763
    // will be plain/unsigned.
4764
    // This supports the cases where a tool is accessing a restricted resource (e.g.
4765
    // for a draft dataset), or public case.
4766
    @GET
4767
    @AuthRequired
4768
    @Path("{id}/versions/{version}/toolparams/{tid}")
4769
    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
4770
                                            @PathParam("tid") long externalToolId,
4771
                                            @PathParam("id") String datasetId,
4772
                                            @PathParam("version") String version,
4773
                                            @QueryParam(value = "locale") String locale) {
4774
        try {
4775
            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
×
4776
            DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
×
4777
            if (target == null) {
×
4778
                return error(BAD_REQUEST, "DatasetVersion not found.");
×
4779
            }
4780
            
4781
            ExternalTool externalTool = externalToolService.findById(externalToolId);
×
4782
            if(externalTool==null) {
×
4783
                return error(BAD_REQUEST, "External tool not found.");
×
4784
            }
4785
            if (!ExternalTool.Scope.DATASET.equals(externalTool.getScope())) {
×
4786
                return error(BAD_REQUEST, "External tool does not have dataset scope.");
×
4787
            }
4788
            ApiToken apiToken = null;
×
4789
            User u = getRequestUser(crc);
×
4790
            apiToken = authSvc.getValidApiTokenForUser(u);
×
4791

4792
            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
×
4793
            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
×
4794
        } catch (WrappedResponse wr) {
×
4795
            return wr.getResponse();
×
4796
        }
4797
    }
4798

4799
    @GET
4800
    @Path("summaryFieldNames")
4801
    public Response getDatasetSummaryFieldNames() {
4802
        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
×
4803
        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
×
4804
        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
×
4805
        for (String fieldName : fieldNames) {
×
4806
            fieldNamesArrayBuilder.add(fieldName);
×
4807
        }
4808
        return ok(fieldNamesArrayBuilder);
×
4809
    }
4810

4811
    @GET
4812
    @Path("privateUrlDatasetVersion/{privateUrlToken}")
4813
    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken, @QueryParam("returnOwners") boolean returnOwners) {
4814
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
4815
        if (privateUrlUser == null) {
×
4816
            return notFound("Private URL user not found");
×
4817
        }
4818
        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
×
4819
        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
×
4820
        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
×
4821
            throw new NotAcceptableException("Anonymized Access not enabled");
×
4822
        }
4823
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4824
        if (dsv == null || dsv.getId() == null) {
×
4825
            return notFound("Dataset version not found");
×
4826
        }
4827
        JsonObjectBuilder responseJson;
4828
        if (isAnonymizedAccess) {
×
4829
            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
×
4830
            responseJson = json(dsv, anonymizedFieldTypeNamesList, true, returnOwners);
×
4831
        } else {
×
4832
            responseJson = json(dsv, null, true, returnOwners);
×
4833
        }
4834
        return ok(responseJson);
×
4835
    }
4836

4837
    @GET
4838
    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
4839
    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
4840
        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
×
4841
        if (privateUrlUser == null) {
×
4842
            return notFound("Private URL user not found");
×
4843
        }
4844
        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
×
4845
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
×
4846
                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
×
4847
    }
4848

4849
    @GET
4850
    @AuthRequired
4851
    @Path("{id}/versions/{versionId}/citation")
4852
    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
4853
                                              @PathParam("id") String datasetId,
4854
                                              @PathParam("versionId") String versionId,
4855
                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
4856
                                              @Context UriInfo uriInfo,
4857
                                              @Context HttpHeaders headers) {
4858
        boolean checkFilePerms = false;
×
4859
        return response(req -> ok(
×
4860
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers,
×
4861
                        includeDeaccessioned, checkFilePerms).getCitation(true, false)),
×
4862
                getRequestUser(crc));
×
4863
    }
4864

4865
    /**
     * Deaccessions a specific published dataset version. The body must supply a
     * "deaccessionReason" and may supply a "deaccessionForwardURL". The
     * :draft/:latest aliases are rejected up front, since only published
     * versions can be deaccessioned.
     */
    @POST
    @AuthRequired
    @Path("{id}/versions/{versionId}/deaccession")
    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
        }
        return response(req -> {
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
            try {
                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
                // getString without a default: a missing "deaccessionReason" raises and maps to 400 below.
                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
                if (deaccessionForwardURL != null) {
                    try {
                        // setArchiveNote validates the URL and throws IllegalArgumentException when invalid.
                        datasetVersion.setArchiveNote(deaccessionForwardURL);
                    } catch (IllegalArgumentException iae) {
                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
                    }
                }
                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
                
                // When called via :persistentId, echo the resolved global id rather than the literal alias.
                return ok("Dataset " + 
                        (":persistentId".equals(datasetId) ? datasetVersion.getDataset().getGlobalId().asString() : datasetId) + 
                        " deaccessioned for version " + versionId);
            } catch (JsonParsingException jpe) {
                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
            }
        }, getRequestUser(crc));
    }
4895

4896
    @GET
4897
    @AuthRequired
4898
    @Path("{identifier}/guestbookEntryAtRequest")
4899
    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4900
                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4901

4902
        Dataset dataset;
4903

4904
        try {
4905
            dataset = findDatasetOrDie(dvIdtf);
×
4906
        } catch (WrappedResponse ex) {
×
4907
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4908
        }
×
4909
        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
×
4910
        if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
×
4911
            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4912
        }
4913
        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
×
4914
    }
4915

4916
    @PUT
4917
    @AuthRequired
4918
    @Path("{identifier}/guestbookEntryAtRequest")
4919
    public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4920
                                               boolean gbAtRequest,
4921
                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4922

4923
        // Superuser-only:
4924
        AuthenticatedUser user;
4925
        try {
4926
            user = getRequestAuthenticatedUserOrDie(crc);
×
4927
        } catch (WrappedResponse ex) {
×
4928
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4929
        }
×
4930
        if (!user.isSuperuser()) {
×
4931
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4932
        }
4933

4934
        Dataset dataset;
4935

4936
        try {
4937
            dataset = findDatasetOrDie(dvIdtf);
×
4938
        } catch (WrappedResponse ex) {
×
4939
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4940
        }
×
4941
        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
×
4942
        if (!gbAtRequestOpt.isPresent()) {
×
4943
            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
×
4944
        }
4945
        String choice = Boolean.valueOf(gbAtRequest).toString();
×
4946
        dataset.setGuestbookEntryAtRequest(choice);
×
4947
        datasetService.merge(dataset);
×
4948
        return ok("Guestbook Entry At Request set to: " + choice);
×
4949
    }
4950

4951
    @DELETE
4952
    @AuthRequired
4953
    @Path("{identifier}/guestbookEntryAtRequest")
4954
    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
4955
                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
4956

4957
        // Superuser-only:
4958
        AuthenticatedUser user;
4959
        try {
4960
            user = getRequestAuthenticatedUserOrDie(crc);
×
4961
        } catch (WrappedResponse ex) {
×
4962
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
4963
        }
×
4964
        if (!user.isSuperuser()) {
×
4965
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
4966
        }
4967

4968
        Dataset dataset;
4969

4970
        try {
4971
            dataset = findDatasetOrDie(dvIdtf);
×
4972
        } catch (WrappedResponse ex) {
×
4973
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
4974
        }
×
4975

4976
        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
×
4977
        datasetService.merge(dataset);
×
4978
        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
×
4979
    }
4980

4981
    @GET
4982
    @AuthRequired
4983
    @Path("{id}/userPermissions")
4984
    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
4985
        Dataset dataset;
4986
        try {
4987
            dataset = findDatasetOrDie(datasetId);
×
4988
        } catch (WrappedResponse wr) {
×
4989
            return wr.getResponse();
×
4990
        }
×
4991
        User requestUser = getRequestUser(crc);
×
4992
        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
×
4993
        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
×
4994
        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
×
4995
        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
×
4996
        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
×
4997
        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
×
4998
        return ok(jsonObjectBuilder);
×
4999
    }
5000

5001
    @GET
5002
    @AuthRequired
5003
    @Path("{id}/versions/{versionId}/canDownloadAtLeastOneFile")
5004
    public Response getCanDownloadAtLeastOneFile(@Context ContainerRequestContext crc,
5005
                                                 @PathParam("id") String datasetId,
5006
                                                 @PathParam("versionId") String versionId,
5007
                                                 @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
5008
                                                 @Context UriInfo uriInfo,
5009
                                                 @Context HttpHeaders headers) {
5010
        return response(req -> {
×
5011
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
×
5012
            return ok(permissionService.canDownloadAtLeastOneFile(req, datasetVersion));
×
5013
        }, getRequestUser(crc));
×
5014
    }
5015
    
5016
    /**
5017
     * Get the PidProvider that will be used for generating new DOIs in this dataset
5018
     *
5019
     * @return - the id of the effective PID generator for the given dataset
5020
     * @throws WrappedResponse
5021
     */
5022
    @GET
5023
    @AuthRequired
5024
    @Path("{identifier}/pidGenerator")
5025
    public Response getPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5026
            @Context HttpHeaders headers) throws WrappedResponse {
5027

5028
        Dataset dataset;
5029

5030
        try {
5031
            dataset = findDatasetOrDie(dvIdtf);
×
5032
        } catch (WrappedResponse ex) {
×
5033
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5034
        }
×
5035
        PidProvider pidProvider = dataset.getEffectivePidGenerator();
×
5036
        if(pidProvider == null) {
×
5037
            //This is basically a config error, e.g. if a valid pid provider was removed after this dataset used it
5038
            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.pidgenerator.notfound"));
×
5039
        }
5040
        String pidGeneratorId = pidProvider.getId();
×
5041
        return ok(pidGeneratorId);
×
5042
    }
5043

5044
    @PUT
5045
    @AuthRequired
5046
    @Path("{identifier}/pidGenerator")
5047
    public Response setPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String datasetId,
5048
            String generatorId, @Context HttpHeaders headers) throws WrappedResponse {
5049

5050
        // Superuser-only:
5051
        AuthenticatedUser user;
5052
        try {
5053
            user = getRequestAuthenticatedUserOrDie(crc);
×
5054
        } catch (WrappedResponse ex) {
×
5055
            return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
×
5056
        }
×
5057
        if (!user.isSuperuser()) {
×
5058
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5059
        }
5060

5061
        Dataset dataset;
5062

5063
        try {
5064
            dataset = findDatasetOrDie(datasetId);
×
5065
        } catch (WrappedResponse ex) {
×
5066
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5067
        }
×
5068
        if (PidUtil.getManagedProviderIds().contains(generatorId)) {
×
5069
            dataset.setPidGeneratorId(generatorId);
×
5070
            datasetService.merge(dataset);
×
5071
            return ok("PID Generator set to: " + generatorId);
×
5072
        } else {
5073
            return error(Response.Status.NOT_FOUND, "No PID Generator found for the give id");
×
5074
        }
5075

5076
    }
5077

5078
    @DELETE
5079
    @AuthRequired
5080
    @Path("{identifier}/pidGenerator")
5081
    public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
5082
            @Context HttpHeaders headers) throws WrappedResponse {
5083

5084
        // Superuser-only:
5085
        AuthenticatedUser user;
5086
        try {
5087
            user = getRequestAuthenticatedUserOrDie(crc);
×
5088
        } catch (WrappedResponse ex) {
×
5089
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5090
        }
×
5091
        if (!user.isSuperuser()) {
×
5092
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5093
        }
5094

5095
        Dataset dataset;
5096

5097
        try {
5098
            dataset = findDatasetOrDie(dvIdtf);
×
5099
        } catch (WrappedResponse ex) {
×
5100
            return error(Response.Status.NOT_FOUND, "No such dataset");
×
5101
        }
×
5102

5103
        dataset.setPidGenerator(null);
×
5104
        datasetService.merge(dataset);
×
5105
        return ok("Pid Generator reset to default: " + dataset.getEffectivePidGenerator().getId());
×
5106
    }
5107

5108
    @GET
5109
    @Path("datasetTypes")
5110
    public Response getDatasetTypes() {
5111
        JsonArrayBuilder jab = Json.createArrayBuilder();
×
NEW
5112
        for (DatasetType datasetType : datasetTypeSvc.listAll()) {
×
NEW
5113
            jab.add(datasetType.toJson());
×
NEW
5114
        }
×
NEW
5115
        return ok(jab);
×
5116
    }
5117

5118
    @GET
5119
    @Path("datasetTypes/{idOrName}")
5120
    public Response getDatasetTypes(@PathParam("idOrName") String idOrName) {
5121
        DatasetType datasetType = null;
×
5122
        if (StringUtils.isNumeric(idOrName)) {
×
5123
            try {
5124
                long id = Long.parseLong(idOrName);
×
5125
                datasetType = datasetTypeSvc.getById(id);
×
5126
            } catch (NumberFormatException ex) {
×
5127
                return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
×
5128
            }
×
5129
        } else {
5130
            datasetType = datasetTypeSvc.getByName(idOrName);
×
5131
        }
5132
        if (datasetType != null) {
×
5133
            return ok(datasetType.toJson());
×
5134
        } else {
5135
            return error(NOT_FOUND, "Could not find a dataset type with name " + idOrName);
×
5136
        }
5137
    }
5138

5139
    @POST
5140
    @AuthRequired
5141
    @Path("datasetTypes")
5142
    public Response addDatasetType(@Context ContainerRequestContext crc, String jsonIn) {
5143
        AuthenticatedUser user;
5144
        try {
5145
            user = getRequestAuthenticatedUserOrDie(crc);
×
5146
        } catch (WrappedResponse ex) {
×
5147
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5148
        }
×
5149
        if (!user.isSuperuser()) {
×
5150
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5151
        }
5152

5153
        if (jsonIn == null || jsonIn.isEmpty()) {
×
5154
            return error(BAD_REQUEST, "JSON input was null or empty!");
×
5155
        }
5156

5157
        String nameIn = null;
×
5158
        try {
5159
            JsonObject jsonObject = JsonUtil.getJsonObject(jsonIn);
×
5160
            nameIn = jsonObject.getString("name", null);
×
5161
        } catch (JsonParsingException ex) {
×
5162
            return error(BAD_REQUEST, "Problem parsing supplied JSON: " + ex.getLocalizedMessage());
×
5163
        }
×
5164
        if (nameIn == null) {
×
5165
            return error(BAD_REQUEST, "A name for the dataset type is required");
×
5166
        }
5167
        if (StringUtils.isNumeric(nameIn)) {
×
5168
            // getDatasetTypes supports id or name so we don't want a names that looks like an id
5169
            return error(BAD_REQUEST, "The name of the type cannot be only digits.");
×
5170
        }
5171

5172
        try {
5173
            DatasetType datasetType = new DatasetType();
×
5174
            datasetType.setName(nameIn);
×
5175
            DatasetType saved = datasetTypeSvc.save(datasetType);
×
5176
            Long typeId = saved.getId();
×
5177
            String name = saved.getName();
×
5178
            return ok(saved.toJson());
×
5179
        } catch (WrappedResponse ex) {
×
5180
            return error(BAD_REQUEST, ex.getMessage());
×
5181
        }
5182
    }
5183

5184
    @DELETE
5185
    @AuthRequired
5186
    @Path("datasetTypes/{id}")
5187
    public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathParam("id") String doomed) {
5188
        AuthenticatedUser user;
5189
        try {
5190
            user = getRequestAuthenticatedUserOrDie(crc);
×
5191
        } catch (WrappedResponse ex) {
×
5192
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
5193
        }
×
5194
        if (!user.isSuperuser()) {
×
5195
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
5196
        }
5197

5198
        if (doomed == null || doomed.isEmpty()) {
×
5199
            throw new IllegalArgumentException("ID is required!");
×
5200
        }
5201

5202
        long idToDelete;
5203
        try {
5204
            idToDelete = Long.parseLong(doomed);
×
5205
        } catch (NumberFormatException e) {
×
5206
            throw new IllegalArgumentException("ID must be a number");
×
5207
        }
×
5208

5209
        DatasetType datasetTypeToDelete = datasetTypeSvc.getById(idToDelete);
×
5210
        if (datasetTypeToDelete == null) {
×
5211
            return error(BAD_REQUEST, "Could not find dataset type with id " + idToDelete);
×
5212
        }
5213

5214
        if (DatasetType.DEFAULT_DATASET_TYPE.equals(datasetTypeToDelete.getName())) {
×
5215
            return error(Status.FORBIDDEN, "You cannot delete the default dataset type: " + DatasetType.DEFAULT_DATASET_TYPE);
×
5216
        }
5217

5218
        try {
5219
            int numDeleted = datasetTypeSvc.deleteById(idToDelete);
×
5220
            if (numDeleted == 1) {
×
5221
                return ok("deleted");
×
5222
            } else {
5223
                return error(BAD_REQUEST, "Something went wrong. Number of dataset types deleted: " + numDeleted);
×
5224
            }
5225
        } catch (WrappedResponse ex) {
×
5226
            return error(BAD_REQUEST, ex.getMessage());
×
5227
        }
5228
    }
5229

5230
    @AuthRequired
5231
    @PUT
5232
    @Path("datasetTypes/{idOrName}")
5233
    public Response updateDatasetTypeLinksWithMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("idOrName") String idOrName, String jsonBody) {
NEW
5234
        DatasetType datasetType = null;
×
NEW
5235
        if (StringUtils.isNumeric(idOrName)) {
×
5236
            try {
NEW
5237
                long id = Long.parseLong(idOrName);
×
NEW
5238
                datasetType = datasetTypeSvc.getById(id);
×
NEW
5239
            } catch (NumberFormatException ex) {
×
NEW
5240
                return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName);
×
NEW
5241
            }
×
5242
        } else {
NEW
5243
            datasetType = datasetTypeSvc.getByName(idOrName);
×
5244
        }
NEW
5245
        JsonArrayBuilder datasetTypesBefore = Json.createArrayBuilder();
×
NEW
5246
        for (MetadataBlock metadataBlock : datasetType.getMetadataBlocks()) {
×
NEW
5247
            datasetTypesBefore.add(metadataBlock.getName());
×
NEW
5248
        }
×
NEW
5249
        JsonArrayBuilder datasetTypesAfter = Json.createArrayBuilder();
×
NEW
5250
        List<MetadataBlock> metadataBlocksToSave = new ArrayList<>();
×
NEW
5251
        if (jsonBody != null && !jsonBody.isEmpty()) {
×
NEW
5252
            JsonArray json = JsonUtil.getJsonArray(jsonBody);
×
NEW
5253
            for (JsonString jsonValue : json.getValuesAs(JsonString.class)) {
×
NEW
5254
                String name = jsonValue.getString();
×
NEW
5255
                System.out.println("name: " + name);
×
NEW
5256
                MetadataBlock metadataBlock = metadataBlockSvc.findByName(name);
×
NEW
5257
                if (metadataBlock != null) {
×
NEW
5258
                    metadataBlocksToSave.add(metadataBlock);
×
NEW
5259
                    datasetTypesAfter.add(name);
×
5260
                } else {
NEW
5261
                    String availableBlocks = metadataBlockSvc.listMetadataBlocks().stream().map(MetadataBlock::getName).collect(Collectors.joining(", "));
×
NEW
5262
                    return badRequest("Metadata block not found: " + name + ". Available metadata blocks: " + availableBlocks);
×
5263
                }
NEW
5264
            }
×
5265
        }
5266
        try {
NEW
5267
            execCommand(new UpdateDatasetTypeLinksToMetadataBlocks(createDataverseRequest(getRequestUser(crc)), datasetType, metadataBlocksToSave));
×
NEW
5268
            return ok(Json.createObjectBuilder()
×
NEW
5269
                    .add("linkedMetadataBlocks", Json.createObjectBuilder()
×
NEW
5270
                            .add("before", datasetTypesBefore)
×
NEW
5271
                            .add("after", datasetTypesAfter))
×
5272
            );
5273

NEW
5274
        } catch (WrappedResponse ex) {
×
NEW
5275
            return ex.getResponse();
×
5276
        }
5277
    }
5278

5279
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc