• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

CeON / dataverse / 1362

25 Apr 2024 01:09PM UTC coverage: 25.183% (+0.04%) from 25.147%
1362

push

jenkins

web-flow
Refs #2452: Necessary refactoring of workflow execution interface to have access to output parameters, which are useful for rollback purposes (#2466)

0 of 1 new or added line in 1 file covered. (0.0%)

398 existing lines in 1 file now uncovered.

17521 of 69574 relevant lines covered (25.18%)

0.25 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/dataverse-webapp/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.pi.model.InvalidArgumentException;
4
import edu.harvard.iq.dataverse.DataFileServiceBean;
5
import edu.harvard.iq.dataverse.DatasetDao;
6
import edu.harvard.iq.dataverse.DataverseDao;
7
import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
8
import edu.harvard.iq.dataverse.EjbDataverseEngine;
9
import edu.harvard.iq.dataverse.PermissionServiceBean;
10
import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
11
import edu.harvard.iq.dataverse.S3PackageImporter;
12
import edu.harvard.iq.dataverse.api.annotations.ApiWriteOperation;
13
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
14
import edu.harvard.iq.dataverse.api.dto.DatasetLockDTO;
15
import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
16
import edu.harvard.iq.dataverse.api.dto.FileLabelsChangeOptionsDTO;
17
import edu.harvard.iq.dataverse.api.dto.FileMetadataDTO;
18
import edu.harvard.iq.dataverse.api.dto.MetadataBlockWithFieldsDTO;
19
import edu.harvard.iq.dataverse.api.dto.PrivateUrlDTO;
20
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
21
import edu.harvard.iq.dataverse.api.dto.SubmitForReviewDataDTO;
22
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
23
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
24
import edu.harvard.iq.dataverse.common.BundleUtil;
25
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
26
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
27
import edu.harvard.iq.dataverse.datafile.DataFileCreator;
28
import edu.harvard.iq.dataverse.datafile.file.FileDownloadAPIHandler;
29
import edu.harvard.iq.dataverse.dataset.DatasetFileDownloadUrlCsvWriter;
30
import edu.harvard.iq.dataverse.dataset.DatasetService;
31
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
32
import edu.harvard.iq.dataverse.dataset.DatasetThumbnailService;
33
import edu.harvard.iq.dataverse.dataset.FileLabelInfo;
34
import edu.harvard.iq.dataverse.dataset.FileLabelsService;
35
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
36
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
37
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
38
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
39
import edu.harvard.iq.dataverse.engine.command.Command;
40
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
41
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
42
import edu.harvard.iq.dataverse.engine.command.exception.NoDatasetFilesException;
43
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
44
import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
45
import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand;
46
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
47
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand;
48
import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
49
import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand;
50
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand;
51
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand;
52
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
53
import edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand;
54
import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand;
55
import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand;
56
import edu.harvard.iq.dataverse.engine.command.impl.GetDraftVersionIfExists;
57
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand;
58
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand;
59
import edu.harvard.iq.dataverse.engine.command.impl.GetPrivateUrlCommand;
60
import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand;
61
import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand;
62
import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand;
63
import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments;
64
import edu.harvard.iq.dataverse.engine.command.impl.ListVersionsCommand;
65
import edu.harvard.iq.dataverse.engine.command.impl.MoveDatasetCommand;
66
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
67
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult;
68
import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand;
69
import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
70
import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand;
71
import edu.harvard.iq.dataverse.engine.command.impl.SetDatasetCitationDateCommand;
72
import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand;
73
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetTargetURLCommand;
74
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand;
75
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
76
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand;
77
import edu.harvard.iq.dataverse.error.DataverseError;
78
import edu.harvard.iq.dataverse.export.ExportService;
79
import edu.harvard.iq.dataverse.export.ExporterType;
80
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
81
import edu.harvard.iq.dataverse.notification.NotificationObjectType;
82
import edu.harvard.iq.dataverse.notification.NotificationParameter;
83
import edu.harvard.iq.dataverse.notification.UserNotificationService;
84
import edu.harvard.iq.dataverse.persistence.datafile.DataFile;
85
import edu.harvard.iq.dataverse.persistence.dataset.ControlledVocabularyValue;
86
import edu.harvard.iq.dataverse.persistence.dataset.Dataset;
87
import edu.harvard.iq.dataverse.persistence.dataset.DatasetField;
88
import edu.harvard.iq.dataverse.persistence.dataset.DatasetFieldType;
89
import edu.harvard.iq.dataverse.persistence.dataset.DatasetFieldUtil;
90
import edu.harvard.iq.dataverse.persistence.dataset.DatasetLock;
91
import edu.harvard.iq.dataverse.persistence.dataset.DatasetVersion;
92
import edu.harvard.iq.dataverse.persistence.dataset.MetadataBlock;
93
import edu.harvard.iq.dataverse.persistence.dataverse.Dataverse;
94
import edu.harvard.iq.dataverse.persistence.user.AuthenticatedUser;
95
import edu.harvard.iq.dataverse.persistence.user.DataverseRole;
96
import edu.harvard.iq.dataverse.persistence.user.DataverseRole.BuiltInRole;
97
import edu.harvard.iq.dataverse.persistence.user.NotificationType;
98
import edu.harvard.iq.dataverse.persistence.user.Permission;
99
import edu.harvard.iq.dataverse.persistence.user.RoleAssignee;
100
import edu.harvard.iq.dataverse.persistence.user.User;
101
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
102
import edu.harvard.iq.dataverse.search.index.IndexServiceBean;
103
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
104
import edu.harvard.iq.dataverse.util.ArchiverUtil;
105
import edu.harvard.iq.dataverse.util.EjbUtil;
106
import edu.harvard.iq.dataverse.util.SystemConfig;
107
import edu.harvard.iq.dataverse.util.json.JsonParseException;
108
import io.vavr.control.Either;
109
import io.vavr.control.Try;
110
import org.apache.commons.cli.MissingArgumentException;
111
import org.apache.commons.io.IOUtils;
112
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
113
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
114
import org.glassfish.jersey.media.multipart.FormDataParam;
115

116
import javax.ejb.EJBException;
117
import javax.inject.Inject;
118
import javax.json.Json;
119
import javax.json.JsonArray;
120
import javax.json.JsonArrayBuilder;
121
import javax.json.JsonObject;
122
import javax.json.JsonObjectBuilder;
123
import javax.servlet.http.HttpServletRequest;
124
import javax.servlet.http.HttpServletResponse;
125
import javax.ws.rs.Consumes;
126
import javax.ws.rs.DELETE;
127
import javax.ws.rs.GET;
128
import javax.ws.rs.POST;
129
import javax.ws.rs.PUT;
130
import javax.ws.rs.Path;
131
import javax.ws.rs.PathParam;
132
import javax.ws.rs.Produces;
133
import javax.ws.rs.QueryParam;
134
import javax.ws.rs.core.Context;
135
import javax.ws.rs.core.MediaType;
136
import javax.ws.rs.core.MultivaluedMap;
137
import javax.ws.rs.core.Response;
138
import javax.ws.rs.core.StreamingOutput;
139
import javax.ws.rs.core.UriInfo;
140
import java.io.IOException;
141
import java.io.InputStream;
142
import java.io.StringReader;
143
import java.security.InvalidParameterException;
144
import java.sql.Timestamp;
145
import java.text.ParseException;
146
import java.text.SimpleDateFormat;
147
import java.time.Clock;
148
import java.util.ArrayList;
149
import java.util.Collections;
150
import java.util.Date;
151
import java.util.HashMap;
152
import java.util.HashSet;
153
import java.util.LinkedHashMap;
154
import java.util.LinkedList;
155
import java.util.List;
156
import java.util.Map;
157
import java.util.Optional;
158
import java.util.ResourceBundle;
159
import java.util.Set;
160
import java.util.function.Function;
161
import java.util.logging.Level;
162
import java.util.logging.Logger;
163
import java.util.stream.Collectors;
164

165
@Path("datasets")
166
public class Datasets extends AbstractApiBean {
167

UNCOV
168
    // Canonical class name keeps log entries unambiguous across the api package.
    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());

    // Collaborators assigned in the @Inject constructor below. The no-arg
    // constructor exists only for the CDI proxy, so these cannot be final.
    private DatasetDao datasetDao;
    private DataverseDao dataverseDao;
    private UserNotificationService userNotificationService;
    private PermissionServiceBean permissionService;
    private AuthenticationServiceBean authenticationServiceBean;
    private DataFileServiceBean fileService;
    private IngestServiceBean ingestService;
    private EjbDataverseEngine commandEngine;
    private IndexServiceBean indexService;
    private S3PackageImporter s3PackageImporter;
    private SettingsServiceBean settingsService;
    private ExportService exportService;
    private DatasetService datasetSvc;
    private DatasetsValidators datasetsValidators;
    private OptionalFileParams optionalFileParamsSvc;
    private DataFileCreator dataFileCreator;
    private DatasetThumbnailService datasetThumbnailService;
    private FileDownloadAPIHandler fileDownloadAPIHandler;
    private DataverseRoleServiceBean rolesSvc;
    private RoleAssigneeServiceBean roleAssigneeSvc;
    // NOTE(review): permissionSvc duplicates permissionService (same bean type,
    // both injected) — confirm whether one of them can be retired.
    private PermissionServiceBean permissionSvc;
    private FileLabelsService fileLabelsService;
    private DatasetFileDownloadUrlCsvWriter fileDownloadUrlCsvWriter;
193

194
    // -------------------- CONSTRUCTORS --------------------
195

UNCOV
196
    public Datasets() { }
×
197

198
    /**
     * Container-injected constructor wiring all service collaborators used by
     * this resource. Each parameter is stored as-is; no validation or side
     * effects occur here.
     */
    @Inject
    public Datasets(DatasetDao datasetDao, DataverseDao dataverseDao,
                    UserNotificationService userNotificationService,
                    PermissionServiceBean permissionService, AuthenticationServiceBean authenticationServiceBean,
                    DataFileServiceBean fileService, IngestServiceBean ingestService,
                    EjbDataverseEngine commandEngine, IndexServiceBean indexService,
                    S3PackageImporter s3PackageImporter, SettingsServiceBean settingsService,
                    ExportService exportService, DatasetService datasetSvc,
                    DatasetsValidators datasetsValidators, OptionalFileParams optionalFileParamsSvc,
                    DataFileCreator dataFileCreator, DatasetThumbnailService datasetThumbnailService,
                    FileDownloadAPIHandler fileDownloadAPIHandler, DataverseRoleServiceBean rolesSvc,
                    RoleAssigneeServiceBean roleAssigneeSvc, PermissionServiceBean permissionSvc,
                    FileLabelsService fileLabelsService,
                    DatasetFileDownloadUrlCsvWriter fileDownloadUrlCsvWriter) {
        this.datasetDao = datasetDao;
        this.dataverseDao = dataverseDao;
        this.userNotificationService = userNotificationService;
        this.permissionService = permissionService;
        this.authenticationServiceBean = authenticationServiceBean;
        this.fileService = fileService;
        this.ingestService = ingestService;
        this.commandEngine = commandEngine;
        this.indexService = indexService;
        this.s3PackageImporter = s3PackageImporter;
        this.settingsService = settingsService;
        this.exportService = exportService;
        this.datasetSvc = datasetSvc;
        this.datasetsValidators = datasetsValidators;
        this.optionalFileParamsSvc = optionalFileParamsSvc;
        this.dataFileCreator = dataFileCreator;
        this.datasetThumbnailService = datasetThumbnailService;
        this.fileDownloadAPIHandler = fileDownloadAPIHandler;
        this.rolesSvc = rolesSvc;
        this.roleAssigneeSvc = roleAssigneeSvc;
        this.permissionSvc = permissionSvc;
        this.fileLabelsService = fileLabelsService;
        this.fileDownloadUrlCsvWriter = fileDownloadUrlCsvWriter;
    }
236

237
    // -------------------- LOGIC --------------------
238

239
    @GET
240
    @Path("{id}")
241
    public Response getDataset(@PathParam("id") String id) {
242
        return response(req -> {
×
243
            Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id)));
×
244
            DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
245
            DatasetDTO dataset = new DatasetDTO.Converter().convert(retrieved);
×
246
            if (latest != null) {
×
247
                DatasetVersionDTO latestVersion = new DatasetVersionDTO.Converter().convert(latest);
×
248
                latestVersion = settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
UNCOV
249
                        ? latestVersion.clearEmailFields() : latestVersion;
×
250
                Map<String, Object> dto = dataset.asMap();
×
UNCOV
251
                dto.put("latestVersion", latestVersion);
×
UNCOV
252
                return allowCors(ok(dto));
×
253
            } else {
UNCOV
254
                return allowCors(ok(dataset));
×
255
            }
256
        });
257
    }
258

259
    // TODO:
260
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
261
    // to obtain the dataset that we are trying to export - which would handle
262
    // Auth in the process... For now, Auth isn't necessary - since export ONLY
263
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
264

265
    @GET
266
    @Path("/export")
267
    @Produces({"application/xml", "application/json"})
268
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter) {
269

UNCOV
270
        Optional<ExporterType> exporterConstant = ExporterType.fromPrefix(exporter);
×
271

272
        if (!exporterConstant.isPresent()) {
×
273
            return error(Response.Status.BAD_REQUEST, exporter + " is not a valid exporter");
×
274
        }
275

UNCOV
276
        Dataset dataset = datasetDao.findByGlobalId(persistentId);
×
277
        if (dataset == null) {
×
278
            return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
279
        }
280

281
        Either<DataverseError, String> exportedDataset
×
UNCOV
282
                = exportService.exportDatasetVersionAsString(dataset.getReleasedVersion(), exporterConstant.get());
×
283

284
        if (exportedDataset.isLeft()) {
×
285
            return error(Response.Status.FORBIDDEN, exportedDataset.getLeft().getErrorMsg());
×
286
        }
287

288
        String mediaType = exportService.getMediaType(exporterConstant.get());
×
UNCOV
289
        return allowCors(Response.ok()
×
UNCOV
290
                .entity(exportedDataset.get())
×
UNCOV
291
                .type(mediaType)
×
UNCOV
292
                .build());
×
293
    }
294

295
    @DELETE
296
    @ApiWriteOperation
297
    @Path("{id}")
298
    public Response deleteDataset(@PathParam("id") String id) {
299
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
300
        // (and there's a comment that says "TODO: remove this command")
301
        //  do we need an exposed API call for it?
302
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, if the dataset only has 1
303
        // version... In other words, the functionality currently provided by this API is covered between the
304
        // "deleteDraftVersion" and "destroyDataset" API calls. (The logic below follows the current implementation of
305
        // the underlying commands!)
306

307
        return response(req -> {
×
UNCOV
308
            Dataset doomed = findDatasetOrDie(id);
×
309
            DatasetVersion doomedVersion = doomed.getLatestVersion();
×
310
            User u = findUserOrDie();
×
311
            boolean destroy = false;
×
312

UNCOV
313
            if (doomed.getVersions().size() == 1) {
×
314
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
UNCOV
315
                    throw new WrappedResponse(
×
316
                            error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
×
317
                }
318
                destroy = true;
×
319
            } else {
UNCOV
320
                if (!doomedVersion.isDraft()) {
×
UNCOV
321
                    throw new WrappedResponse(
×
UNCOV
322
                            error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. " +
×
323
                                    "This API can only delete the latest version if it is a DRAFT"));
324
                }
325
            }
326

327
            // Gather the locations of the physical files that will need to be deleted once the destroy command
328
            // execution has been finalized:
UNCOV
329
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
×
UNCOV
330
            execCommand(new DeleteDatasetCommand(req, findDatasetOrDie(id)));
×
331

332
            // If we have gotten this far, the destroy command has succeeded, so we can finalize it by permanently
333
            // deleting the physical files: (DataFileService will double-check that the datafiles no longer exist in the
334
            // database, before attempting to delete the physical files)
335
            if (!deleteStorageLocations.isEmpty()) {
×
UNCOV
336
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
337
            }
338

UNCOV
339
            return ok("Dataset " + id + " deleted");
×
340
        });
341
    }
342

343
    /**
     * Permanently destroys a dataset, all of its versions, and its physical
     * files. For released datasets this is restricted to superusers.
     */
    @DELETE
    @ApiWriteOperation
    @Path("{id}/destroy")
    public Response destroyDataset(@PathParam("id") String id) {
        return response(req -> {
            // first check if dataset is released, and if so, if user is a superuser
            Dataset doomed = findDatasetOrDie(id);
            User user = findUserOrDie();

            if (doomed.isReleased() && (!(user instanceof AuthenticatedUser) || !user.isSuperuser())) {
                throw new WrappedResponse(
                        error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
            }

            // Gather the locations of the physical files that will need to be deleted once the destroy command
            // execution has been finalized:
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
            execCommand(new DestroyDatasetCommand(doomed, req));

            // If we have gotten this far, the destroy command has succeeded, so we can finalize permanently deleting
            // the physical files: (DataFileService will double-check that the datafiles no longer exist in the
            // database, before attempting to delete the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }
            return ok("Dataset " + id + " destroyed");
        });
    }
371

372
    /**
     * Deletes the draft version of a dataset. Only the literal version id
     * ":draft" is accepted; the latest version must actually be a draft.
     */
    @DELETE
    @ApiWriteOperation
    @Path("{id}/versions/{versionId}")
    public Response deleteDraftVersion(@PathParam("id") String id, @PathParam("versionId") String versionId) {
        if (!":draft".equals(versionId)) {
            return badRequest("Only the :draft version can be deleted");
        }

        return response(req -> {
            Dataset dataset = findDatasetOrDie(id);
            DatasetVersion doomed = dataset.getLatestVersion();

            if (!doomed.isDraft()) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
            }

            // Gather the locations of the physical files that will need to be deleted once the destroy command
            // execution has been finalized:

            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

            execCommand(new DeleteDatasetVersionCommand(req, dataset));

            // If we have gotten this far, the delete command has succeeded - by either deleting the Draft version of a
            // published dataset, or destroying an unpublished one.
            // This means we can finalize permanently deleting the physical files: (DataFileService will double-check
            // that the datafiles no longer exist in the database, before attempting to delete the physical files)
            if (!deleteStorageLocations.isEmpty()) {
                fileService.finalizeFileDeletes(deleteStorageLocations);
            }

            return ok("Draft version of dataset " + id + " deleted");
        });
    }
406

407
    @DELETE
408
    @ApiWriteOperation
409
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
410
    public Response deleteDatasetLinkingDataverse(@PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
411
        boolean index = true;
×
UNCOV
412
        return response(req -> {
×
UNCOV
413
            execCommand(new DeleteDatasetLinkingDataverseCommand(
×
UNCOV
414
                    req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
UNCOV
415
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
416
        });
417
    }
418

419
    @PUT
420
    @ApiWriteOperation
421
    @Path("{id}/citationdate")
422
    public Response setCitationDate(@PathParam("id") String id, String dsfTypeName) {
423
        return response(req -> {
×
424
            if (dsfTypeName.trim().isEmpty()) {
×
425
                return badRequest("Please provide a dataset field type in the requst body.");
×
426
            }
427
            DatasetFieldType dsfType = null;
×
UNCOV
428
            if (!":publicationDate".equals(dsfTypeName)) {
×
UNCOV
429
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
UNCOV
430
                if (dsfType == null) {
×
431
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
432
                }
433
            }
434

UNCOV
435
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
UNCOV
436
            return ok("Citation Date for dataset " + id + " set to: "
×
UNCOV
437
                    + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
438
        });
439
    }
440

441
    @DELETE
442
    @ApiWriteOperation
443
    @Path("{id}/citationdate")
444
    public Response useDefaultCitationDate(@PathParam("id") String id) {
UNCOV
445
        return response(req -> {
×
UNCOV
446
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
UNCOV
447
            return ok("Citation Date for dataset " + id + " set to default");
×
448
        });
449
    }
450

451
    @GET
452
    @Path("{id}/versions")
453
    public Response listVersions(@PathParam("id") String id) {
454
        DatasetVersionDTO.Converter converter = new DatasetVersionDTO.Converter();
×
455
        boolean excludeEmails = settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport);
×
456
        return allowCors(response(req -> ok(
×
UNCOV
457
                execCommand(new ListVersionsCommand(req, findDatasetOrDie(id))).stream()
×
458
                        .map(v -> {
×
UNCOV
459
                            DatasetVersionDTO dto = converter.convert(v);
×
UNCOV
460
                            return excludeEmails ? dto.clearEmailFields() : dto;
×
461
                        })
UNCOV
462
                        .collect(Collectors.toList()))));
×
463
    }
464

465
    @GET
466
    @Path("{id}/versions/{versionId}")
467
    public Response getVersion(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
468
        return allowCors(response(req -> {
×
UNCOV
469
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId));
×
UNCOV
470
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(datasetVersion);
×
UNCOV
471
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
UNCOV
472
                    ? dto.clearEmailFields() : dto);
×
473
        }));
474
    }
475

476
    @GET
477
    @Path("{id}/versions/{versionId}/files")
478
    public Response listVersionFiles(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
UNCOV
479
        return allowCors(response(req -> ok(new FileMetadataDTO.Converter().convert(
×
UNCOV
480
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId)).getFileMetadatas()))));
×
481
    }
482

483
    /**
     * Streams all files of a dataset version as a single zip archive.
     *
     * @param gbrecs presumably "guestbook record skip" flag passed through to the
     *               download handler — TODO confirm semantics against FileDownloadAPIHandler
     */
    @GET
    @Path("{id}/versions/{versionId}/files/download")
    @Produces({"application/zip"})
    @ApiWriteOperation
    public Response getVersionFiles(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("gbrecs") boolean gbrecs,
                                    @Context HttpServletResponse response, @Context UriInfo uriInfo) {

        // Resolve the calling user from the api token; a lookup failure is only
        // logged at FINE and then .get() rethrows it unwrapped.
        // NOTE(review): Try.of(...).get() propagates the original exception on
        // failure rather than returning an anonymous user — confirm intended.
        User apiTokenUser = Try.of(this::findUserOrDie)
                               .onFailure(throwable -> logger.log(Level.FINE, "Failed finding user for apiToken: ", throwable))
                               .get();

        // Symbolic version ids (e.g. ":draft", "1.0") are resolved to the numeric
        // database id the download handler expects; numeric ids pass through.
        String finalVersionId = versionId;
        if (!versionId.matches("[0-9]+")) {
            DataverseRequest dataverseRequest = createDataverseRequest(apiTokenUser);
            try {
                Dataset dataset = findDatasetOrDie(datasetId);
                DatasetVersion datasetVersion = getDatasetVersionOrDie(dataverseRequest, versionId, dataset);
                finalVersionId = datasetVersion.getId().toString();
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }
        }

        boolean originalFormatRequested = isOriginalFormatRequested(uriInfo.getQueryParameters());

        // Headers are set on the servlet response directly because the entity is
        // a StreamingOutput written after this method returns.
        response.setHeader("Content-disposition", "attachment; filename=\"dataverse_files.zip\"");
        response.setHeader("Content-Type", "application/zip; name=\"dataverse_files.zip\"");

        StreamingOutput fileStream = fileDownloadAPIHandler.downloadFiles(apiTokenUser, finalVersionId, originalFormatRequested, gbrecs);
        return Response.ok(fileStream).build();
    }
514

515
    /**
     * Streams a CSV of direct download URLs for all files of a released
     * dataset version. Rejected for embargoed datasets and for datasets with
     * an enabled guestbook (direct URLs would bypass guestbook recording —
     * presumably; confirm against guestbook requirements).
     */
    @GET
    @Path("{id}/versions/{versionId}/files/urls")
    @Produces({"text/csv"})
    public Response getVersionFilesUrls(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
        return allowCors(response(req -> {
            Dataset dataset = findDatasetOrDie(datasetId);
            if (dataset.hasActiveEmbargo()) {
                return badRequest("Requested dataset is under embargo.");
            }

            if (dataset.getGuestbook() != null && dataset.getGuestbook().isEnabled() && dataset.getGuestbook().getDataverse() != null) {
                return badRequest("Requested dataset has guestbook enabled.");
            }

            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, dataset);
            if (!datasetVersion.isReleased()) {
                return badRequest("Requested version has not been released.");
            }

            // CSV is written lazily, straight into the response output stream.
            StreamingOutput csvContent = output -> fileDownloadUrlCsvWriter.write(output, datasetVersion.getFileMetadatas());

            return Response.ok(csvContent)
                    .header("Content-Disposition", "attachment; filename=\"dataset-file-urls.csv\"")
                    .build();
        }));
    }
541

542
    /**
     * Returns the version's metadata fields grouped by metadata block, keyed
     * by the block's display name.
     */
    @GET
    @Path("{id}/versions/{versionId}/metadata")
    public Response getVersionMetadata(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
        MetadataBlockWithFieldsDTO.Creator creator = new MetadataBlockWithFieldsDTO.Creator();
        return allowCors(response(r -> {
            List<DatasetField> fields = getDatasetVersionOrDie(r, versionId, findDatasetOrDie(datasetId)).getDatasetFields();
            // LinkedHashMap keeps block order; on duplicate display names the
            // later block wins ((prev, next) -> next).
            Map<String, MetadataBlockWithFieldsDTO> dto = DatasetField.groupByBlock(fields)
                    .entrySet().stream()
                    .map(e -> creator.create(e.getKey(), e.getValue()))
                    .collect(Collectors.toMap(
                            MetadataBlockWithFieldsDTO::getDisplayName, Function.identity(),
                            (prev, next) -> next, LinkedHashMap::new));
            if (settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)) {
                dto.values().forEach(MetadataBlockWithFieldsDTO::clearEmailFields);
            }
            return ok(dto);
        }));
    }
560

561
    @GET
562
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
563
    public Response getVersionMetadataBlock(@PathParam("id") String datasetId,
564
                                            @PathParam("versionNumber") String versionNumber,
565
                                            @PathParam("block") String blockName) {
566

567
        return allowCors(response(req -> {
×
UNCOV
568
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId));
×
569

570
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
571
            for (Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet()) {
×
572
                if (p.getKey().getName().equals(blockName)) {
×
573
                    MetadataBlockWithFieldsDTO blockWithFields = new MetadataBlockWithFieldsDTO.Creator().create(p.getKey(), p.getValue());
×
574
                    if (settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)) {
×
UNCOV
575
                        blockWithFields.clearEmailFields();
×
576
                    }
UNCOV
577
                    return ok(blockWithFields);
×
578
                }
UNCOV
579
            }
×
UNCOV
580
            return notFound("metadata block named " + blockName + " not found");
×
581
        }));
582
    }
583

584
    @GET
585
    @Path("{id}/modifyRegistration")
586
    public Response updateDatasetTargetURL(@PathParam("id") String id) {
UNCOV
587
        return response(req -> {
×
588
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
589
            return ok("Dataset " + id + " target url updated");
×
590
        });
591
    }
592

593
    @POST
594
    @ApiWriteOperation
595
    @Path("/modifyRegistrationAll")
596
    public Response updateDatasetTargetURLAll() {
UNCOV
597
        return response(req -> {
×
UNCOV
598
            datasetDao.findAll().forEach(ds -> {
×
599
                try {
UNCOV
600
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
UNCOV
601
                } catch (WrappedResponse ex) {
×
UNCOV
602
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
603
                }
×
604
            });
×
UNCOV
605
            return ok("Update All Dataset target url completed");
×
606
        });
607
    }
608

609
    @POST
610
    @ApiWriteOperation
611
    @Path("{id}/modifyRegistrationMetadata")
612
    public Response updateDatasetPIDMetadata(@PathParam("id") String id) {
613
        try {
UNCOV
614
            Dataset dataset = findDatasetOrDie(id);
×
UNCOV
615
            if (!dataset.isReleased()) {
×
UNCOV
616
                return error(Response.Status.BAD_REQUEST,
×
UNCOV
617
                        BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
×
618
            }
UNCOV
619
        } catch (WrappedResponse ex) {
×
620
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
621
        }
×
622

UNCOV
623
        return response(req -> {
×
624
            execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
×
625
            List<String> args = Collections.singletonList(id);
×
626
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
×
627
        });
628
    }
629

630
    @GET
631
    @ApiWriteOperation
632
    @Path("/modifyRegistrationPIDMetadataAll")
633
    public Response updateDatasetPIDMetadataAll() {
634
        return response(req -> {
×
635
            datasetDao.findAll().forEach(ds -> {
×
636
                try {
637
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
×
638
                } catch (WrappedResponse ex) {
×
639
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
UNCOV
640
                }
×
UNCOV
641
            });
×
642
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
×
643
        });
644
    }
645

646
    @PUT
647
    @ApiWriteOperation
648
    @Path("{id}/versions/{versionId}")
649
    public Response updateDraftVersion(String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
650

651
        if (!":draft".equals(versionId)) {
×
652
            return error(Response.Status.BAD_REQUEST, "Only the :draft version can be updated");
×
653
        }
654

655
        try (StringReader rdr = new StringReader(jsonBody)) {
×
UNCOV
656
            DataverseRequest req = createDataverseRequest(findUserOrDie());
×
657
            Dataset ds = findDatasetOrDie(id);
×
658
            JsonObject json = Json.createReader(rdr).readObject();
×
UNCOV
659
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
×
660

661
            // clear possibly stale fields from the incoming dataset version.
662
            // creation and modification dates are updated by the commands.
UNCOV
663
            incomingVersion.setId(null);
×
UNCOV
664
            incomingVersion.setVersionNumber(null);
×
UNCOV
665
            incomingVersion.setMinorVersionNumber(null);
×
666
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
×
667
            incomingVersion.setDataset(ds);
×
UNCOV
668
            incomingVersion.setCreateTime(null);
×
UNCOV
669
            incomingVersion.setLastUpdateTime(null);
×
UNCOV
670
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
671

672
            DatasetVersion managedVersion;
UNCOV
673
            if (updateDraft) {
×
UNCOV
674
                final DatasetVersion editVersion = ds.getEditVersion();
×
675
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
×
676
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
×
677
                managedVersion = managedDataset.getEditVersion();
×
678
            } else {
×
UNCOV
679
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
×
680
            }
UNCOV
681
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(managedVersion);
×
682
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
683
                    ? dto.clearEmailFields() : dto);
×
684
        } catch (JsonParseException ex) {
×
685
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
×
686
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage());
×
687

688
        } catch (WrappedResponse ex) {
×
689
            return ex.getResponse();
×
690
        }
691
    }
692

693
    @PUT
694
    @ApiWriteOperation
695
    @Path("{id}/deleteMetadata")
696
    public Response deleteVersionMetadata(String jsonBody, @PathParam("id") String id) throws WrappedResponse {
697
        DataverseRequest req = createDataverseRequest(findUserOrDie());
×
698
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
699
    }
700

701
    @PUT
702
    @ApiWriteOperation
703
    @Path("{id}/setEmbargo")
704
    public Response setEmbargoDate(@PathParam("id") String id, @QueryParam("date") String date) {
705
        try {
UNCOV
706
            Dataset dataset = findDatasetOrDie(id);
×
UNCOV
707
            SimpleDateFormat dateFormat = new SimpleDateFormat(settingsService.getValueForKey(SettingsServiceBean.Key.DefaultDateFormat));
×
UNCOV
708
            if(date == null) {
×
709
                throw new WrappedResponse(badRequest(BundleUtil.getStringFromBundle(
×
710
                        "datasets.api.setEmbargo.failure.badDate.missing",
711
                        settingsSvc.getValueForKey(SettingsServiceBean.Key.DefaultDateFormat))));
×
712
            }
713
            Date embargoDate = dateFormat.parse(date);
×
714
            datasetsValidators.validateEmbargoDate(embargoDate);
×
715
            dataset = datasetSvc.setDatasetEmbargoDate(dataset, embargoDate);
×
716
            return ok(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.success",
×
717
                    dataset.getGlobalId(), dataset.getEmbargoDate().get().toInstant()));
×
718
        } catch (WrappedResponse wr) {
×
719
            return wr.getResponse();
×
UNCOV
720
        } catch (ParseException pe) {
×
UNCOV
721
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.failure.badDate.format",
×
UNCOV
722
                    settingsSvc.getValueForKey(SettingsServiceBean.Key.DefaultDateFormat)));
×
UNCOV
723
        } catch (InvalidArgumentException iae) {
×
UNCOV
724
            return badRequest(iae.getMessage());
×
UNCOV
725
        } catch (EJBException ise) {
×
UNCOV
726
            return badRequest(ise.getCause().getMessage());
×
UNCOV
727
        } catch (PermissionException pe) {
×
728
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.failure.missingPermissions",
×
729
                    pe.getMissingPermissions().toString()));
×
730
        } catch (Exception e) {
×
UNCOV
731
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.failure.unknown", e.getMessage()));
×
732
        }
733
    }
734

735
    @PUT
736
    @ApiWriteOperation
737
    @Path("{id}/liftEmbargo")
738
    public Response liftEmbargoDate(@PathParam("id") String id) {
739
        try {
UNCOV
740
            Dataset dataset = findDatasetOrDie(id);
×
741
            dataset = datasetSvc.liftDatasetEmbargoDate(dataset);
×
742
            return ok(BundleUtil.getStringFromBundle("datasets.api.liftEmbargo.success", dataset.getGlobalId()));
×
UNCOV
743
        } catch (WrappedResponse wr) {
×
UNCOV
744
            return wr.getResponse();
×
UNCOV
745
        } catch (EJBException ise) {
×
UNCOV
746
            return badRequest(ise.getCause().getMessage());
×
UNCOV
747
        } catch (PermissionException pe) {
×
UNCOV
748
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.liftEmbargo.failure.missingPermissions", pe.getMissingPermissions().toString()));
×
UNCOV
749
        } catch (Exception e) {
×
750
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.liftEmbargo.failure.unknown", e.getMessage()));
×
751
        }
752
    }
753

754
    @PUT
755
    @ApiWriteOperation
756
    @Path("{id}/editMetadata")
757
    public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace)
758
            throws WrappedResponse {
UNCOV
759
        Boolean replaceData = replace != null;
×
760
        DataverseRequest req = createDataverseRequest(findUserOrDie());
×
761
        return processDatasetUpdate(jsonBody, id, req, replaceData);
×
762
    }
763

764
    /**
765
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
766
     */
767
    @GET
768
    @ApiWriteOperation
769
    @Path("{id}/actions/:publish")
770
    @Deprecated
771
    public Response publishDataseUsingGetDeprecated(@PathParam("id") String id, @QueryParam("type") String type) {
UNCOV
772
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
×
773
        return publishDataset(id, type);
×
774
    }
775

776
    @POST
777
    @ApiWriteOperation
778
    @Path("{id}/actions/:publish")
779
    public Response publishDataset(@PathParam("id") String id, @QueryParam("type") String type) {
780
        try {
UNCOV
781
            if (type == null) {
×
UNCOV
782
                return error(Response.Status.BAD_REQUEST,
×
783
                        "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
784
            }
785
            boolean updateCurrent = false;
×
786
            AuthenticatedUser user = findAuthenticatedUserOrDie();
×
UNCOV
787
            type = type.toLowerCase();
×
788
            boolean isMinor = false;
×
789
            switch (type) {
×
790
                case "minor":
791
                    isMinor = true;
×
UNCOV
792
                    break;
×
793
                case "major":
794
                    isMinor = false;
×
795
                    break;
×
796
                case "updatecurrent":
797
                    if (user.isSuperuser()) {
×
798
                        updateCurrent = true;
×
799
                    } else {
800
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
×
801
                    }
802
                    break;
803
                default:
UNCOV
804
                    return error(Response.Status.BAD_REQUEST,
×
805
                                "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
806
            }
807

UNCOV
808
            Dataset ds = findDatasetOrDie(id);
×
809
            if (updateCurrent) {
×
810
                /*
811
                 * Note: The code here mirrors that in the
812
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
813
                 * to the core logic (i.e. beyond updating the messaging about results) should
814
                 * be applied to the code there as well.
815
                 */
UNCOV
816
                String errorMsg = null;
×
817
                String successMsg = null;
×
818
                try {
819
                    CuratePublishedDatasetVersionCommand cmd =
×
820
                            new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
×
821
                    ds = commandEngine.submit(cmd);
×
822
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");
×
823

824
                    // If configured, update archive copy as well
825
                    String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName);
×
826
                    DatasetVersion updateVersion = ds.getLatestVersion();
×
827
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(
×
UNCOV
828
                            className, createDataverseRequest(user), updateVersion, authenticationServiceBean, Clock.systemUTC());
×
829
                    if (archiveCommand != null) {
×
830
                        // Delete the record of any existing copy since it is now out of date/incorrect
831
                        updateVersion.setArchivalCopyLocation(null);
×
832

833
                        // Then try to generate and submit an archival copy. Note that running this command within the
834
                        // CuratePublishedDatasetVersionCommand was causing an error:
835
                        // "The attribute [id] of class [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped
836
                        // to a primary key column in the database. Updates are not allowed."
837
                        // To avoid that, and to simplify reporting back to the GUI whether this optional step
838
                        // succeeded, I've pulled this out as a separate submit().
839
                        try {
UNCOV
840
                            updateVersion = commandEngine.submit(archiveCommand);
×
UNCOV
841
                            successMsg = BundleUtil.getStringFromBundle(updateVersion.getArchivalCopyLocation() != null
×
842
                                    ? "datasetversion.update.archive.success" : "datasetversion.update.archive.failure");
843
                        } catch (CommandException ex) {
×
844
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
×
845
                            logger.severe(ex.getMessage());
×
846
                        }
×
847
                    }
UNCOV
848
                } catch (CommandException ex) {
×
849
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
×
850
                    logger.severe(ex.getMessage());
×
851
                }
×
852
                return errorMsg != null
×
UNCOV
853
                        ? error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg)
×
UNCOV
854
                        : ok(new DatasetDTO.Converter().convert(ds), successMsg);
×
855
            } else {
UNCOV
856
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, createDataverseRequest(user), isMinor));
×
UNCOV
857
                DatasetDTO dto = new DatasetDTO.Converter().convert(res.getDataset());
×
UNCOV
858
                return res.isCompleted() ? ok(dto) : accepted(dto);
×
859
            }
UNCOV
860
        } catch (WrappedResponse ex) {
×
UNCOV
861
            return ex.getResponse();
×
862
        } catch (NoDatasetFilesException ex) {
×
863
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Unable to publish dataset, since there are no files in it.");
×
864
        }
865
    }
866

867
    @POST
868
    @ApiWriteOperation
869
    @Path("{id}/move/{targetDataverseAlias}")
870
    public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias,
871
                                @QueryParam("forceMove") Boolean force) {
872
        try {
873
            User user = findUserOrDie();
×
874
            Dataset dataset = findDatasetOrDie(id);
×
UNCOV
875
            Dataverse target = dataverseDao.findByAlias(targetDataverseAlias);
×
UNCOV
876
            if (target == null) {
×
UNCOV
877
                return error(Response.Status.BAD_REQUEST, "Target Dataverse not found.");
×
878
            }
879
            //Command requires Super user - it will be tested by the command
UNCOV
880
            execCommand(new MoveDatasetCommand(createDataverseRequest(user), dataset, target, force));
×
UNCOV
881
            return ok("Dataset moved successfully");
×
882
        } catch (WrappedResponse ex) {
×
883
            return ex.getResponse();
×
884
        }
885
    }
886

887
    @PUT
888
    @ApiWriteOperation
889
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
890
    public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId,
891
                                @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
892
        try {
893
            User user = findUserOrDie();
×
894
            Dataset linked = findDatasetOrDie(linkedDatasetId);
×
895
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
×
896
            if (linked == null) {
×
897
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
×
898
            }
UNCOV
899
            if (linking == null) {
×
UNCOV
900
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
×
901
            }
UNCOV
902
            execCommand(new LinkDatasetCommand(createDataverseRequest(user), linking, linked));
×
UNCOV
903
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
×
UNCOV
904
        } catch (WrappedResponse ex) {
×
UNCOV
905
            return ex.getResponse();
×
906
        }
907
    }
908

909
    @GET
910
    @Path("{id}/links")
911
    public Response getLinks(@PathParam("id") String idSupplied) {
912
        try {
UNCOV
913
            User user = findUserOrDie();
×
UNCOV
914
            if (!user.isSuperuser()) {
×
915
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
916
            }
917
            Dataset dataset = findDatasetOrDie(idSupplied);
×
918
            long datasetId = dataset.getId();
×
UNCOV
919
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
920
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
921
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
922
                dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
×
923
            }
×
924
            JsonObjectBuilder response = Json.createObjectBuilder();
×
925
            response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
×
926
            return ok(response);
×
927
        } catch (WrappedResponse wr) {
×
UNCOV
928
            return wr.getResponse();
×
929
        }
930
    }
931

932
    /**
933
     * @todo Make this real. Currently only used for API testing. Copied from
934
     * the equivalent API endpoint for dataverses and simplified with values
935
     * hard coded.
936
     */
937
    @POST
938
    @ApiWriteOperation
939
    @Path("{identifier}/assignments")
940
    public Response createAssignment(String userOrGroup, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
UNCOV
941
        boolean apiTestingOnly = true;
×
UNCOV
942
        if (apiTestingOnly) {
×
UNCOV
943
            return error(Response.Status.FORBIDDEN, "This is only for API tests.");
×
944
        }
945
        try {
946
            Dataset dataset = findDatasetOrDie(id);
×
947
            RoleAssignee assignee = findAssignee(userOrGroup);
×
948
            if (assignee == null) {
×
UNCOV
949
                return error(Response.Status.BAD_REQUEST, "Assignee not found");
×
950
            }
UNCOV
951
            DataverseRole theRole = rolesSvc.findBuiltinRoleByAlias(BuiltInRole.ADMIN);
×
UNCOV
952
            String privateUrlToken = null;
×
UNCOV
953
            return ok(
×
UNCOV
954
                    new RoleAssignmentDTO.Converter().convert(execCommand(
×
UNCOV
955
                            new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(findUserOrDie()), privateUrlToken))));
×
956
        } catch (WrappedResponse ex) {
×
957
            logger.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage());
×
958
            return ex.getResponse();
×
959
        }
960
    }
961

962
    @GET
963
    @Path("{identifier}/assignments")
964
    public Response getAssignments(@PathParam("identifier") String id) {
965
        RoleAssignmentDTO.Converter converter = new RoleAssignmentDTO.Converter();
×
966
        return response(req -> ok(execCommand(new ListRoleAssignments(req,
×
967
                findDatasetOrDie(id))).stream()
×
968
                .map(converter::convert)
×
969
                .collect(Collectors.toList())));
×
970
    }
971

972
    @GET
973
    @Path("{id}/privateUrl")
974
    public Response getPrivateUrlData(@PathParam("id") String idSupplied) {
UNCOV
975
        return response(req -> {
×
UNCOV
976
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
UNCOV
977
            return privateUrl != null
×
UNCOV
978
                    ? ok(new PrivateUrlDTO.Converter().convert(privateUrl))
×
UNCOV
979
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
980
        });
981
    }
982

983
    @POST
984
    @ApiWriteOperation
985
    @Path("{id}/privateUrl")
986
    public Response createPrivateUrl(@PathParam("id") String idSupplied) {
987
        return response(req -> ok(
×
988
                new PrivateUrlDTO.Converter().convert(
×
989
                        execCommand(new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied))))));
×
990
    }
991

992
    @DELETE
993
    @ApiWriteOperation
994
    @Path("{id}/privateUrl")
995
    public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
996
        return response(req -> {
×
997
            Dataset dataset = findDatasetOrDie(idSupplied);
×
998
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
999
            if (privateUrl != null) {
×
UNCOV
1000
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
1001
                return ok("Private URL deleted.");
×
1002
            } else {
1003
                return notFound("No Private URL to delete.");
×
1004
            }
1005
        });
1006
    }
1007

1008
    @GET
1009
    @Path("{id}/thumbnail/candidates")
1010
    public Response getDatasetThumbnailCandidates(@PathParam("id") String idSupplied) {
1011
        try {
UNCOV
1012
            Dataset dataset = findDatasetOrDie(idSupplied);
×
UNCOV
1013
            boolean canUpdateThumbnail = false;
×
1014
            try {
UNCOV
1015
                canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset)
×
UNCOV
1016
                        .canIssue(UpdateDatasetThumbnailCommand.class);
×
UNCOV
1017
            } catch (WrappedResponse ex) {
×
1018
                logger.info("Exception thrown while trying to figure out permissions while getting thumbnail for dataset id "
×
1019
                        + dataset.getId() + ": " + ex.getLocalizedMessage());
×
1020
            }
×
1021
            if (!canUpdateThumbnail) {
×
UNCOV
1022
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
×
1023
            }
1024
            JsonArrayBuilder data = Json.createArrayBuilder();
×
1025
            for (DatasetThumbnail datasetThumbnail : datasetThumbnailService.getThumbnailCandidates(dataset, true)) {
×
UNCOV
1026
                JsonObjectBuilder candidate = Json.createObjectBuilder();
×
UNCOV
1027
                String base64image = datasetThumbnail.getBase64image();
×
UNCOV
1028
                if (base64image != null) {
×
UNCOV
1029
                    logger.fine("found a candidate!");
×
UNCOV
1030
                    candidate.add("base64image", base64image);
×
1031
                }
UNCOV
1032
                DataFile dataFile = datasetThumbnail.getDataFile();
×
UNCOV
1033
                if (dataFile != null) {
×
UNCOV
1034
                    candidate.add("dataFileId", dataFile.getId());
×
1035
                }
1036
                data.add(candidate);
×
1037
            }
×
1038
            return ok(data);
×
1039
        } catch (WrappedResponse ex) {
×
1040
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
×
1041
        }
1042
    }
1043

1044
    @GET
1045
    @Produces({"image/png"})
1046
    @Path("{id}/thumbnail")
1047
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
1048
        try {
UNCOV
1049
            Dataset dataset = findDatasetOrDie(idSupplied);
×
UNCOV
1050
            InputStream is = datasetThumbnailService.getThumbnailAsInputStream(dataset);
×
1051
            if (is == null) {
×
1052
                return notFound("Thumbnail not available");
×
1053
            }
1054
            return Response.ok(is).build();
×
1055
        } catch (WrappedResponse wr) {
×
1056
            return notFound("Thumbnail not available");
×
1057
        }
1058
    }
1059

1060
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
1061
    @POST
1062
    @ApiWriteOperation
1063
    @Path("{id}/thumbnail/{dataFileId}")
1064
    public Response setDataFileAsThumbnail(@PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
1065
        try {
UNCOV
1066
            DatasetThumbnail datasetThumbnail = execCommand(
×
1067
                    new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied),
×
1068
                            UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
1069
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
UNCOV
1070
        } catch (WrappedResponse wr) {
×
UNCOV
1071
            return wr.getResponse();
×
1072
        }
1073
    }
1074

1075
    @POST
1076
    @ApiWriteOperation
1077
    @Path("{id}/thumbnail")
1078
    @Consumes(MediaType.MULTIPART_FORM_DATA)
1079
    public Response uploadDatasetLogo(@PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream
1080
    ) {
1081
        try {
UNCOV
1082
            DatasetThumbnail datasetThumbnail = execCommand(
×
UNCOV
1083
                    new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied),
×
1084
                    UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
1085
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
1086
        } catch (WrappedResponse wr) {
×
1087
            return wr.getResponse();
×
1088
        }
1089
    }
1090

1091
    @DELETE
1092
    @ApiWriteOperation
1093
    @Path("{id}/thumbnail")
1094
    public Response removeDatasetLogo(@PathParam("id") String idSupplied) {
1095
        try {
1096
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied),
×
1097
                            UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
1098
            return ok("Dataset thumbnail removed.");
×
1099
        } catch (WrappedResponse wr) {
×
1100
            return wr.getResponse();
×
1101
        }
1102
    }
1103

1104
    @GET
1105
    @ApiWriteOperation
1106
    @Path("{identifier}/dataCaptureModule/rsync")
1107
    public Response getRsync(@PathParam("identifier") String id) {
1108
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
UNCOV
1109
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
×
UNCOV
1110
            return error(Response.Status.METHOD_NOT_ALLOWED,
×
1111
                         SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
1112
        }
1113
        Dataset dataset;
1114
        try {
UNCOV
1115
            dataset = findDatasetOrDie(id);
×
UNCOV
1116
            AuthenticatedUser user = findAuthenticatedUserOrDie();
×
UNCOV
1117
            ScriptRequestResponse scriptRequestResponse = execCommand(
×
UNCOV
1118
                    new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
×
1119

UNCOV
1120
            DatasetLock lock = datasetDao.addDatasetLock(
×
UNCOV
1121
                    dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
×
UNCOV
1122
            if (lock == null) {
×
1123
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
×
UNCOV
1124
                return error(Response.Status.FORBIDDEN,
×
UNCOV
1125
                             "Failed to lock the dataset (dataset id=" + dataset.getId() + ")");
×
1126
            }
1127
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN));
×
1128
        } catch (WrappedResponse wr) {
×
1129
            return wr.getResponse();
×
1130
        } catch (EJBException ex) {
×
1131
            return error(Response.Status.INTERNAL_SERVER_ERROR,
×
UNCOV
1132
                         "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
×
1133
        }
1134
    }
1135

1136
    /**
1137
     * This api endpoint triggers the creation of a "package" file in a dataset
1138
     * after that package has been moved onto the same filesystem via the Data Capture Module.
1139
     * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file.
1140
     * The "package" can be downloaded over RSAL.
1141
     * <p>
1142
     * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly.
1143
     * <p>
1144
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
1145
     * But due to the possibly immense number of files (millions) the package approach was taken.
1146
     * This is relevant because the posix ("file") code contains many remnants of that development work.
1147
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
1148
     * -MAD 4.9.1
1149
     */
1150
    @POST
1151
    @ApiWriteOperation
1152
    @Path("{identifier}/dataCaptureModule/checksumValidation")
1153
    public Response receiveChecksumValidationResults(@PathParam("identifier") String id, JsonObject jsonFromDcm) {
1154
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
×
1155
        AuthenticatedUser authenticatedUser;
1156
        try {
1157
            authenticatedUser = findAuthenticatedUserOrDie();
×
1158
        } catch (WrappedResponse ex) {
×
1159
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
×
UNCOV
1160
        }
×
UNCOV
1161
        if (!authenticatedUser.isSuperuser()) {
×
1162
            return error(Response.Status.FORBIDDEN, "Superusers only.");
×
1163
        }
UNCOV
1164
        String statusMessageFromDcm = jsonFromDcm.getString("status");
×
1165
        try {
UNCOV
1166
            Dataset dataset = findDatasetOrDie(id);
×
1167
            if ("validation passed".equals(statusMessageFromDcm)) {
×
1168
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
×
1169

1170
                String storageDriver = (System.getProperty("dataverse.files.storage-driver-id") != null)
×
1171
                        ? System.getProperty("dataverse.files.storage-driver-id") : "file";
×
1172
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
×
UNCOV
1173
                int totalSize = jsonFromDcm.getInt("totalSize");
×
1174

1175
                if (storageDriver.equals("file")) {
×
1176
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());
×
1177

1178
                    ImportMode importMode = ImportMode.MERGE;
×
1179
                    try {
UNCOV
1180
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(
×
UNCOV
1181
                                createDataverseRequest(findUserOrDie()), dataset, uploadFolder, (long) totalSize, importMode));
×
UNCOV
1182
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
×
1183
                        String message = jsonFromImportJobKickoff.getString("message");
×
UNCOV
1184
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
UNCOV
1185
                        job.add("jobId", jobId);
×
1186
                        job.add("message", message);
×
UNCOV
1187
                        return ok(job);
×
1188
                    } catch (WrappedResponse wr) {
×
1189
                        String message = wr.getMessage();
×
1190
                        return error(Response.Status.INTERNAL_SERVER_ERROR,
×
1191
                                     "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
1192
                    }
UNCOV
1193
                } else if (storageDriver.equals("s3")) {
×
1194
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
×
1195
                    try {
1196

1197
                        //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package
1198
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
×
1199
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, totalSize);
×
1200

1201
                        if (packageFile == null) {
×
1202
                            logger.log(Level.SEVERE, "S3 File package import failed.");
×
UNCOV
1203
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
×
1204
                        }
UNCOV
1205
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
×
1206
                        if (dcmLock == null) {
×
UNCOV
1207
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
×
1208
                        } else {
1209
                            datasetDao.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
×
1210
                            dataset.removeLock(dcmLock);
×
1211
                        }
1212

1213
                        // update version using the command engine to enforce user permissions and constraints
1214
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
×
1215
                            try {
1216
                                Command<Dataset> cmd;
1217
                                cmd = new UpdateDatasetVersionCommand(dataset,
×
1218
                                        new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
UNCOV
1219
                                commandEngine.submit(cmd);
×
1220
                            } catch (CommandException ex) {
×
UNCOV
1221
                                return error(Response.Status.INTERNAL_SERVER_ERROR,
×
1222
                                             "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
×
UNCOV
1223
                            }
×
1224
                        } else {
1225
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
×
1226
                                    + "single version in draft mode.";
UNCOV
1227
                            logger.log(Level.SEVERE, constraintError);
×
1228
                        }
UNCOV
1229
                        JsonObjectBuilder job = Json.createObjectBuilder();
×
UNCOV
1230
                        return ok(job);
×
UNCOV
1231
                    } catch (IOException e) {
×
UNCOV
1232
                        String message = e.getMessage();
×
UNCOV
1233
                        return error(Response.Status.INTERNAL_SERVER_ERROR,
×
1234
                                     "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
1235
                    }
1236
                } else {
1237
                    return error(Response.Status.INTERNAL_SERVER_ERROR,
×
1238
                                 "Invalid storage driver in Dataverse, not compatible with dcm");
1239
                }
1240
            } else if ("validation failed".equals(statusMessageFromDcm)) {
×
1241
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(
×
1242
                        Permission.EditDataset,
1243
                        dataset);
1244
                distinctAuthors.values().forEach((value) -> userNotificationService.sendNotificationWithEmail(value,
×
1245
                        new Timestamp(new Date().getTime()), NotificationType.CHECKSUMFAIL, dataset.getId(), NotificationObjectType.DATASET));
×
UNCOV
1246
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
×
UNCOV
1247
                if (superUsers != null && !superUsers.isEmpty()) {
×
UNCOV
1248
                    superUsers.forEach((au) -> userNotificationService.sendNotificationWithEmail(au,
×
UNCOV
1249
                            new Timestamp(new Date().getTime()), NotificationType.CHECKSUMFAIL, dataset.getId(), NotificationObjectType.DATASET));
×
1250
                }
UNCOV
1251
                return ok("User notified about checksum validation failure.");
×
1252
            } else {
UNCOV
1253
                return error(Response.Status.BAD_REQUEST,
×
1254
                             "Unexpected status cannot be processed: " + statusMessageFromDcm);
1255
            }
UNCOV
1256
        } catch (WrappedResponse ex) {
×
UNCOV
1257
            return ex.getResponse();
×
1258
        }
1259
    }
1260

1261
    @POST
1262
    @ApiWriteOperation
1263
    @Path("{id}/submitForReview")
1264
    @Consumes(MediaType.APPLICATION_JSON)
1265
    public Response submitForReview(@PathParam("id") String idSupplied, SubmitForReviewDataDTO submitForReviewData) {
1266
        try {
1267
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()),
×
UNCOV
1268
                    findDatasetOrDie(idSupplied), submitForReviewData.getComment()));
×
UNCOV
1269
            JsonObjectBuilder result = Json.createObjectBuilder();
×
1270
            result.add("inReview", updatedDataset.isLockedFor(DatasetLock.Reason.InReview));
×
1271
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
1272
            return ok(result);
×
1273
        } catch (WrappedResponse wr) {
×
1274
            return wr.getResponse();
×
UNCOV
1275
        } catch (NoDatasetFilesException ex) {
×
UNCOV
1276
            return error(Response.Status.INTERNAL_SERVER_ERROR,
×
1277
                         "Unable to submit dataset for review, since there are no files in it.");
1278
        }
1279
    }
1280

1281
    @POST
1282
    @ApiWriteOperation
1283
    @Path("{id}/returnToAuthor")
1284
    public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBody) {
UNCOV
1285
        if (jsonBody == null || jsonBody.isEmpty()) {
×
UNCOV
1286
            return error(Response.Status.BAD_REQUEST,
×
1287
                         "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
1288
        }
UNCOV
1289
        StringReader rdr = new StringReader(jsonBody);
×
UNCOV
1290
        JsonObject json = Json.createReader(rdr).readObject();
×
1291
        try {
UNCOV
1292
            Dataset dataset = findDatasetOrDie(idSupplied);
×
1293
            String reasonForReturn;
UNCOV
1294
            reasonForReturn = json.getString("reasonForReturn");
×
1295
            // TODO: Once we add a box for the curator to type into, pass the reason for return to the
1296
            //  ReturnDatasetToAuthorCommand and delete this check and call to setReturnReason on the API side.
UNCOV
1297
            if (reasonForReturn == null || reasonForReturn.isEmpty()) {
×
1298
                return error(Response.Status.BAD_REQUEST,
×
1299
                             "You must enter a reason for returning a dataset to the author(s).");
1300
            }
UNCOV
1301
            AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
×
UNCOV
1302
            Map<String, String> params = new HashMap<>();
×
UNCOV
1303
            params.put(NotificationParameter.MESSAGE.key(), reasonForReturn);
×
UNCOV
1304
            params.put(NotificationParameter.REPLY_TO.key(), authenticatedUser.getEmail());
×
1305
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(
×
1306
                    authenticatedUser), dataset, params));
1307

1308
            JsonObjectBuilder result = Json.createObjectBuilder();
×
UNCOV
1309
            result.add("inReview", false);
×
1310
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
UNCOV
1311
            return ok(result);
×
UNCOV
1312
        } catch (WrappedResponse wr) {
×
UNCOV
1313
            return wr.getResponse();
×
1314
        }
1315
    }
1316

1317
    /**
1318
     * Add a File to an existing Dataset
1319
     */
1320
    @POST
1321
    @ApiWriteOperation
1322
    @Path("{id}/add")
1323
    @Consumes(MediaType.MULTIPART_FORM_DATA)
1324
    public Response addFileToDataset(@PathParam("id") String idSupplied,
1325
                                     @FormDataParam("jsonData") String jsonData,
1326
                                     @FormDataParam("file") InputStream fileInputStream,
1327
                                     @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
1328
                                     @FormDataParam("file") final FormDataBodyPart formDataBodyPart) {
UNCOV
1329
        if (!systemConfig.isHTTPUpload()) {
×
1330
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
1331
        }
1332

1333
        // (1) Get the user from the API key
1334
        User authUser;
1335
        try {
UNCOV
1336
            authUser = findUserOrDie();
×
UNCOV
1337
        } catch (WrappedResponse ex) {
×
1338
            return error(Response.Status.FORBIDDEN,
×
1339
                         BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
1340
            );
1341
        }
×
1342

1343
        // (2) Get the Dataset Id
1344
        Dataset dataset;
1345
        try {
1346
            dataset = findDatasetOrDie(idSupplied);
×
1347
        } catch (WrappedResponse wr) {
×
1348
            return wr.getResponse();
×
1349
        }
×
1350

1351
        // (2a) Make sure dataset does not have package file
1352
        for (DatasetVersion dv : dataset.getVersions()) {
×
1353
            if (dv.isHasPackageFile()) {
×
UNCOV
1354
                return error(Response.Status.FORBIDDEN,
×
UNCOV
1355
                             ResourceBundle.getBundle("Bundle").getString("file.api.alreadyHasPackageFile")
×
1356
                );
1357
            }
1358
        }
×
1359

1360
        // (3) Get the file name and content type
UNCOV
1361
        String newFilename = contentDispositionHeader.getFileName();
×
UNCOV
1362
        String newFileContentType = formDataBodyPart.getMediaType().toString();
×
1363

1364
        // (2a) Load up optional params via JSON
1365
        OptionalFileParams optionalFileParams;
1366
        logger.fine("Loading (api) jsonData: " + jsonData);
×
1367

1368
        try {
UNCOV
1369
            optionalFileParams = optionalFileParamsSvc.create(jsonData);
×
UNCOV
1370
        } catch (DataFileTagException ex) {
×
1371
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
1372
        }
×
1373

1374
        try {
1375
            datasetsValidators.validateFileTermsOfUseDTO(optionalFileParams.getFileTermsOfUseDTO());
×
1376
        } catch (MissingArgumentException | InvalidParameterException pe) {
×
UNCOV
1377
            return error(Response.Status.BAD_REQUEST, pe.getMessage());
×
UNCOV
1378
        } catch (EJBException ejbe) {
×
UNCOV
1379
            return error(Response.Status.BAD_REQUEST, ejbe.getCause().getMessage());
×
UNCOV
1380
        }
×
1381

1382
        // (3) Create the AddReplaceFileHelper object
UNCOV
1383
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
UNCOV
1384
        AddReplaceFileHelper addFileHelper =
×
1385
                new AddReplaceFileHelper(dvRequest2, ingestService, fileService, dataFileCreator, permissionSvc, commandEngine, optionalFileParamsSvc);
1386

1387
        // (4) Run "runAddFileByDatasetId"
1388
        try {
1389
            addFileHelper.runAddFileByDataset(dataset, newFilename, newFileContentType, fileInputStream, optionalFileParams);
×
1390
        } finally {
UNCOV
1391
            IOUtils.closeQuietly(fileInputStream);
×
1392
        }
1393

1394
        if (addFileHelper.hasError()) {
×
1395
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
1396
        } else {
UNCOV
1397
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
1398
            try {
1399
                // Todo We need a consistent, sane way to communicate a human readable message to an API client suitable
1400
                // for human consumption. Imagine if the UI were built in Angular or React and we want to return a
1401
                // message from the API as-is to the user. Human readable.
1402
                logger.fine("successMsg: " + successMsg);
×
1403
                return ok(addFileHelper.getSuccessResult());
×
1404
                // "Look at that!  You added a file! (hey hey, it may have worked)");
UNCOV
1405
            } catch (NoFilesException ex) {
×
UNCOV
1406
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
UNCOV
1407
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");
×
1408
            }
1409
        }
1410
    }
1411

1412
    @GET
1413
    @Path("{identifier}/locks")
1414
    public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
1415
        Dataset dataset;
1416
        try {
1417
            dataset = findDatasetOrDie(id);
×
1418
            Set<DatasetLock> locks;
1419
            if (lockType == null) {
×
1420
                locks = dataset.getLocks();
×
1421
            } else {
1422
                // request for a specific type lock:
1423
                DatasetLock lock = dataset.getLockFor(lockType);
×
1424
                locks = new HashSet<>();
×
1425
                if (lock != null) {
×
1426
                    locks.add(lock);
×
1427
                }
1428
            }
1429
            List<DatasetLockDTO> allLocks = locks.stream()
×
UNCOV
1430
                    .map(l -> new DatasetLockDTO.Converter().convert(l))
×
1431
                    .collect(Collectors.toList());
×
1432
            return ok(allLocks);
×
UNCOV
1433
        } catch (WrappedResponse wr) {
×
1434
            return wr.getResponse();
×
1435
        }
1436
    }
1437

1438
    @DELETE
1439
    @ApiWriteOperation
1440
    @Path("{identifier}/locks")
1441
    public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
UNCOV
1442
        return response(req -> {
×
1443
            try {
1444
                AuthenticatedUser user = findAuthenticatedUserOrDie();
×
UNCOV
1445
                if (!user.isSuperuser()) {
×
1446
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
1447
                }
1448
                Dataset dataset = findDatasetOrDie(id);
×
1449

UNCOV
1450
                if (lockType == null) {
×
UNCOV
1451
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
UNCOV
1452
                    for (DatasetLock lock : dataset.getLocks()) {
×
UNCOV
1453
                        locks.add(lock.getReason());
×
UNCOV
1454
                    }
×
UNCOV
1455
                    if (!locks.isEmpty()) {
×
UNCOV
1456
                        for (DatasetLock.Reason locktype : locks) {
×
1457
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
1458
                            // refresh the dataset:
1459
                            dataset = findDatasetOrDie(id);
×
1460
                        }
×
1461
                        // kick of dataset reindexing, in case the locks removed affected the search card:
UNCOV
1462
                        indexService.indexDataset(dataset, true);
×
1463
                        return ok("locks removed");
×
1464
                    }
1465
                    return ok("dataset not locked");
×
1466
                }
1467
                // request for a specific type lock:
1468
                DatasetLock lock = dataset.getLockFor(lockType);
×
1469
                if (lock != null) {
×
UNCOV
1470
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
1471
                    // refresh the dataset:
UNCOV
1472
                    dataset = findDatasetOrDie(id);
×
1473
                    // ... and kick of dataset reindexing, in case the lock removed affected the search card:
1474
                    indexService.indexDataset(dataset, true);
×
1475
                    return ok("lock type " + lock.getReason() + " removed");
×
1476
                }
UNCOV
1477
                return ok("no lock type " + lockType + " on the dataset");
×
UNCOV
1478
            } catch (WrappedResponse wr) {
×
UNCOV
1479
                return wr.getResponse();
×
1480
            }
1481
        });
1482
    }
1483

1484
    @POST
1485
    @ApiWriteOperation
1486
    @Path("{identifier}/lock/{type}")
1487
    public Response lockDataset(@PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
UNCOV
1488
        return response(req -> {
×
1489
            try {
UNCOV
1490
                AuthenticatedUser user = findAuthenticatedUserOrDie();
×
UNCOV
1491
                if (!user.isSuperuser()) {
×
UNCOV
1492
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
1493
                }
UNCOV
1494
                Dataset dataset = findDatasetOrDie(id);
×
1495
                DatasetLock lock = dataset.getLockFor(lockType);
×
UNCOV
1496
                if (lock != null) {
×
UNCOV
1497
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
1498
                }
1499
                lock = new DatasetLock(lockType, user);
×
1500
                execCommand(new AddLockCommand(req, dataset, lock));
×
1501
                // refresh the dataset:
1502
                dataset = findDatasetOrDie(id);
×
1503
                // ... and kick of dataset reindexing:
UNCOV
1504
                indexService.indexDataset(dataset, true);
×
1505
                return ok("dataset locked with lock type " + lockType);
×
UNCOV
1506
            } catch (WrappedResponse wr) {
×
UNCOV
1507
                return wr.getResponse();
×
1508
            }
1509
        });
1510
    }
1511

1512
    @GET
1513
    @Path("{id}/filelabels")
1514
    @Produces(MediaType.APPLICATION_JSON)
1515
    public Response listLabels(@PathParam("id") String datasetId) throws WrappedResponse {
1516
        Dataset dataset = findDatasetOrDie(datasetId);
×
1517
        return ok(fileLabelsService.prepareFileLabels(dataset, new FileLabelsChangeOptionsDTO()));
×
1518
    }
1519

1520
    @POST
1521
    @ApiWriteOperation
1522
    @Path("{id}/filelabels")
1523
    @Consumes(MediaType.APPLICATION_JSON)
1524
    @Produces(MediaType.APPLICATION_JSON)
1525
    public Response changeLabels(@PathParam("id") String datasetId, FileLabelsChangeOptionsDTO options) throws WrappedResponse {
UNCOV
1526
        Dataset dataset = findDatasetOrDie(datasetId);
×
1527
        List<FileLabelInfo> changedLabels;
1528
        try {
UNCOV
1529
            changedLabels = fileLabelsService.changeLabels(fileLabelsService.prepareFileLabels(dataset, options), options);
×
1530
            List<FileLabelInfo> result = fileLabelsService.updateDataset(dataset, changedLabels, options);
×
UNCOV
1531
            return ok(result.stream().filter(FileLabelInfo::isAffected).collect(Collectors.toList()));
×
1532
        } catch (EJBException ee) {
×
1533
            if (ee.getCause() instanceof IllegalStateException) {
×
UNCOV
1534
                throw new WrappedResponse(badRequest("Error occurred – probably input contained duplicated filenames"));
×
1535
            } else {
1536
                throw ee;
×
1537
            }
1538
        }
1539
    }
1540

1541
    // -------------------- PRIVATE --------------------
1542

1543
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
1544
        try (StringReader rdr = new StringReader(jsonBody)) {
×
1545

1546
            Dataset dataset = findDatasetOrDie(id);
×
1547
            JsonObject json = Json.createReader(rdr).readObject();
×
1548
            DatasetVersion dsv = dataset.getEditVersion();
×
1549

1550
            List<DatasetField> fields;
1551

1552
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1553
            if (fieldsJson == null) {
×
1554
                fields = new LinkedList<>(jsonParser().parseField(json, Boolean.FALSE));
×
1555
            } else {
1556
                fields = jsonParser().parseMultipleFields(json);
×
1557
            }
1558

1559
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1560

1561
            List<DatasetField> dsfChildsToRemove = new ArrayList<>();
×
1562

UNCOV
1563
            Map<DatasetFieldType, List<DatasetField>> fieldsToRemoveGroupedByType = fields.stream()
×
1564
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));
×
1565

1566
            Map<DatasetFieldType, List<DatasetField>> oldFieldsGroupedByType = dsv.getDatasetFields().stream()
×
1567
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));
×
1568

UNCOV
1569
            for (Map.Entry<DatasetFieldType, List<DatasetField>> fieldsToRemoveEntry : fieldsToRemoveGroupedByType.entrySet()) {
×
1570
                for (DatasetField removableField : fieldsToRemoveEntry.getValue()) {
×
1571
                    boolean valueFound = false;
×
1572
                    for (DatasetField oldField : oldFieldsGroupedByType.get(fieldsToRemoveEntry.getKey())) {
×
1573
                        if (oldField.getDatasetFieldType().isControlledVocabulary()) {
×
1574
                            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<>();
×
1575
                            if (oldField.getDatasetFieldType().isAllowMultiples()) {
×
UNCOV
1576
                                for (ControlledVocabularyValue cvv : removableField.getControlledVocabularyValues()) {
×
UNCOV
1577
                                    for (ControlledVocabularyValue existing : oldField.getControlledVocabularyValues()) {
×
1578
                                        if (existing.getStrValue().equals(cvv.getStrValue())) {
×
1579
                                            controlledVocabularyItemsToRemove.add(existing);
×
1580
                                            valueFound = true;
×
1581
                                        }
UNCOV
1582
                                    }
×
UNCOV
1583
                                    if (!controlledVocabularyItemsToRemove.contains(cvv)) {
×
UNCOV
1584
                                        logger.log(Level.SEVERE, String.format("Delete metadata failed: %s: %s not found.",
×
1585
                                                cvv.getDatasetFieldType().getDisplayName(), cvv.getStrValue()));
×
1586
                                        return error(Response.Status.BAD_REQUEST,
×
1587
                                                String.format("Delete metadata failed: %s: %s not found.",
×
1588
                                                        cvv.getDatasetFieldType().getDisplayName(), cvv.getStrValue()));
×
1589
                                    }
1590
                                }
×
1591
                                for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
×
1592
                                    oldField.getControlledVocabularyValues().remove(remove);
×
UNCOV
1593
                                }
×
1594
                            } else {
1595
                                if (oldField.getSingleControlledVocabularyValue().getStrValue().equals(
×
UNCOV
1596
                                        removableField.getSingleControlledVocabularyValue().getStrValue())) {
×
1597
                                    oldField.setSingleControlledVocabularyValue(null);
×
1598
                                    valueFound = true;
×
1599
                                }
1600
                            }
1601
                        } else {
×
1602
                            if (removableField.getDatasetFieldType().isPrimitive()) {
×
1603
                                if (oldField.getFieldValue().getOrElse("")
×
1604
                                        .equals(removableField.getFieldValue().getOrElse(""))) {
×
1605
                                    oldField.setFieldValue(null);
×
1606
                                    valueFound = true;
×
1607
                                }
1608
                            } else {
1609
                                if (DatasetFieldUtil.joinAllValues(removableField)
×
1610
                                        .equals(DatasetFieldUtil.joinAllValues(oldField))) {
×
1611
                                    dsfChildsToRemove.addAll(oldField.getDatasetFieldsChildren());
×
1612
                                    valueFound = true;
×
1613
                                }
1614
                            }
1615
                        }
UNCOV
1616
                    }
×
UNCOV
1617
                    if (!valueFound) {
×
1618
                        String displayValue = !removableField.getDisplayValue().isEmpty()
×
UNCOV
1619
                                ? removableField.getDisplayValue() : removableField.getCompoundDisplayValue();
×
1620
                        logger.log(Level.SEVERE, String.format("Delete metadata failed: %s: %s not found.",
×
1621
                                removableField.getDatasetFieldType().getDisplayName(), displayValue));
×
1622
                        return error(Response.Status.BAD_REQUEST, String.format("Delete metadata failed: %s: %s not found.",
×
UNCOV
1623
                                removableField.getDatasetFieldType().getDisplayName(), displayValue));
×
1624
                    }
UNCOV
1625
                }
×
1626
            }
×
1627

1628
            fields.stream()
×
1629
                    .map(DatasetField::getDatasetFieldsChildren)
×
UNCOV
1630
                    .forEach(datasetFields -> datasetFields.removeAll(dsfChildsToRemove));
×
1631

UNCOV
1632
            boolean updateDraft = dataset.getLatestVersion().isDraft();
×
1633
            DatasetVersion managedVersion = updateDraft
×
1634
                    ? execCommand(new UpdateDatasetVersionCommand(dataset, req)).getEditVersion()
×
1635
                    : execCommand(new CreateDatasetVersionCommand(req, dataset, dsv));
×
UNCOV
1636
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(managedVersion);
×
UNCOV
1637
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
1638
                    ? dto.clearEmailFields() : dto);
×
UNCOV
1639
        } catch (JsonParseException ex) {
×
UNCOV
1640
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
UNCOV
1641
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());
×
1642
        } catch (WrappedResponse ex) {
×
1643
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
×
UNCOV
1644
            return ex.getResponse();
×
1645
        }
1646
    }
1647

1648
    /**
     * Shared implementation for the metadata add/edit API endpoints: parses field
     * values from {@code jsonBody}, merges them into the edit version of dataset
     * {@code id}, and persists the result as a draft (creating a new draft version
     * when the latest version is already published).
     *
     * @param jsonBody    JSON payload; either a single field object or an object
     *                    with a "fields" array of field objects
     * @param id          dataset identifier (resolved via findDatasetOrDie)
     * @param req         the dataverse request used for command authorization
     * @param replaceData when true, existing single-value data is overwritten
     *                    instead of being rejected
     * @return 200 with the managed version DTO on success; 400 on parse/validation
     *         errors or when adding to a populated non-multiple field without
     *         replace=true
     */
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData) {
        try (StringReader rdr = new StringReader(jsonBody)) {

            Dataset ds = findDatasetOrDie(id);
            JsonObject json = Json.createReader(rdr).readObject();
            DatasetVersion dsv = ds.getEditVersion();

            List<DatasetField> freshFieldsModel;

            // Payload shape: a "fields" array means multiple fields; otherwise the
            // whole object is parsed as a single field.
            JsonArray fieldsJson = json.getJsonArray("fields");
            freshFieldsModel = fieldsJson == null
                    ? new LinkedList<>(jsonParser().parseField(json, Boolean.FALSE))
                    : jsonParser().parseMultipleFields(json);

            String valdationErrors = validateDatasetFieldValues(freshFieldsModel);

            if (!valdationErrors.isEmpty()) {
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            // loop through the update fields and compare to the version fields
            // if exist add/replace values if not add entire dsf
            Map<DatasetFieldType, List<DatasetField>> updatedFieldsGroupedByType = freshFieldsModel.stream()
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));

            Map<DatasetFieldType, List<DatasetField>> oldFieldsGroupedByType = dsv.getDatasetFields().stream()
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));

            ArrayList<DatasetField> fieldsToAdd = new ArrayList<>();

            for (Map.Entry<DatasetFieldType, List<DatasetField>> updatedFields : updatedFieldsGroupedByType.entrySet()) {
                for (DatasetField updateField : updatedFields.getValue()) {
                    // NOTE(review): if the version has no field of this type,
                    // get(...) returns null and this for-each throws NPE before the
                    // "missing types" handling below runs — presumably the edit
                    // version always carries blank fields for every type; confirm.
                    for (DatasetField oldField : oldFieldsGroupedByType.get(updatedFields.getKey())) {
                        // Merge is allowed when the existing field is empty, the
                        // type accepts multiple values, or replace was requested.
                        if (oldField.isEmpty() || oldField.getDatasetFieldType().isAllowMultiples() || replaceData) {
                            if (replaceData) {
                                // Clear out the existing value(s) first so the
                                // update fully replaces rather than appends.
                                if (oldField.getDatasetFieldType().isAllowMultiples()) {
                                    oldField.getControlledVocabularyValues().clear();
                                } else {
                                    oldField.setFieldValue("");
                                    oldField.setSingleControlledVocabularyValue(null);
                                }
                            }
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                                if (oldField.getDatasetFieldType().isAllowMultiples()) {
                                    // Append only vocabulary values not already
                                    // present (matched via the display string).
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                        if (!oldField.getDisplayValue().contains(cvv.getStrValue())) {
                                            oldField.getControlledVocabularyValues().add(cvv);
                                        }
                                    }
                                } else {
                                    oldField.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
                                }
                            } else {
                                if (updateField.getDatasetFieldType().isPrimitive()) {
                                    if (oldField.getDatasetFieldType().isAllowMultiples()) {
                                        // For multi-value primitives, a differing
                                        // value becomes an additional field row.
                                        if (!oldField.getFieldValue().getOrElse("")
                                                .equals(updateField.getFieldValue().getOrElse(""))) {
                                            updateField.setDatasetVersion(dsv);
                                            fieldsToAdd.add(updateField);
                                        }
                                    } else {
                                        oldField.setFieldValue(updateField.getValue());
                                    }
                                } else {
                                    // Compound field: compare the joined child
                                    // values; add as a new row only if different.
                                    if (!DatasetFieldUtil.joinAllValues(updateField)
                                            .equals(DatasetFieldUtil.joinAllValues(oldField))) {
                                        updateField.setDatasetVersion(dsv);
                                        fieldsToAdd.add(updateField);
                                    }
                                }
                            }
                        } else {
                            return error(Response.Status.BAD_REQUEST, String.format("You may not add data to a field that " +
                                    "already has data and does not allow multiples. Use replace=true to replace existing data (%s)",
                                    oldField.getDatasetFieldType().getDisplayName()));
                        }
                        // Only the first existing field of this type is ever
                        // compared against; remaining old fields are skipped.
                        break;
                    }

                    // NOTE(review): this block does not depend on updateField, yet
                    // it runs once per updateField iteration — the "missing type"
                    // fields and fieldsToAdd appear to be appended to the version
                    // repeatedly, which would create duplicate rows. Looks like it
                    // was meant to sit after both loops; confirm before relying on
                    // multi-field updates.
                    updatedFieldsGroupedByType.entrySet().stream()
                            .filter(fieldTypeListEntry -> !oldFieldsGroupedByType.containsKey(fieldTypeListEntry.getKey()))
                            .map(Map.Entry::getValue)
                            .forEach(fieldNotFound -> fieldNotFound.forEach(
                                    datasetField -> {
                                        datasetField.setDatasetVersion(dsv);
                                        dsv.getDatasetFields().add(datasetField);
                                    }));
                    dsv.getDatasetFields().addAll(fieldsToAdd);
                }
            }
            // Persist: update the existing draft in place, or spin the edits into
            // a brand-new draft version when the latest version is published.
            boolean updateDraft = ds.getLatestVersion().isDraft();
            DatasetVersion managedVersion = updateDraft
                    ? execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion()
                    : execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(managedVersion);
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
                    ? dto.clearEmailFields() : dto);
        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
            return ex.getResponse();
        }
    }
1756

1757
    private String validateDatasetFieldValues(List<DatasetField> fields) {
UNCOV
1758
        StringBuilder error = new StringBuilder();
×
1759
        for (DatasetField dsf : fields) {
×
UNCOV
1760
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
UNCOV
1761
                    && dsf.getDatasetFieldsChildren().isEmpty() && dsf.getFieldValue().isEmpty()) {
×
1762
                error.append("Empty multiple value for field: ")
×
1763
                        .append(dsf.getDatasetFieldType().getDisplayName())
×
1764
                        .append(" ");
×
1765
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getDatasetFieldsChildren().isEmpty()) {
×
1766
                error.append("Empty value for field: ")
×
UNCOV
1767
                        .append(dsf.getDatasetFieldType().getDisplayName())
×
1768
                        .append(" ");
×
1769
            }
1770
        }
×
UNCOV
1771
        return !error.toString().isEmpty() ? error.toString() : "";
×
1772
    }
1773

1774
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds) throws WrappedResponse {
UNCOV
1775
        DatasetVersion dsv = execCommand(chooseCommandForVersionFinding(versionNumber, ds, req));
×
1776
        if (dsv == null || dsv.getId() == null) {
×
1777
            throw new WrappedResponse(notFound(String.format("Dataset version %s of dataset %d not found", versionNumber, ds.getId())));
×
1778
        }
1779
        return dsv;
×
1780
    }
1781

1782
    private Command<DatasetVersion> chooseCommandForVersionFinding(String versionId, Dataset ds, DataverseRequest req)
1783
            throws WrappedResponse {
UNCOV
1784
        switch (versionId) {
×
1785
            case ":latest":
1786
                return new GetLatestAccessibleDatasetVersionCommand(req, ds);
×
1787
            case ":draft":
1788
                return new GetDraftVersionIfExists(req, ds);
×
1789
            case ":latest-published":
UNCOV
1790
                return new GetLatestPublishedDatasetVersionCommand(req, ds);
×
1791
            default:
1792
                try {
1793
                    String[] versions = versionId.split("\\.");
×
UNCOV
1794
                    if (versions.length == 1) {
×
UNCOV
1795
                        return new GetSpecificPublishedDatasetVersionCommand(req, ds, Long.parseLong(versions[0]), 0L);
×
UNCOV
1796
                    } else if (versions.length == 2) {
×
UNCOV
1797
                        return new GetSpecificPublishedDatasetVersionCommand(req, ds, Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
1798
                    }
UNCOV
1799
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
UNCOV
1800
                } catch (NumberFormatException nfe) {
×
UNCOV
1801
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
1802
                }
1803
        }
1804
    }
1805

1806
    private boolean isOriginalFormatRequested(MultivaluedMap<String, String> queryParameters) {
UNCOV
1807
        return queryParameters
×
UNCOV
1808
                .keySet().stream()
×
UNCOV
1809
                .filter("format"::equals)
×
UNCOV
1810
                .map(queryParameters::getFirst)
×
UNCOV
1811
                .anyMatch("original"::equals);
×
1812
    }
1813

1814
    private RoleAssignee findAssignee(String identifier) {
1815
        try {
UNCOV
1816
            return roleAssigneeSvc.getRoleAssignee(identifier);
×
UNCOV
1817
        } catch (EJBException ex) {
×
UNCOV
1818
            Throwable cause = ex;
×
UNCOV
1819
            while (cause.getCause() != null) {
×
UNCOV
1820
                cause = cause.getCause();
×
1821
            }
UNCOV
1822
            logger.log(Level.INFO, "Exception caught looking up RoleAssignee based on identifier ''{0}'': {1}",
×
UNCOV
1823
                    new Object[] {identifier, cause.getMessage()});
×
UNCOV
1824
            return null;
×
1825
        }
1826
    }
1827
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc