• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

CeON / dataverse / 987

pending completion
987

push

jenkins

GitHub
Closes #2339: Change File Labels API (#2347)

124 of 124 new or added lines in 5 files covered. (100.0%)

21199 of 69091 relevant lines covered (30.68%)

0.31 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/dataverse-webapp/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
1
package edu.harvard.iq.dataverse.api;
2

3
import com.amazonaws.services.pi.model.InvalidArgumentException;
4
import edu.harvard.iq.dataverse.DataFileServiceBean;
5
import edu.harvard.iq.dataverse.DatasetDao;
6
import edu.harvard.iq.dataverse.DataverseDao;
7
import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
8
import edu.harvard.iq.dataverse.EjbDataverseEngine;
9
import edu.harvard.iq.dataverse.PermissionServiceBean;
10
import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
11
import edu.harvard.iq.dataverse.S3PackageImporter;
12
import edu.harvard.iq.dataverse.api.annotations.ApiWriteOperation;
13
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
14
import edu.harvard.iq.dataverse.api.dto.DatasetLockDTO;
15
import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
16
import edu.harvard.iq.dataverse.api.dto.FileLabelsChangeOptionsDTO;
17
import edu.harvard.iq.dataverse.api.dto.FileMetadataDTO;
18
import edu.harvard.iq.dataverse.api.dto.MetadataBlockWithFieldsDTO;
19
import edu.harvard.iq.dataverse.api.dto.PrivateUrlDTO;
20
import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
21
import edu.harvard.iq.dataverse.api.dto.SubmitForReviewDataDTO;
22
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
23
import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
24
import edu.harvard.iq.dataverse.common.BundleUtil;
25
import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
26
import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse;
27
import edu.harvard.iq.dataverse.datafile.DataFileCreator;
28
import edu.harvard.iq.dataverse.datafile.file.FileDownloadAPIHandler;
29
import edu.harvard.iq.dataverse.dataset.DatasetService;
30
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
31
import edu.harvard.iq.dataverse.dataset.DatasetThumbnailService;
32
import edu.harvard.iq.dataverse.dataset.FileLabelInfo;
33
import edu.harvard.iq.dataverse.dataset.FileLabelsService;
34
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
35
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
36
import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
37
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
38
import edu.harvard.iq.dataverse.engine.command.Command;
39
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
40
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
41
import edu.harvard.iq.dataverse.engine.command.exception.NoDatasetFilesException;
42
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
43
import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
44
import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand;
45
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
46
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand;
47
import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
48
import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand;
49
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand;
50
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand;
51
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
52
import edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand;
53
import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand;
54
import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand;
55
import edu.harvard.iq.dataverse.engine.command.impl.GetDraftVersionIfExists;
56
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand;
57
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand;
58
import edu.harvard.iq.dataverse.engine.command.impl.GetPrivateUrlCommand;
59
import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand;
60
import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand;
61
import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand;
62
import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments;
63
import edu.harvard.iq.dataverse.engine.command.impl.ListVersionsCommand;
64
import edu.harvard.iq.dataverse.engine.command.impl.MoveDatasetCommand;
65
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
66
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult;
67
import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand;
68
import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
69
import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand;
70
import edu.harvard.iq.dataverse.engine.command.impl.SetDatasetCitationDateCommand;
71
import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand;
72
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetTargetURLCommand;
73
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand;
74
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
75
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand;
76
import edu.harvard.iq.dataverse.error.DataverseError;
77
import edu.harvard.iq.dataverse.export.ExportService;
78
import edu.harvard.iq.dataverse.export.ExporterType;
79
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
80
import edu.harvard.iq.dataverse.notification.NotificationObjectType;
81
import edu.harvard.iq.dataverse.notification.NotificationParameter;
82
import edu.harvard.iq.dataverse.notification.UserNotificationService;
83
import edu.harvard.iq.dataverse.persistence.datafile.DataFile;
84
import edu.harvard.iq.dataverse.persistence.dataset.ControlledVocabularyValue;
85
import edu.harvard.iq.dataverse.persistence.dataset.Dataset;
86
import edu.harvard.iq.dataverse.persistence.dataset.DatasetField;
87
import edu.harvard.iq.dataverse.persistence.dataset.DatasetFieldType;
88
import edu.harvard.iq.dataverse.persistence.dataset.DatasetFieldUtil;
89
import edu.harvard.iq.dataverse.persistence.dataset.DatasetLock;
90
import edu.harvard.iq.dataverse.persistence.dataset.DatasetVersion;
91
import edu.harvard.iq.dataverse.persistence.dataset.MetadataBlock;
92
import edu.harvard.iq.dataverse.persistence.dataverse.Dataverse;
93
import edu.harvard.iq.dataverse.persistence.user.AuthenticatedUser;
94
import edu.harvard.iq.dataverse.persistence.user.DataverseRole;
95
import edu.harvard.iq.dataverse.persistence.user.DataverseRole.BuiltInRole;
96
import edu.harvard.iq.dataverse.persistence.user.NotificationType;
97
import edu.harvard.iq.dataverse.persistence.user.Permission;
98
import edu.harvard.iq.dataverse.persistence.user.RoleAssignee;
99
import edu.harvard.iq.dataverse.persistence.user.User;
100
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
101
import edu.harvard.iq.dataverse.search.index.IndexServiceBean;
102
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
103
import edu.harvard.iq.dataverse.util.ArchiverUtil;
104
import edu.harvard.iq.dataverse.util.EjbUtil;
105
import edu.harvard.iq.dataverse.util.SystemConfig;
106
import edu.harvard.iq.dataverse.util.json.JsonParseException;
107
import io.vavr.control.Either;
108
import io.vavr.control.Try;
109
import org.apache.commons.cli.MissingArgumentException;
110
import org.apache.commons.io.IOUtils;
111
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
112
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
113
import org.glassfish.jersey.media.multipart.FormDataParam;
114

115
import javax.ejb.EJBException;
116
import javax.inject.Inject;
117
import javax.json.Json;
118
import javax.json.JsonArray;
119
import javax.json.JsonArrayBuilder;
120
import javax.json.JsonObject;
121
import javax.json.JsonObjectBuilder;
122
import javax.servlet.http.HttpServletRequest;
123
import javax.servlet.http.HttpServletResponse;
124
import javax.ws.rs.Consumes;
125
import javax.ws.rs.DELETE;
126
import javax.ws.rs.GET;
127
import javax.ws.rs.POST;
128
import javax.ws.rs.PUT;
129
import javax.ws.rs.Path;
130
import javax.ws.rs.PathParam;
131
import javax.ws.rs.Produces;
132
import javax.ws.rs.QueryParam;
133
import javax.ws.rs.core.Context;
134
import javax.ws.rs.core.MediaType;
135
import javax.ws.rs.core.MultivaluedMap;
136
import javax.ws.rs.core.Response;
137
import javax.ws.rs.core.StreamingOutput;
138
import javax.ws.rs.core.UriInfo;
139
import java.io.IOException;
140
import java.io.InputStream;
141
import java.io.StringReader;
142
import java.security.InvalidParameterException;
143
import java.sql.Timestamp;
144
import java.text.ParseException;
145
import java.text.SimpleDateFormat;
146
import java.time.Clock;
147
import java.util.ArrayList;
148
import java.util.Collections;
149
import java.util.Date;
150
import java.util.HashMap;
151
import java.util.HashSet;
152
import java.util.LinkedHashMap;
153
import java.util.LinkedList;
154
import java.util.List;
155
import java.util.Map;
156
import java.util.Optional;
157
import java.util.ResourceBundle;
158
import java.util.Set;
159
import java.util.function.Function;
160
import java.util.logging.Level;
161
import java.util.logging.Logger;
162
import java.util.stream.Collectors;
163

164
@Path("datasets")
165
public class Datasets extends AbstractApiBean {
166

167
    private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName());
×
168

169
    private DatasetDao datasetDao;
170
    private DataverseDao dataverseDao;
171
    private UserNotificationService userNotificationService;
172
    private PermissionServiceBean permissionService;
173
    private AuthenticationServiceBean authenticationServiceBean;
174
    private DataFileServiceBean fileService;
175
    private IngestServiceBean ingestService;
176
    private EjbDataverseEngine commandEngine;
177
    private IndexServiceBean indexService;
178
    private S3PackageImporter s3PackageImporter;
179
    private SettingsServiceBean settingsService;
180
    private ExportService exportService;
181
    private DatasetService datasetSvc;
182
    private DatasetsValidators datasetsValidators;
183
    private OptionalFileParams optionalFileParamsSvc;
184
    private DataFileCreator dataFileCreator;
185
    private DatasetThumbnailService datasetThumbnailService;
186
    private FileDownloadAPIHandler fileDownloadAPIHandler;
187
    private DataverseRoleServiceBean rolesSvc;
188
    private RoleAssigneeServiceBean roleAssigneeSvc;
189
    private PermissionServiceBean permissionSvc;
190
    private FileLabelsService fileLabelsService;
191

192
    // -------------------- CONSTRUCTORS --------------------
193

194
    public Datasets() { }
×
195

196
    @Inject
197
    public Datasets(DatasetDao datasetDao, DataverseDao dataverseDao,
198
                    UserNotificationService userNotificationService,
199
                    PermissionServiceBean permissionService, AuthenticationServiceBean authenticationServiceBean,
200
                    DataFileServiceBean fileService, IngestServiceBean ingestService,
201
                    EjbDataverseEngine commandEngine, IndexServiceBean indexService,
202
                    S3PackageImporter s3PackageImporter, SettingsServiceBean settingsService,
203
                    ExportService exportService, DatasetService datasetSvc,
204
                    DatasetsValidators datasetsValidators, OptionalFileParams optionalFileParamsSvc,
205
                    DataFileCreator dataFileCreator, DatasetThumbnailService datasetThumbnailService,
206
                    FileDownloadAPIHandler fileDownloadAPIHandler, DataverseRoleServiceBean rolesSvc,
207
                    RoleAssigneeServiceBean roleAssigneeSvc, PermissionServiceBean permissionSvc,
208
                    FileLabelsService fileLabelsService) {
×
209
        this.datasetDao = datasetDao;
×
210
        this.dataverseDao = dataverseDao;
×
211
        this.userNotificationService = userNotificationService;
×
212
        this.permissionService = permissionService;
×
213
        this.authenticationServiceBean = authenticationServiceBean;
×
214
        this.fileService = fileService;
×
215
        this.ingestService = ingestService;
×
216
        this.commandEngine = commandEngine;
×
217
        this.indexService = indexService;
×
218
        this.s3PackageImporter = s3PackageImporter;
×
219
        this.settingsService = settingsService;
×
220
        this.exportService = exportService;
×
221
        this.datasetSvc = datasetSvc;
×
222
        this.datasetsValidators = datasetsValidators;
×
223
        this.optionalFileParamsSvc = optionalFileParamsSvc;
×
224
        this.dataFileCreator = dataFileCreator;
×
225
        this.datasetThumbnailService = datasetThumbnailService;
×
226
        this.fileDownloadAPIHandler = fileDownloadAPIHandler;
×
227
        this.rolesSvc = rolesSvc;
×
228
        this.roleAssigneeSvc = roleAssigneeSvc;
×
229
        this.permissionSvc = permissionSvc;
×
230
        this.fileLabelsService = fileLabelsService;
×
231
    }
×
232

233
    // -------------------- LOGIC --------------------
234

235
    @GET
236
    @Path("{id}")
237
    public Response getDataset(@PathParam("id") String id) {
238
        return response(req -> {
×
239
            Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id)));
×
240
            DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
×
241
            DatasetDTO dataset = new DatasetDTO.Converter().convert(retrieved);
×
242
            if (latest != null) {
×
243
                DatasetVersionDTO latestVersion = new DatasetVersionDTO.Converter().convert(latest);
×
244
                latestVersion = settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
245
                        ? latestVersion.clearEmailFields() : latestVersion;
×
246
                Map<String, Object> dto = dataset.asMap();
×
247
                dto.put("latestVersion", latestVersion);
×
248
                return allowCors(ok(dto));
×
249
            } else {
250
                return allowCors(ok(dataset));
×
251
            }
252
        });
253
    }
254

255
    // TODO:
256
    // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
257
    // to obtain the dataset that we are trying to export - which would handle
258
    // Auth in the process... For now, Auth isn't necessary - since export ONLY
259
    // WORKS on published datasets, which are open to the world. -- L.A. 4.5
260

261
    @GET
262
    @Path("/export")
263
    @Produces({"application/xml", "application/json"})
264
    public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter) {
265

266
        Optional<ExporterType> exporterConstant = ExporterType.fromPrefix(exporter);
×
267

268
        if (!exporterConstant.isPresent()) {
×
269
            return error(Response.Status.BAD_REQUEST, exporter + " is not a valid exporter");
×
270
        }
271

272
        Dataset dataset = datasetDao.findByGlobalId(persistentId);
×
273
        if (dataset == null) {
×
274
            return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
×
275
        }
276

277
        Either<DataverseError, String> exportedDataset
×
278
                = exportService.exportDatasetVersionAsString(dataset.getReleasedVersion(), exporterConstant.get());
×
279

280
        if (exportedDataset.isLeft()) {
×
281
            return error(Response.Status.FORBIDDEN, exportedDataset.getLeft().getErrorMsg());
×
282
        }
283

284
        String mediaType = exportService.getMediaType(exporterConstant.get());
×
285
        return allowCors(Response.ok()
×
286
                .entity(exportedDataset.get())
×
287
                .type(mediaType)
×
288
                .build());
×
289
    }
290

291
    @DELETE
292
    @ApiWriteOperation
293
    @Path("{id}")
294
    public Response deleteDataset(@PathParam("id") String id) {
295
        // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
296
        // (and there's a comment that says "TODO: remove this command")
297
        //  do we need an exposed API call for it?
298
        // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand, if the dataset only has 1
299
        // version... In other words, the functionality currently provided by this API is covered between the
300
        // "deleteDraftVersion" and "destroyDataset" API calls. (The logic below follows the current implementation of
301
        // the underlying commands!)
302

303
        return response(req -> {
×
304
            Dataset doomed = findDatasetOrDie(id);
×
305
            DatasetVersion doomedVersion = doomed.getLatestVersion();
×
306
            User u = findUserOrDie();
×
307
            boolean destroy = false;
×
308

309
            if (doomed.getVersions().size() == 1) {
×
310
                if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
×
311
                    throw new WrappedResponse(
×
312
                            error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
×
313
                }
314
                destroy = true;
×
315
            } else {
316
                if (!doomedVersion.isDraft()) {
×
317
                    throw new WrappedResponse(
×
318
                            error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. " +
×
319
                                    "This API can only delete the latest version if it is a DRAFT"));
320
                }
321
            }
322

323
            // Gather the locations of the physical files that will need to be deleted once the destroy command
324
            // execution has been finalized:
325
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);
×
326
            execCommand(new DeleteDatasetCommand(req, findDatasetOrDie(id)));
×
327

328
            // If we have gotten this far, the destroy command has succeeded, so we can finalize it by permanently
329
            // deleting the physical files: (DataFileService will double-check that the datafiles no longer exist in the
330
            // database, before attempting to delete the physical files)
331
            if (!deleteStorageLocations.isEmpty()) {
×
332
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
333
            }
334

335
            return ok("Dataset " + id + " deleted");
×
336
        });
337
    }
338

339
    @DELETE
340
    @ApiWriteOperation
341
    @Path("{id}/destroy")
342
    public Response destroyDataset(@PathParam("id") String id) {
343
        return response(req -> {
×
344
            // first check if dataset is released, and if so, if user is a superuser
345
            Dataset doomed = findDatasetOrDie(id);
×
346
            User user = findUserOrDie();
×
347

348
            if (doomed.isReleased() && (!(user instanceof AuthenticatedUser) || !user.isSuperuser())) {
×
349
                throw new WrappedResponse(
×
350
                        error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
×
351
            }
352

353
            // Gather the locations of the physical files that will need to be deleted once the destroy command
354
            // execution has been finalized:
355
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
356
            execCommand(new DestroyDatasetCommand(doomed, req));
×
357

358
            // If we have gotten this far, the destroy command has succeeded, so we can finalize permanently deleting
359
            // the physical files: (DataFileService will double-check that the datafiles no longer exist in the
360
            // database, before attempting to delete the physical files)
361
            if (!deleteStorageLocations.isEmpty()) {
×
362
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
363
            }
364
            return ok("Dataset " + id + " destroyed");
×
365
        });
366
    }
367

368
    @DELETE
369
    @ApiWriteOperation
370
    @Path("{id}/versions/{versionId}")
371
    public Response deleteDraftVersion(@PathParam("id") String id, @PathParam("versionId") String versionId) {
372
        if (!":draft".equals(versionId)) {
×
373
            return badRequest("Only the :draft version can be deleted");
×
374
        }
375

376
        return response(req -> {
×
377
            Dataset dataset = findDatasetOrDie(id);
×
378
            DatasetVersion doomed = dataset.getLatestVersion();
×
379

380
            if (!doomed.isDraft()) {
×
381
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
×
382
            }
383

384
            // Gather the locations of the physical files that will need to be deleted once the destroy command
385
            // execution has been finalized:
386

387
            Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);
×
388

389
            execCommand(new DeleteDatasetVersionCommand(req, dataset));
×
390

391
            // If we have gotten this far, the delete command has succeeded - by either deleting the Draft version of a
392
            // published dataset, or destroying an unpublished one.
393
            // This means we can finalize permanently deleting the physical files: (DataFileService will double-check
394
            // that the datafiles no longer exist in the database, before attempting to delete the physical files)
395
            if (!deleteStorageLocations.isEmpty()) {
×
396
                fileService.finalizeFileDeletes(deleteStorageLocations);
×
397
            }
398

399
            return ok("Draft version of dataset " + id + " deleted");
×
400
        });
401
    }
402

403
    @DELETE
404
    @ApiWriteOperation
405
    @Path("{datasetId}/deleteLink/{linkedDataverseId}")
406
    public Response deleteDatasetLinkingDataverse(@PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
407
        boolean index = true;
×
408
        return response(req -> {
×
409
            execCommand(new DeleteDatasetLinkingDataverseCommand(
×
410
                    req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
×
411
            return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
×
412
        });
413
    }
414

415
    @PUT
416
    @ApiWriteOperation
417
    @Path("{id}/citationdate")
418
    public Response setCitationDate(@PathParam("id") String id, String dsfTypeName) {
419
        return response(req -> {
×
420
            if (dsfTypeName.trim().isEmpty()) {
×
421
                return badRequest("Please provide a dataset field type in the requst body.");
×
422
            }
423
            DatasetFieldType dsfType = null;
×
424
            if (!":publicationDate".equals(dsfTypeName)) {
×
425
                dsfType = datasetFieldSvc.findByName(dsfTypeName);
×
426
                if (dsfType == null) {
×
427
                    return badRequest("Dataset Field Type Name " + dsfTypeName + " not found.");
×
428
                }
429
            }
430

431
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
×
432
            return ok("Citation Date for dataset " + id + " set to: "
×
433
                    + (dsfType != null ? dsfType.getDisplayName() : "default"));
×
434
        });
435
    }
436

437
    @DELETE
438
    @ApiWriteOperation
439
    @Path("{id}/citationdate")
440
    public Response useDefaultCitationDate(@PathParam("id") String id) {
441
        return response(req -> {
×
442
            execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
×
443
            return ok("Citation Date for dataset " + id + " set to default");
×
444
        });
445
    }
446

447
    @GET
448
    @Path("{id}/versions")
449
    public Response listVersions(@PathParam("id") String id) {
450
        DatasetVersionDTO.Converter converter = new DatasetVersionDTO.Converter();
×
451
        boolean excludeEmails = settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport);
×
452
        return allowCors(response(req -> ok(
×
453
                execCommand(new ListVersionsCommand(req, findDatasetOrDie(id))).stream()
×
454
                        .map(v -> {
×
455
                            DatasetVersionDTO dto = converter.convert(v);
×
456
                            return excludeEmails ? dto.clearEmailFields() : dto;
×
457
                        })
458
                        .collect(Collectors.toList()))));
×
459
    }
460

461
    @GET
462
    @Path("{id}/versions/{versionId}")
463
    public Response getVersion(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
464
        return allowCors(response(req -> {
×
465
            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId));
×
466
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(datasetVersion);
×
467
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
468
                    ? dto.clearEmailFields() : dto);
×
469
        }));
470
    }
471

472
    @GET
473
    @Path("{id}/versions/{versionId}/files")
474
    public Response listVersionFiles(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
475
        return allowCors(response(req -> ok(new FileMetadataDTO.Converter().convert(
×
476
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId)).getFileMetadatas()))));
×
477
    }
478

479
    @GET
480
    @Path("{id}/versions/{versionId}/files/download")
481
    @Produces({"application/zip"})
482
    @ApiWriteOperation
483
    public Response getVersionFiles(@PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("gbrecs") boolean gbrecs,
484
                                    @Context HttpServletResponse response, @Context UriInfo uriInfo) {
485

486
        User apiTokenUser = Try.of(this::findUserOrDie)
×
487
                               .onFailure(throwable -> logger.log(Level.FINE, "Failed finding user for apiToken: ", throwable))
×
488
                               .get();
×
489

490
        String finalVersionId = versionId;
×
491
        if (!versionId.matches("[0-9]+")) {
×
492
            DataverseRequest dataverseRequest = createDataverseRequest(apiTokenUser);
×
493
            try {
494
                Dataset dataset = findDatasetOrDie(datasetId);
×
495
                DatasetVersion datasetVersion = getDatasetVersionOrDie(dataverseRequest, versionId, dataset);
×
496
                finalVersionId = datasetVersion.getId().toString();
×
497
            } catch (WrappedResponse wr) {
×
498
                return wr.getResponse();
×
499
            }
×
500
        }
501

502
        boolean originalFormatRequested = isOriginalFormatRequested(uriInfo.getQueryParameters());
×
503

504
        response.setHeader("Content-disposition", "attachment; filename=\"dataverse_files.zip\"");
×
505
        response.setHeader("Content-Type", "application/zip; name=\"dataverse_files.zip\"");
×
506

507
        StreamingOutput fileStream = fileDownloadAPIHandler.downloadFiles(apiTokenUser, finalVersionId, originalFormatRequested, gbrecs);
×
508
        return Response.ok(fileStream).build();
×
509
    }
510

511
    @GET
512
    @Path("{id}/versions/{versionId}/metadata")
513
    public Response getVersionMetadata(@PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
514
        MetadataBlockWithFieldsDTO.Creator creator = new MetadataBlockWithFieldsDTO.Creator();
×
515
        return allowCors(response(r -> {
×
516
            List<DatasetField> fields = getDatasetVersionOrDie(r, versionId, findDatasetOrDie(datasetId)).getDatasetFields();
×
517
            Map<String, MetadataBlockWithFieldsDTO> dto = DatasetField.groupByBlock(fields)
×
518
                    .entrySet().stream()
×
519
                    .map(e -> creator.create(e.getKey(), e.getValue()))
×
520
                    .collect(Collectors.toMap(
×
521
                            MetadataBlockWithFieldsDTO::getDisplayName, Function.identity(),
×
522
                            (prev, next) -> next, LinkedHashMap::new));
×
523
            if (settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)) {
×
524
                dto.values().forEach(MetadataBlockWithFieldsDTO::clearEmailFields);
×
525
            }
526
            return ok(dto);
×
527
        }));
528
    }
529

530
    @GET
531
    @Path("{id}/versions/{versionNumber}/metadata/{block}")
532
    public Response getVersionMetadataBlock(@PathParam("id") String datasetId,
533
                                            @PathParam("versionNumber") String versionNumber,
534
                                            @PathParam("block") String blockName) {
535

536
        return allowCors(response(req -> {
×
537
            DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId));
×
538

539
            Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
×
540
            for (Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet()) {
×
541
                if (p.getKey().getName().equals(blockName)) {
×
542
                    MetadataBlockWithFieldsDTO blockWithFields = new MetadataBlockWithFieldsDTO.Creator().create(p.getKey(), p.getValue());
×
543
                    if (settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)) {
×
544
                        blockWithFields.clearEmailFields();
×
545
                    }
546
                    return ok(blockWithFields);
×
547
                }
548
            }
×
549
            return notFound("metadata block named " + blockName + " not found");
×
550
        }));
551
    }
552

553
    @GET
554
    @Path("{id}/modifyRegistration")
555
    public Response updateDatasetTargetURL(@PathParam("id") String id) {
556
        return response(req -> {
×
557
            execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
×
558
            return ok("Dataset " + id + " target url updated");
×
559
        });
560
    }
561

562
    @POST
563
    @ApiWriteOperation
564
    @Path("/modifyRegistrationAll")
565
    public Response updateDatasetTargetURLAll() {
566
        return response(req -> {
×
567
            datasetDao.findAll().forEach(ds -> {
×
568
                try {
569
                    execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
×
570
                } catch (WrappedResponse ex) {
×
571
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
×
572
                }
×
573
            });
×
574
            return ok("Update All Dataset target url completed");
×
575
        });
576
    }
577

578
    /**
     * Pushes the dataset's current metadata to the PID provider
     * (via {@code UpdateDvObjectPIDMetadataCommand}). Only released datasets
     * are eligible; unreleased datasets get a 400 with a bundle message.
     *
     * NOTE(review): the first try block swallows WrappedResponse from
     * findDatasetOrDie (logs SEVERE and falls through); the lambda below then
     * re-resolves the dataset, so a not-found id is reported from the second
     * lookup rather than the first. Confirm this double lookup is intentional.
     *
     * @param id dataset database id or persistent identifier
     */
    @POST
    @ApiWriteOperation
    @Path("{id}/modifyRegistrationMetadata")
    public Response updateDatasetPIDMetadata(@PathParam("id") String id) {
        try {
            Dataset dataset = findDatasetOrDie(id);
            // Pre-check: metadata can only be re-registered for released datasets.
            if (!dataset.isReleased()) {
                return error(Response.Status.BAD_REQUEST,
                        BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
            }
        } catch (WrappedResponse ex) {
            Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
        }

        return response(req -> {
            execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
            List<String> args = Collections.singletonList(id);
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
        });
    }
598

599
    /**
     * Pushes current metadata to the PID provider for every dataset.
     * Per-dataset failures are logged at SEVERE and the sweep continues.
     *
     * NOTE(review): this is a write operation (it is annotated
     * {@code @ApiWriteOperation}) yet is exposed as HTTP GET — the same
     * GET-with-side-effects problem called out for {@code :publish}
     * (IQSS/dataverse#2431). Consider switching to POST; left unchanged here
     * because external callers may depend on the current verb.
     */
    @GET
    @ApiWriteOperation
    @Path("/modifyRegistrationPIDMetadataAll")
    public Response updateDatasetPIDMetadataAll() {
        return response(req -> {
            datasetDao.findAll().forEach(ds -> {
                try {
                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
                } catch (WrappedResponse ex) {
                    // Best-effort: log and continue with the remaining datasets.
                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
                }
            });
            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
        });
    }
614

615
    /**
     * Replaces the dataset's :draft version with the JSON body supplied.
     * If the latest version is already a draft its fields are overwritten in
     * place (UpdateDatasetVersionCommand); otherwise a new draft is created
     * from the incoming version (CreateDatasetVersionCommand).
     *
     * @param jsonBody  dataset-version JSON as produced/consumed by jsonParser()
     * @param id        dataset database id or persistent identifier
     * @param versionId must be the literal ":draft"; anything else is a 400
     * @return the managed (persisted) version as a DatasetVersionDTO, with
     *         email fields stripped when ExcludeEmailFromExport is set
     */
    @PUT
    @ApiWriteOperation
    @Path("{id}/versions/{versionId}")
    public Response updateDraftVersion(String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {

        // Only the draft is writable through this endpoint.
        if (!":draft".equals(versionId)) {
            return error(Response.Status.BAD_REQUEST, "Only the :draft version can be updated");
        }

        try (StringReader rdr = new StringReader(jsonBody)) {
            DataverseRequest req = createDataverseRequest(findUserOrDie());
            Dataset ds = findDatasetOrDie(id);
            JsonObject json = Json.createReader(rdr).readObject();
            DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);

            // clear possibly stale fields from the incoming dataset version.
            // creation and modification dates are updated by the commands.
            incomingVersion.setId(null);
            incomingVersion.setVersionNumber(null);
            incomingVersion.setMinorVersionNumber(null);
            incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
            incomingVersion.setDataset(ds);
            incomingVersion.setCreateTime(null);
            incomingVersion.setLastUpdateTime(null);
            boolean updateDraft = ds.getLatestVersion().isDraft();

            DatasetVersion managedVersion;
            if (updateDraft) {
                // Existing draft: copy only the fields onto the managed edit
                // version, then persist through the update command.
                final DatasetVersion editVersion = ds.getEditVersion();
                editVersion.setDatasetFields(incomingVersion.getDatasetFields());
                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
                managedVersion = managedDataset.getEditVersion();
            } else {
                // No draft yet: the incoming version becomes the new draft.
                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
            }
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(managedVersion);
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
                    ? dto.clearEmailFields() : dto);
        } catch (JsonParseException ex) {
            // Body was valid JSON but not a valid dataset version.
            logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage());

        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
661

662
    @PUT
663
    @ApiWriteOperation
664
    @Path("{id}/deleteMetadata")
665
    public Response deleteVersionMetadata(String jsonBody, @PathParam("id") String id) throws WrappedResponse {
666
        DataverseRequest req = createDataverseRequest(findUserOrDie());
×
667
        return processDatasetFieldDataDelete(jsonBody, id, req);
×
668
    }
669

670
    @PUT
671
    @ApiWriteOperation
672
    @Path("{id}/setEmbargo")
673
    public Response setEmbargoDate(@PathParam("id") String id, @QueryParam("date") String date) {
674
        try {
675
            Dataset dataset = findDatasetOrDie(id);
×
676
            SimpleDateFormat dateFormat = new SimpleDateFormat(settingsService.getValueForKey(SettingsServiceBean.Key.DefaultDateFormat));
×
677
            if(date == null) {
×
678
                throw new WrappedResponse(badRequest(BundleUtil.getStringFromBundle(
×
679
                        "datasets.api.setEmbargo.failure.badDate.missing",
680
                        settingsSvc.getValueForKey(SettingsServiceBean.Key.DefaultDateFormat))));
×
681
            }
682
            Date embargoDate = dateFormat.parse(date);
×
683
            datasetsValidators.validateEmbargoDate(embargoDate);
×
684
            dataset = datasetSvc.setDatasetEmbargoDate(dataset, embargoDate);
×
685
            return ok(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.success",
×
686
                    dataset.getGlobalId(), dataset.getEmbargoDate().get().toInstant()));
×
687
        } catch (WrappedResponse wr) {
×
688
            return wr.getResponse();
×
689
        } catch (ParseException pe) {
×
690
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.failure.badDate.format",
×
691
                    settingsSvc.getValueForKey(SettingsServiceBean.Key.DefaultDateFormat)));
×
692
        } catch (InvalidArgumentException iae) {
×
693
            return badRequest(iae.getMessage());
×
694
        } catch (EJBException ise) {
×
695
            return badRequest(ise.getCause().getMessage());
×
696
        } catch (PermissionException pe) {
×
697
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.failure.missingPermissions",
×
698
                    pe.getMissingPermissions().toString()));
×
699
        } catch (Exception e) {
×
700
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.setEmbargo.failure.unknown", e.getMessage()));
×
701
        }
702
    }
703

704
    @PUT
705
    @ApiWriteOperation
706
    @Path("{id}/liftEmbargo")
707
    public Response liftEmbargoDate(@PathParam("id") String id) {
708
        try {
709
            Dataset dataset = findDatasetOrDie(id);
×
710
            dataset = datasetSvc.liftDatasetEmbargoDate(dataset);
×
711
            return ok(BundleUtil.getStringFromBundle("datasets.api.liftEmbargo.success", dataset.getGlobalId()));
×
712
        } catch (WrappedResponse wr) {
×
713
            return wr.getResponse();
×
714
        } catch (EJBException ise) {
×
715
            return badRequest(ise.getCause().getMessage());
×
716
        } catch (PermissionException pe) {
×
717
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.liftEmbargo.failure.missingPermissions", pe.getMissingPermissions().toString()));
×
718
        } catch (Exception e) {
×
719
            return badRequest(BundleUtil.getStringFromBundle("datasets.api.liftEmbargo.failure.unknown", e.getMessage()));
×
720
        }
721
    }
722

723
    /**
     * Edits metadata fields of the dataset's edit version from the JSON body;
     * delegated to {@code processDatasetUpdate}.
     *
     * NOTE(review): {@code replace != null} means the mere PRESENCE of the
     * "replace" query parameter enables replace mode — {@code ?replace=false}
     * is treated the same as {@code ?replace=true}. Confirm whether this
     * presence-only semantic is intended or the value should be honored.
     *
     * @param jsonBody metadata-field JSON to merge or replace
     * @param id       dataset database id or persistent identifier
     * @param replace  if present (any value), existing field values are replaced
     * @throws WrappedResponse when the caller cannot be authenticated
     */
    @PUT
    @ApiWriteOperation
    @Path("{id}/editMetadata")
    public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace)
            throws WrappedResponse {
        Boolean replaceData = replace != null;
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        return processDatasetUpdate(jsonBody, id, req, replaceData);
    }
732

733
    /**
     * GET variant of dataset publication, kept only for backward
     * compatibility; logs each use and forwards to {@link #publishDataset}.
     *
     * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
     */
    @GET
    @ApiWriteOperation
    @Path("{id}/actions/:publish")
    @Deprecated
    public Response publishDataseUsingGetDeprecated(@PathParam("id") String id, @QueryParam("type") String type) {
        logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
        return publishDataset(id, type);
    }
744

745
    /**
     * Publishes the dataset.
     *
     * Supported "type" values (case-insensitive):
     * - "major"/"minor": standard publication via PublishDatasetCommand;
     *   returns 200 when completed synchronously, 202 when still in progress.
     * - "updatecurrent": superuser-only in-place curation of the already
     *   published version via CuratePublishedDatasetVersionCommand, followed
     *   by an optional re-submission to the configured archiver.
     *
     * @param id   dataset database id or persistent identifier
     * @param type required publication type; missing or unknown values are a 400
     */
    @POST
    @ApiWriteOperation
    @Path("{id}/actions/:publish")
    public Response publishDataset(@PathParam("id") String id, @QueryParam("type") String type) {
        try {
            if (type == null) {
                return error(Response.Status.BAD_REQUEST,
                        "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
            }
            boolean updateCurrent = false;
            AuthenticatedUser user = findAuthenticatedUserOrDie();
            type = type.toLowerCase();
            boolean isMinor = false;
            switch (type) {
                case "minor":
                    isMinor = true;
                    break;
                case "major":
                    isMinor = false;
                    break;
                case "updatecurrent":
                    // In-place update of the published version is a superuser-only operation.
                    if (user.isSuperuser()) {
                        updateCurrent = true;
                    } else {
                        return error(Response.Status.FORBIDDEN, "Only superusers can update the current version");
                    }
                    break;
                default:
                    return error(Response.Status.BAD_REQUEST,
                                "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major', 'minor', or 'updatecurrent'.");
            }

            Dataset ds = findDatasetOrDie(id);
            if (updateCurrent) {
                /*
                 * Note: The code here mirrors that in the
                 * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
                 * to the core logic (i.e. beyond updating the messaging about results) should
                 * be applied to the code there as well.
                 */
                String errorMsg = null;
                String successMsg = null;
                try {
                    CuratePublishedDatasetVersionCommand cmd =
                            new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user));
                    ds = commandEngine.submit(cmd);
                    successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success");

                    // If configured, update archive copy as well
                    String className = settingsService.getValueForKey(SettingsServiceBean.Key.ArchiverClassName);
                    DatasetVersion updateVersion = ds.getLatestVersion();
                    AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(
                            className, createDataverseRequest(user), updateVersion, authenticationServiceBean, Clock.systemUTC());
                    if (archiveCommand != null) {
                        // Delete the record of any existing copy since it is now out of date/incorrect
                        updateVersion.setArchivalCopyLocation(null);

                        // Then try to generate and submit an archival copy. Note that running this command within the
                        // CuratePublishedDatasetVersionCommand was causing an error:
                        // "The attribute [id] of class [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped
                        // to a primary key column in the database. Updates are not allowed."
                        // To avoid that, and to simplify reporting back to the GUI whether this optional step
                        // succeeded, I've pulled this out as a separate submit().
                        try {
                            updateVersion = commandEngine.submit(archiveCommand);
                            successMsg = BundleUtil.getStringFromBundle(updateVersion.getArchivalCopyLocation() != null
                                    ? "datasetversion.update.archive.success" : "datasetversion.update.archive.failure");
                        } catch (CommandException ex) {
                            // Archive failure is non-fatal: report it in the success message instead.
                            successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString();
                            logger.severe(ex.getMessage());
                        }
                    }
                } catch (CommandException ex) {
                    errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
                    logger.severe(ex.getMessage());
                }
                return errorMsg != null
                        ? error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg)
                        : ok(new DatasetDTO.Converter().convert(ds), successMsg);
            } else {
                PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, createDataverseRequest(user), isMinor));
                DatasetDTO dto = new DatasetDTO.Converter().convert(res.getDataset());
                // 202 Accepted when publication continues asynchronously (e.g. registration in progress).
                return res.isCompleted() ? ok(dto) : accepted(dto);
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        } catch (NoDatasetFilesException ex) {
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Unable to publish dataset, since there are no files in it.");
        }
    }
835

836
    @POST
837
    @ApiWriteOperation
838
    @Path("{id}/move/{targetDataverseAlias}")
839
    public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias,
840
                                @QueryParam("forceMove") Boolean force) {
841
        try {
842
            User user = findUserOrDie();
×
843
            Dataset dataset = findDatasetOrDie(id);
×
844
            Dataverse target = dataverseDao.findByAlias(targetDataverseAlias);
×
845
            if (target == null) {
×
846
                return error(Response.Status.BAD_REQUEST, "Target Dataverse not found.");
×
847
            }
848
            //Command requires Super user - it will be tested by the command
849
            execCommand(new MoveDatasetCommand(createDataverseRequest(user), dataset, target, force));
×
850
            return ok("Dataset moved successfully");
×
851
        } catch (WrappedResponse ex) {
×
852
            return ex.getResponse();
×
853
        }
854
    }
855

856
    /**
     * Creates a link to an existing dataset from another dataverse
     * (via {@code LinkDatasetCommand}).
     *
     * NOTE(review): the null checks on {@code linked}/{@code linking} below
     * look unreachable — the {@code ...OrDie} helpers appear to signal
     * "not found" by throwing WrappedResponse (see the catch), not by
     * returning null. Confirm before removing them.
     *
     * @param linkedDatasetId       dataset to be linked (id or persistent identifier)
     * @param linkingDataverseAlias alias of the dataverse the link is created in
     */
    @PUT
    @ApiWriteOperation
    @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
    public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId,
                                @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
        try {
            User user = findUserOrDie();
            Dataset linked = findDatasetOrDie(linkedDatasetId);
            Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
            if (linked == null) {
                return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
            }
            if (linking == null) {
                return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
            }
            execCommand(new LinkDatasetCommand(createDataverseRequest(user), linking, linked));
            return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
877

878
    @GET
879
    @Path("{id}/links")
880
    public Response getLinks(@PathParam("id") String idSupplied) {
881
        try {
882
            User user = findUserOrDie();
×
883
            if (!user.isSuperuser()) {
×
884
                return error(Response.Status.FORBIDDEN, "Not a superuser");
×
885
            }
886
            Dataset dataset = findDatasetOrDie(idSupplied);
×
887
            long datasetId = dataset.getId();
×
888
            List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
×
889
            JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
×
890
            for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
×
891
                dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
×
892
            }
×
893
            JsonObjectBuilder response = Json.createObjectBuilder();
×
894
            response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
×
895
            return ok(response);
×
896
        } catch (WrappedResponse wr) {
×
897
            return wr.getResponse();
×
898
        }
899
    }
900

901
    /**
     * Assigns the built-in ADMIN role on a dataset to the given user or group.
     *
     * Currently DISABLED for real use: the hard-coded {@code apiTestingOnly}
     * switch below makes every call return 403, so the assignment code after
     * it never runs in production.
     *
     * @todo Make this real. Currently only used for API testing. Copied from
     * the equivalent API endpoint for dataverses and simplified with values
     * hard coded.
     */
    @POST
    @ApiWriteOperation
    @Path("{identifier}/assignments")
    public Response createAssignment(String userOrGroup, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
        // Kill switch: flip to false only when this endpoint is made real (see @todo).
        boolean apiTestingOnly = true;
        if (apiTestingOnly) {
            return error(Response.Status.FORBIDDEN, "This is only for API tests.");
        }
        try {
            Dataset dataset = findDatasetOrDie(id);
            RoleAssignee assignee = findAssignee(userOrGroup);
            if (assignee == null) {
                return error(Response.Status.BAD_REQUEST, "Assignee not found");
            }
            // Hard-coded: always the built-in ADMIN role, never a private-url token.
            DataverseRole theRole = rolesSvc.findBuiltinRoleByAlias(BuiltInRole.ADMIN);
            String privateUrlToken = null;
            return ok(
                    new RoleAssignmentDTO.Converter().convert(execCommand(
                            new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(findUserOrDie()), privateUrlToken))));
        } catch (WrappedResponse ex) {
            // '' is MessageFormat escaping for a literal apostrophe in java.util.logging.
            logger.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage());
            return ex.getResponse();
        }
    }
930

931
    @GET
932
    @Path("{identifier}/assignments")
933
    public Response getAssignments(@PathParam("identifier") String id) {
934
        RoleAssignmentDTO.Converter converter = new RoleAssignmentDTO.Converter();
×
935
        return response(req -> ok(execCommand(new ListRoleAssignments(req,
×
936
                findDatasetOrDie(id))).stream()
×
937
                .map(converter::convert)
×
938
                .collect(Collectors.toList())));
×
939
    }
940

941
    @GET
942
    @Path("{id}/privateUrl")
943
    public Response getPrivateUrlData(@PathParam("id") String idSupplied) {
944
        return response(req -> {
×
945
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
×
946
            return privateUrl != null
×
947
                    ? ok(new PrivateUrlDTO.Converter().convert(privateUrl))
×
948
                    : error(Response.Status.NOT_FOUND, "Private URL not found.");
×
949
        });
950
    }
951

952
    @POST
953
    @ApiWriteOperation
954
    @Path("{id}/privateUrl")
955
    public Response createPrivateUrl(@PathParam("id") String idSupplied) {
956
        return response(req -> ok(
×
957
                new PrivateUrlDTO.Converter().convert(
×
958
                        execCommand(new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied))))));
×
959
    }
960

961
    @DELETE
962
    @ApiWriteOperation
963
    @Path("{id}/privateUrl")
964
    public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
965
        return response(req -> {
×
966
            Dataset dataset = findDatasetOrDie(idSupplied);
×
967
            PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
×
968
            if (privateUrl != null) {
×
969
                execCommand(new DeletePrivateUrlCommand(req, dataset));
×
970
                return ok("Private URL deleted.");
×
971
            } else {
972
                return notFound("No Private URL to delete.");
×
973
            }
974
        });
975
    }
976

977
    /**
     * Lists thumbnail candidates for a dataset: each candidate may carry a
     * "base64image" and/or a "dataFileId". Requires permission to issue
     * UpdateDatasetThumbnailCommand; otherwise 403.
     *
     * @param idSupplied dataset database id or persistent identifier
     */
    @GET
    @Path("{id}/thumbnail/candidates")
    public Response getDatasetThumbnailCandidates(@PathParam("id") String idSupplied) {
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            boolean canUpdateThumbnail = false;
            try {
                canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset)
                        .canIssue(UpdateDatasetThumbnailCommand.class);
            } catch (WrappedResponse ex) {
                // Authentication/permission lookup failed — treat as "cannot update"
                // and fall through to the 403 below.
                logger.info("Exception thrown while trying to figure out permissions while getting thumbnail for dataset id "
                        + dataset.getId() + ": " + ex.getLocalizedMessage());
            }
            if (!canUpdateThumbnail) {
                return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
            }
            JsonArrayBuilder data = Json.createArrayBuilder();
            for (DatasetThumbnail datasetThumbnail : datasetThumbnailService.getThumbnailCandidates(dataset, true)) {
                JsonObjectBuilder candidate = Json.createObjectBuilder();
                String base64image = datasetThumbnail.getBase64image();
                if (base64image != null) {
                    logger.fine("found a candidate!");
                    candidate.add("base64image", base64image);
                }
                DataFile dataFile = datasetThumbnail.getDataFile();
                if (dataFile != null) {
                    candidate.add("dataFileId", dataFile.getId());
                }
                data.add(candidate);
            }
            return ok(data);
        } catch (WrappedResponse ex) {
            return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
        }
    }
1012

1013
    @GET
1014
    @Produces({"image/png"})
1015
    @Path("{id}/thumbnail")
1016
    public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
1017
        try {
1018
            Dataset dataset = findDatasetOrDie(idSupplied);
×
1019
            InputStream is = datasetThumbnailService.getThumbnailAsInputStream(dataset);
×
1020
            if (is == null) {
×
1021
                return notFound("Thumbnail not available");
×
1022
            }
1023
            return Response.ok(is).build();
×
1024
        } catch (WrappedResponse wr) {
×
1025
            return notFound("Thumbnail not available");
×
1026
        }
1027
    }
1028

1029
    // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
1030
    @POST
1031
    @ApiWriteOperation
1032
    @Path("{id}/thumbnail/{dataFileId}")
1033
    public Response setDataFileAsThumbnail(@PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
1034
        try {
1035
            DatasetThumbnail datasetThumbnail = execCommand(
×
1036
                    new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied),
×
1037
                            UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
×
1038
            return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
×
1039
        } catch (WrappedResponse wr) {
×
1040
            return wr.getResponse();
×
1041
        }
1042
    }
1043

1044
    @POST
1045
    @ApiWriteOperation
1046
    @Path("{id}/thumbnail")
1047
    @Consumes(MediaType.MULTIPART_FORM_DATA)
1048
    public Response uploadDatasetLogo(@PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream
1049
    ) {
1050
        try {
1051
            DatasetThumbnail datasetThumbnail = execCommand(
×
1052
                    new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied),
×
1053
                    UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
1054
            return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
×
1055
        } catch (WrappedResponse wr) {
×
1056
            return wr.getResponse();
×
1057
        }
1058
    }
1059

1060
    @DELETE
1061
    @ApiWriteOperation
1062
    @Path("{id}/thumbnail")
1063
    public Response removeDatasetLogo(@PathParam("id") String idSupplied) {
1064
        try {
1065
            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied),
×
1066
                            UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
1067
            return ok("Dataset thumbnail removed.");
×
1068
        } catch (WrappedResponse wr) {
×
1069
            return wr.getResponse();
×
1070
        }
1071
    }
1072

1073
    /**
     * Requests an rsync upload script for the dataset from the Data Capture
     * Module, then places a DcmUpload lock on the dataset so no conflicting
     * operation runs while the upload is pending. Returns the script as
     * text/plain. 405 when rsync upload is not enabled via UploadMethods.
     *
     * NOTE(review): this endpoint is a GET but carries @ApiWriteOperation and
     * acquires a dataset lock (a write) — same GET-with-side-effects concern
     * as the deprecated :publish GET. Also note the script is requested
     * BEFORE the lock is taken; if locking fails the script has already been
     * generated on the DCM side.
     *
     * @param id dataset database id or persistent identifier
     */
    @GET
    @ApiWriteOperation
    @Path("{identifier}/dataCaptureModule/rsync")
    public Response getRsync(@PathParam("identifier") String id) {
        //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
            return error(Response.Status.METHOD_NOT_ALLOWED,
                         SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
        }
        Dataset dataset;
        try {
            dataset = findDatasetOrDie(id);
            AuthenticatedUser user = findAuthenticatedUserOrDie();
            ScriptRequestResponse scriptRequestResponse = execCommand(
                    new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));

            // Lock the dataset for the duration of the DCM upload; a null lock means locking failed.
            DatasetLock lock = datasetDao.addDatasetLock(
                    dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
            if (lock == null) {
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
                return error(Response.Status.FORBIDDEN,
                             "Failed to lock the dataset (dataset id=" + dataset.getId() + ")");
            }
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN));
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (EJBException ex) {
            return error(Response.Status.INTERNAL_SERVER_ERROR,
                         "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
        }
    }
1104

1105
    /**
1106
     * This api endpoint triggers the creation of a "package" file in a dataset
1107
     * after that package has been moved onto the same filesystem via the Data Capture Module.
1108
     * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file.
1109
     * The "package" can be downloaded over RSAL.
1110
     * <p>
1111
     * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly.
1112
     * <p>
1113
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
1114
     * But due to the possibly immense number of files (millions) the package approach was taken.
1115
     * This is relevant because the posix ("file") code contains many remnants of that development work.
1116
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
1117
     * -MAD 4.9.1
1118
     */
1119
    @POST
1120
    @ApiWriteOperation
1121
    @Path("{identifier}/dataCaptureModule/checksumValidation")
1122
    /**
     * Callback endpoint for the Data Capture Module (DCM): receives the result of checksum
     * validation for files that were uploaded out-of-band.
     * <p>
     * On "validation passed" the uploaded package is imported into the dataset, using either the
     * file-system batch import or the S3 package importer depending on the configured storage
     * driver. On "validation failed" dataset editors and all superusers are notified by email.
     * Superuser-only endpoint.
     *
     * @param id          dataset identifier (id or persistent id, resolved by findDatasetOrDie)
     * @param jsonFromDcm JSON payload from the DCM; expected keys: "status", and for the
     *                    success path "uploadFolder" and "totalSize"
     */
    public Response receiveChecksumValidationResults(@PathParam("identifier") String id, JsonObject jsonFromDcm) {
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
        AuthenticatedUser authenticatedUser;
        try {
            authenticatedUser = findAuthenticatedUserOrDie();
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        // Only superusers may report validation results (this is a machine-to-machine callback).
        if (!authenticatedUser.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }
        String statusMessageFromDcm = jsonFromDcm.getString("status");
        try {
            Dataset dataset = findDatasetOrDie(id);
            if ("validation passed".equals(statusMessageFromDcm)) {
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");

                // Falls back to the "file" driver when no storage driver is configured.
                String storageDriver = (System.getProperty("dataverse.files.storage-driver-id") != null)
                        ? System.getProperty("dataverse.files.storage-driver-id") : "file";
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
                int totalSize = jsonFromDcm.getInt("totalSize");

                if (storageDriver.equals("file")) {
                    logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId());

                    ImportMode importMode = ImportMode.MERGE;
                    try {
                        // Kicks off an asynchronous batch-import job; the response echoes the
                        // job id and message reported by the job framework.
                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(
                                createDataverseRequest(findUserOrDie()), dataset, uploadFolder, (long) totalSize, importMode));
                        long jobId = jsonFromImportJobKickoff.getInt("executionId");
                        String message = jsonFromImportJobKickoff.getString("message");
                        JsonObjectBuilder job = Json.createObjectBuilder();
                        job.add("jobId", jobId);
                        job.add("message", message);
                        return ok(job);
                    } catch (WrappedResponse wr) {
                        String message = wr.getMessage();
                        return error(Response.Status.INTERNAL_SERVER_ERROR,
                                     "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'.");
                    }
                } else if (storageDriver.equals("s3")) {
                    logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId());
                    try {

                        //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package
                        s3PackageImporter.copyFromS3(dataset, uploadFolder);
                        DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, totalSize);

                        if (packageFile == null) {
                            logger.log(Level.SEVERE, "S3 File package import failed.");
                            return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed.");
                        }
                        // Release the DcmUpload lock taken when the upload started; a missing lock
                        // is only logged, not treated as an error.
                        DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload);
                        if (dcmLock == null) {
                            logger.log(Level.WARNING, "Dataset not locked for DCM upload");
                        } else {
                            datasetDao.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload);
                            dataset.removeLock(dcmLock);
                        }

                        // update version using the command engine to enforce user permissions and constraints
                        if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) {
                            try {
                                Command<Dataset> cmd;
                                cmd = new UpdateDatasetVersionCommand(dataset,
                                        new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
                                commandEngine.submit(cmd);
                            } catch (CommandException ex) {
                                return error(Response.Status.INTERNAL_SERVER_ERROR,
                                             "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
                            }
                        } else {
                            // NOTE: the constraint violation is only logged; the request still
                            // returns 200 below with an empty body.
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a "
                                    + "single version in draft mode.";
                            logger.log(Level.SEVERE, constraintError);
                        }
                        JsonObjectBuilder job = Json.createObjectBuilder();
                        return ok(job);
                    } catch (IOException e) {
                        String message = e.getMessage();
                        return error(Response.Status.INTERNAL_SERVER_ERROR,
                                     "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
                    }
                } else {
                    return error(Response.Status.INTERNAL_SERVER_ERROR,
                                 "Invalid storage driver in Dataverse, not compatible with dcm");
                }
            } else if ("validation failed".equals(statusMessageFromDcm)) {
                // Notify everyone with edit permission on the dataset, plus all superusers.
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(
                        Permission.EditDataset,
                        dataset);
                distinctAuthors.values().forEach((value) -> userNotificationService.sendNotificationWithEmail(value,
                        new Timestamp(new Date().getTime()), NotificationType.CHECKSUMFAIL, dataset.getId(), NotificationObjectType.DATASET));
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
                if (superUsers != null && !superUsers.isEmpty()) {
                    superUsers.forEach((au) -> userNotificationService.sendNotificationWithEmail(au,
                            new Timestamp(new Date().getTime()), NotificationType.CHECKSUMFAIL, dataset.getId(), NotificationObjectType.DATASET));
                }
                return ok("User notified about checksum validation failure.");
            } else {
                return error(Response.Status.BAD_REQUEST,
                             "Unexpected status cannot be processed: " + statusMessageFromDcm);
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }
1229

1230
    @POST
1231
    @ApiWriteOperation
1232
    @Path("{id}/submitForReview")
1233
    @Consumes(MediaType.APPLICATION_JSON)
1234
    public Response submitForReview(@PathParam("id") String idSupplied, SubmitForReviewDataDTO submitForReviewData) {
1235
        try {
1236
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()),
×
1237
                    findDatasetOrDie(idSupplied), submitForReviewData.getComment()));
×
1238
            JsonObjectBuilder result = Json.createObjectBuilder();
×
1239
            result.add("inReview", updatedDataset.isLockedFor(DatasetLock.Reason.InReview));
×
1240
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
×
1241
            return ok(result);
×
1242
        } catch (WrappedResponse wr) {
×
1243
            return wr.getResponse();
×
1244
        } catch (NoDatasetFilesException ex) {
×
1245
            return error(Response.Status.INTERNAL_SERVER_ERROR,
×
1246
                         "Unable to submit dataset for review, since there are no files in it.");
1247
        }
1248
    }
1249

1250
    @POST
1251
    @ApiWriteOperation
1252
    @Path("{id}/returnToAuthor")
1253
    public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBody) {
1254
        if (jsonBody == null || jsonBody.isEmpty()) {
×
1255
            return error(Response.Status.BAD_REQUEST,
×
1256
                         "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
1257
        }
1258
        StringReader rdr = new StringReader(jsonBody);
×
1259
        JsonObject json = Json.createReader(rdr).readObject();
×
1260
        try {
1261
            Dataset dataset = findDatasetOrDie(idSupplied);
×
1262
            String reasonForReturn;
1263
            reasonForReturn = json.getString("reasonForReturn");
×
1264
            // TODO: Once we add a box for the curator to type into, pass the reason for return to the
1265
            //  ReturnDatasetToAuthorCommand and delete this check and call to setReturnReason on the API side.
1266
            if (reasonForReturn == null || reasonForReturn.isEmpty()) {
×
1267
                return error(Response.Status.BAD_REQUEST,
×
1268
                             "You must enter a reason for returning a dataset to the author(s).");
1269
            }
1270
            AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
×
1271
            Map<String, String> params = new HashMap<>();
×
1272
            params.put(NotificationParameter.MESSAGE.key(), reasonForReturn);
×
1273
            params.put(NotificationParameter.REPLY_TO.key(), authenticatedUser.getEmail());
×
1274
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(
×
1275
                    authenticatedUser), dataset, params));
1276

1277
            JsonObjectBuilder result = Json.createObjectBuilder();
×
1278
            result.add("inReview", false);
×
1279
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
×
1280
            return ok(result);
×
1281
        } catch (WrappedResponse wr) {
×
1282
            return wr.getResponse();
×
1283
        }
1284
    }
1285

1286
    /**
1287
     * Add a File to an existing Dataset
1288
     */
1289
    @POST
1290
    @ApiWriteOperation
1291
    @Path("{id}/add")
1292
    @Consumes(MediaType.MULTIPART_FORM_DATA)
1293
    public Response addFileToDataset(@PathParam("id") String idSupplied,
1294
                                     @FormDataParam("jsonData") String jsonData,
1295
                                     @FormDataParam("file") InputStream fileInputStream,
1296
                                     @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
1297
                                     @FormDataParam("file") final FormDataBodyPart formDataBodyPart) {
1298
        if (!systemConfig.isHTTPUpload()) {
×
1299
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
×
1300
        }
1301

1302
        // (1) Get the user from the API key
1303
        User authUser;
1304
        try {
1305
            authUser = findUserOrDie();
×
1306
        } catch (WrappedResponse ex) {
×
1307
            return error(Response.Status.FORBIDDEN,
×
1308
                         BundleUtil.getStringFromBundle("file.addreplace.error.auth")
×
1309
            );
1310
        }
×
1311

1312
        // (2) Get the Dataset Id
1313
        Dataset dataset;
1314
        try {
1315
            dataset = findDatasetOrDie(idSupplied);
×
1316
        } catch (WrappedResponse wr) {
×
1317
            return wr.getResponse();
×
1318
        }
×
1319

1320
        // (2a) Make sure dataset does not have package file
1321
        for (DatasetVersion dv : dataset.getVersions()) {
×
1322
            if (dv.isHasPackageFile()) {
×
1323
                return error(Response.Status.FORBIDDEN,
×
1324
                             ResourceBundle.getBundle("Bundle").getString("file.api.alreadyHasPackageFile")
×
1325
                );
1326
            }
1327
        }
×
1328

1329
        // (3) Get the file name and content type
1330
        String newFilename = contentDispositionHeader.getFileName();
×
1331
        String newFileContentType = formDataBodyPart.getMediaType().toString();
×
1332

1333
        // (2a) Load up optional params via JSON
1334
        OptionalFileParams optionalFileParams;
1335
        logger.fine("Loading (api) jsonData: " + jsonData);
×
1336

1337
        try {
1338
            optionalFileParams = optionalFileParamsSvc.create(jsonData);
×
1339
        } catch (DataFileTagException ex) {
×
1340
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
×
1341
        }
×
1342

1343
        try {
1344
            datasetsValidators.validateFileTermsOfUseDTO(optionalFileParams.getFileTermsOfUseDTO());
×
1345
        } catch (MissingArgumentException | InvalidParameterException pe) {
×
1346
            return error(Response.Status.BAD_REQUEST, pe.getMessage());
×
1347
        } catch (EJBException ejbe) {
×
1348
            return error(Response.Status.BAD_REQUEST, ejbe.getCause().getMessage());
×
1349
        }
×
1350

1351
        // (3) Create the AddReplaceFileHelper object
1352
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
×
1353
        AddReplaceFileHelper addFileHelper =
×
1354
                new AddReplaceFileHelper(dvRequest2, ingestService, fileService, dataFileCreator, permissionSvc, commandEngine, optionalFileParamsSvc);
1355

1356
        // (4) Run "runAddFileByDatasetId"
1357
        try {
1358
            addFileHelper.runAddFileByDataset(dataset, newFilename, newFileContentType, fileInputStream, optionalFileParams);
×
1359
        } finally {
1360
            IOUtils.closeQuietly(fileInputStream);
×
1361
        }
1362

1363
        if (addFileHelper.hasError()) {
×
1364
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
×
1365
        } else {
1366
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
×
1367
            try {
1368
                // Todo We need a consistent, sane way to communicate a human readable message to an API client suitable
1369
                // for human consumption. Imagine if the UI were built in Angular or React and we want to return a
1370
                // message from the API as-is to the user. Human readable.
1371
                logger.fine("successMsg: " + successMsg);
×
1372
                return ok(addFileHelper.getSuccessResult());
×
1373
                // "Look at that!  You added a file! (hey hey, it may have worked)");
1374
            } catch (NoFilesException ex) {
×
1375
                Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
×
1376
                return error(Response.Status.BAD_REQUEST, "NoFileException!  Serious Error! See administrator!");
×
1377
            }
1378
        }
1379
    }
1380

1381
    @GET
1382
    @Path("{identifier}/locks")
1383
    public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
1384
        Dataset dataset;
1385
        try {
1386
            dataset = findDatasetOrDie(id);
×
1387
            Set<DatasetLock> locks;
1388
            if (lockType == null) {
×
1389
                locks = dataset.getLocks();
×
1390
            } else {
1391
                // request for a specific type lock:
1392
                DatasetLock lock = dataset.getLockFor(lockType);
×
1393
                locks = new HashSet<>();
×
1394
                if (lock != null) {
×
1395
                    locks.add(lock);
×
1396
                }
1397
            }
1398
            List<DatasetLockDTO> allLocks = locks.stream()
×
1399
                    .map(l -> new DatasetLockDTO.Converter().convert(l))
×
1400
                    .collect(Collectors.toList());
×
1401
            return ok(allLocks);
×
1402
        } catch (WrappedResponse wr) {
×
1403
            return wr.getResponse();
×
1404
        }
1405
    }
1406

1407
    @DELETE
1408
    @ApiWriteOperation
1409
    @Path("{identifier}/locks")
1410
    public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
1411
        return response(req -> {
×
1412
            try {
1413
                AuthenticatedUser user = findAuthenticatedUserOrDie();
×
1414
                if (!user.isSuperuser()) {
×
1415
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
1416
                }
1417
                Dataset dataset = findDatasetOrDie(id);
×
1418

1419
                if (lockType == null) {
×
1420
                    Set<DatasetLock.Reason> locks = new HashSet<>();
×
1421
                    for (DatasetLock lock : dataset.getLocks()) {
×
1422
                        locks.add(lock.getReason());
×
1423
                    }
×
1424
                    if (!locks.isEmpty()) {
×
1425
                        for (DatasetLock.Reason locktype : locks) {
×
1426
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
×
1427
                            // refresh the dataset:
1428
                            dataset = findDatasetOrDie(id);
×
1429
                        }
×
1430
                        // kick of dataset reindexing, in case the locks removed affected the search card:
1431
                        indexService.indexDataset(dataset, true);
×
1432
                        return ok("locks removed");
×
1433
                    }
1434
                    return ok("dataset not locked");
×
1435
                }
1436
                // request for a specific type lock:
1437
                DatasetLock lock = dataset.getLockFor(lockType);
×
1438
                if (lock != null) {
×
1439
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
×
1440
                    // refresh the dataset:
1441
                    dataset = findDatasetOrDie(id);
×
1442
                    // ... and kick of dataset reindexing, in case the lock removed affected the search card:
1443
                    indexService.indexDataset(dataset, true);
×
1444
                    return ok("lock type " + lock.getReason() + " removed");
×
1445
                }
1446
                return ok("no lock type " + lockType + " on the dataset");
×
1447
            } catch (WrappedResponse wr) {
×
1448
                return wr.getResponse();
×
1449
            }
1450
        });
1451
    }
1452

1453
    @POST
1454
    @ApiWriteOperation
1455
    @Path("{identifier}/lock/{type}")
1456
    public Response lockDataset(@PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
1457
        return response(req -> {
×
1458
            try {
1459
                AuthenticatedUser user = findAuthenticatedUserOrDie();
×
1460
                if (!user.isSuperuser()) {
×
1461
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
×
1462
                }
1463
                Dataset dataset = findDatasetOrDie(id);
×
1464
                DatasetLock lock = dataset.getLockFor(lockType);
×
1465
                if (lock != null) {
×
1466
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
×
1467
                }
1468
                lock = new DatasetLock(lockType, user);
×
1469
                execCommand(new AddLockCommand(req, dataset, lock));
×
1470
                // refresh the dataset:
1471
                dataset = findDatasetOrDie(id);
×
1472
                // ... and kick of dataset reindexing:
1473
                indexService.indexDataset(dataset, true);
×
1474
                return ok("dataset locked with lock type " + lockType);
×
1475
            } catch (WrappedResponse wr) {
×
1476
                return wr.getResponse();
×
1477
            }
1478
        });
1479
    }
1480

1481
    @GET
1482
    @Path("{id}/filelabels")
1483
    @Produces(MediaType.APPLICATION_JSON)
1484
    public Response listLabels(@PathParam("id") String datasetId) throws WrappedResponse {
1485
        Dataset dataset = findDatasetOrDie(datasetId);
×
1486
        return ok(fileLabelsService.prepareFileLabels(dataset, new FileLabelsChangeOptionsDTO()));
×
1487
    }
1488

1489
    @POST
1490
    @ApiWriteOperation
1491
    @Path("{id}/filelabels")
1492
    @Consumes(MediaType.APPLICATION_JSON)
1493
    @Produces(MediaType.APPLICATION_JSON)
1494
    public Response changeLabels(@PathParam("id") String datasetId, FileLabelsChangeOptionsDTO options) throws WrappedResponse {
1495
        Dataset dataset = findDatasetOrDie(datasetId);
×
1496
        List<FileLabelInfo> changedLabels;
1497
        try {
1498
            changedLabels = fileLabelsService.changeLabels(fileLabelsService.prepareFileLabels(dataset, options), options);
×
1499
            List<FileLabelInfo> result = fileLabelsService.updateDataset(dataset, changedLabels, options);
×
1500
            return ok(result.stream().filter(FileLabelInfo::isAffected).collect(Collectors.toList()));
×
1501
        } catch (EJBException ee) {
×
1502
            if (ee.getCause() instanceof IllegalStateException) {
×
1503
                throw new WrappedResponse(badRequest("Error occurred – probably input contained duplicated filenames"));
×
1504
            } else {
1505
                throw ee;
×
1506
            }
1507
        }
1508
    }
1509

1510
    // -------------------- PRIVATE --------------------
1511

1512
    /**
     * Deletes specific metadata field values from a dataset's edit version.
     * <p>
     * The JSON body either contains a "fields" array (multiple fields) or a single
     * field object. Every requested value must be found in the current version —
     * if any value is missing the whole request fails with 400 and no update command
     * is executed. On success the change is persisted either into the existing draft
     * or into a newly created draft version.
     *
     * @param jsonBody raw JSON describing the field values to remove
     * @param id       dataset identifier (id or persistent id)
     * @param req      request context used for permission-checked command execution
     */
    private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
        try (StringReader rdr = new StringReader(jsonBody)) {

            Dataset dataset = findDatasetOrDie(id);
            JsonObject json = Json.createReader(rdr).readObject();
            DatasetVersion dsv = dataset.getEditVersion();

            // Fields requested for deletion, parsed from either form of the body.
            List<DatasetField> fields;

            JsonArray fieldsJson = json.getJsonArray("fields");
            if (fieldsJson == null) {
                fields = new LinkedList<>(jsonParser().parseField(json, Boolean.FALSE));
            } else {
                fields = jsonParser().parseMultipleFields(json);
            }

            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

            // Child fields of matched compound values, collected here and removed in one pass below.
            List<DatasetField> dsfChildsToRemove = new ArrayList<>();

            Map<DatasetFieldType, List<DatasetField>> fieldsToRemoveGroupedByType = fields.stream()
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));

            Map<DatasetFieldType, List<DatasetField>> oldFieldsGroupedByType = dsv.getDatasetFields().stream()
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));

            // Match each removable value against the existing values of the same field type.
            for (Map.Entry<DatasetFieldType, List<DatasetField>> fieldsToRemoveEntry : fieldsToRemoveGroupedByType.entrySet()) {
                for (DatasetField removableField : fieldsToRemoveEntry.getValue()) {
                    boolean valueFound = false;
                    for (DatasetField oldField : oldFieldsGroupedByType.get(fieldsToRemoveEntry.getKey())) {
                        if (oldField.getDatasetFieldType().isControlledVocabulary()) {
                            List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<>();
                            if (oldField.getDatasetFieldType().isAllowMultiples()) {
                                // Multi-valued vocabulary: match by string value; every requested
                                // value must exist, otherwise fail the whole request.
                                for (ControlledVocabularyValue cvv : removableField.getControlledVocabularyValues()) {
                                    for (ControlledVocabularyValue existing : oldField.getControlledVocabularyValues()) {
                                        if (existing.getStrValue().equals(cvv.getStrValue())) {
                                            controlledVocabularyItemsToRemove.add(existing);
                                            valueFound = true;
                                        }
                                    }
                                    // NOTE(review): this checks for the requested cvv instance in a list of
                                    // matched *existing* instances — it appears to rely on equals() matching
                                    // across instances; confirm ControlledVocabularyValue equality semantics.
                                    if (!controlledVocabularyItemsToRemove.contains(cvv)) {
                                        logger.log(Level.SEVERE, String.format("Delete metadata failed: %s: %s not found.",
                                                cvv.getDatasetFieldType().getDisplayName(), cvv.getStrValue()));
                                        return error(Response.Status.BAD_REQUEST,
                                                String.format("Delete metadata failed: %s: %s not found.",
                                                        cvv.getDatasetFieldType().getDisplayName(), cvv.getStrValue()));
                                    }
                                }
                                for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
                                    oldField.getControlledVocabularyValues().remove(remove);
                                }
                            } else {
                                // Single-valued vocabulary: clear it when the value matches.
                                if (oldField.getSingleControlledVocabularyValue().getStrValue().equals(
                                        removableField.getSingleControlledVocabularyValue().getStrValue())) {
                                    oldField.setSingleControlledVocabularyValue(null);
                                    valueFound = true;
                                }
                            }
                        } else {
                            if (removableField.getDatasetFieldType().isPrimitive()) {
                                // Primitive field: clear the value when the (possibly empty) strings match.
                                if (oldField.getFieldValue().getOrElse("")
                                        .equals(removableField.getFieldValue().getOrElse(""))) {
                                    oldField.setFieldValue(null);
                                    valueFound = true;
                                }
                            } else {
                                // Compound field: compare joined child values; on match, queue all
                                // children of the existing field for removal.
                                if (DatasetFieldUtil.joinAllValues(removableField)
                                        .equals(DatasetFieldUtil.joinAllValues(oldField))) {
                                    dsfChildsToRemove.addAll(oldField.getDatasetFieldsChildren());
                                    valueFound = true;
                                }
                            }
                        }
                    }
                    // Nothing matched this requested value anywhere → reject the whole request.
                    if (!valueFound) {
                        String displayValue = !removableField.getDisplayValue().isEmpty()
                                ? removableField.getDisplayValue() : removableField.getCompoundDisplayValue();
                        logger.log(Level.SEVERE, String.format("Delete metadata failed: %s: %s not found.",
                                removableField.getDatasetFieldType().getDisplayName(), displayValue));
                        return error(Response.Status.BAD_REQUEST, String.format("Delete metadata failed: %s: %s not found.",
                                removableField.getDatasetFieldType().getDisplayName(), displayValue));
                    }
                }
            }

            // Drop the queued compound children from the parsed fields in one pass.
            fields.stream()
                    .map(DatasetField::getDatasetFieldsChildren)
                    .forEach(datasetFields -> datasetFields.removeAll(dsfChildsToRemove));

            // Persist: update the existing draft, or create a new draft version.
            boolean updateDraft = dataset.getLatestVersion().isDraft();
            DatasetVersion managedVersion = updateDraft
                    ? execCommand(new UpdateDatasetVersionCommand(dataset, req)).getEditVersion()
                    : execCommand(new CreateDatasetVersionCommand(req, dataset, dsv));
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(managedVersion);
            // Optionally scrub email fields from the exported representation.
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
                    ? dto.clearEmailFields() : dto);
        } catch (JsonParseException ex) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
            return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());
        } catch (WrappedResponse ex) {
            logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
            return ex.getResponse();
        }
    }
1616

1617
    private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData) {
1618
        try (StringReader rdr = new StringReader(jsonBody)) {
×
1619

1620
            Dataset ds = findDatasetOrDie(id);
×
1621
            JsonObject json = Json.createReader(rdr).readObject();
×
1622
            DatasetVersion dsv = ds.getEditVersion();
×
1623

1624
            List<DatasetField> freshFieldsModel;
1625

1626
            JsonArray fieldsJson = json.getJsonArray("fields");
×
1627
            freshFieldsModel = fieldsJson == null
×
1628
                    ? new LinkedList<>(jsonParser().parseField(json, Boolean.FALSE))
×
1629
                    : jsonParser().parseMultipleFields(json);
×
1630

1631
            String valdationErrors = validateDatasetFieldValues(freshFieldsModel);
×
1632

1633
            if (!valdationErrors.isEmpty()) {
×
1634
                logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
×
1635
                return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
×
1636
            }
1637

1638
            dsv.setVersionState(DatasetVersion.VersionState.DRAFT);
×
1639

1640
            // loop through the update fields and compare to the version fields
1641
            // if exist add/replace values if not add entire dsf
1642
            Map<DatasetFieldType, List<DatasetField>> updatedFieldsGroupedByType = freshFieldsModel.stream()
×
1643
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));
×
1644

1645
            Map<DatasetFieldType, List<DatasetField>> oldFieldsGroupedByType = dsv.getDatasetFields().stream()
×
1646
                    .collect(Collectors.groupingBy(DatasetField::getDatasetFieldType));
×
1647

1648
            ArrayList<DatasetField> fieldsToAdd = new ArrayList<>();
×
1649

1650
            for (Map.Entry<DatasetFieldType, List<DatasetField>> updatedFields : updatedFieldsGroupedByType.entrySet()) {
×
1651
                for (DatasetField updateField : updatedFields.getValue()) {
×
1652
                    for (DatasetField oldField : oldFieldsGroupedByType.get(updatedFields.getKey())) {
×
1653
                        if (oldField.isEmpty() || oldField.getDatasetFieldType().isAllowMultiples() || replaceData) {
×
1654
                            if (replaceData) {
×
1655
                                if (oldField.getDatasetFieldType().isAllowMultiples()) {
×
1656
                                    oldField.getControlledVocabularyValues().clear();
×
1657
                                } else {
1658
                                    oldField.setFieldValue("");
×
1659
                                    oldField.setSingleControlledVocabularyValue(null);
×
1660
                                }
1661
                            }
1662
                            if (updateField.getDatasetFieldType().isControlledVocabulary()) {
×
1663
                                if (oldField.getDatasetFieldType().isAllowMultiples()) {
×
1664
                                    for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
×
1665
                                        if (!oldField.getDisplayValue().contains(cvv.getStrValue())) {
×
1666
                                            oldField.getControlledVocabularyValues().add(cvv);
×
1667
                                        }
1668
                                    }
×
1669
                                } else {
1670
                                    oldField.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
×
1671
                                }
1672
                            } else {
1673
                                if (updateField.getDatasetFieldType().isPrimitive()) {
×
1674
                                    if (oldField.getDatasetFieldType().isAllowMultiples()) {
×
1675
                                        if (!oldField.getFieldValue().getOrElse("")
×
1676
                                                .equals(updateField.getFieldValue().getOrElse(""))) {
×
1677
                                            updateField.setDatasetVersion(dsv);
×
1678
                                            fieldsToAdd.add(updateField);
×
1679
                                        }
1680
                                    } else {
1681
                                        oldField.setFieldValue(updateField.getValue());
×
1682
                                    }
1683
                                } else {
1684
                                    if (!DatasetFieldUtil.joinAllValues(updateField)
×
1685
                                            .equals(DatasetFieldUtil.joinAllValues(oldField))) {
×
1686
                                        updateField.setDatasetVersion(dsv);
×
1687
                                        fieldsToAdd.add(updateField);
×
1688
                                    }
1689
                                }
1690
                            }
1691
                        } else {
1692
                            return error(Response.Status.BAD_REQUEST, String.format("You may not add data to a field that " +
×
1693
                                    "already has data and does not allow multiples. Use replace=true to replace existing data (%s)",
1694
                                    oldField.getDatasetFieldType().getDisplayName()));
×
1695
                        }
1696
                        break;
1697
                    }
1698

1699
                    updatedFieldsGroupedByType.entrySet().stream()
×
1700
                            .filter(fieldTypeListEntry -> !oldFieldsGroupedByType.containsKey(fieldTypeListEntry.getKey()))
×
1701
                            .map(Map.Entry::getValue)
×
1702
                            .forEach(fieldNotFound -> fieldNotFound.forEach(
×
1703
                                    datasetField -> {
1704
                                        datasetField.setDatasetVersion(dsv);
×
1705
                                        dsv.getDatasetFields().add(datasetField);
×
1706
                                    }));
×
1707
                    dsv.getDatasetFields().addAll(fieldsToAdd);
×
1708
                }
×
1709
            }
×
1710
            boolean updateDraft = ds.getLatestVersion().isDraft();
×
1711
            DatasetVersion managedVersion = updateDraft
×
1712
                    ? execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion()
×
1713
                    : execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
×
1714
            DatasetVersionDTO dto = new DatasetVersionDTO.Converter().convert(managedVersion);
×
1715
            return ok(settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport)
×
1716
                    ? dto.clearEmailFields() : dto);
×
1717
        } catch (JsonParseException ex) {
×
1718
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
×
1719
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());
×
1720
        } catch (WrappedResponse ex) {
×
1721
            logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
×
1722
            return ex.getResponse();
×
1723
        }
1724
    }
1725

1726
    private String validateDatasetFieldValues(List<DatasetField> fields) {
1727
        StringBuilder error = new StringBuilder();
×
1728
        for (DatasetField dsf : fields) {
×
1729
            if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
×
1730
                    && dsf.getDatasetFieldsChildren().isEmpty() && dsf.getFieldValue().isEmpty()) {
×
1731
                error.append("Empty multiple value for field: ")
×
1732
                        .append(dsf.getDatasetFieldType().getDisplayName())
×
1733
                        .append(" ");
×
1734
            } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getDatasetFieldsChildren().isEmpty()) {
×
1735
                error.append("Empty value for field: ")
×
1736
                        .append(dsf.getDatasetFieldType().getDisplayName())
×
1737
                        .append(" ");
×
1738
            }
1739
        }
×
1740
        return !error.toString().isEmpty() ? error.toString() : "";
×
1741
    }
1742

1743
    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds) throws WrappedResponse {
1744
        DatasetVersion dsv = execCommand(chooseCommandForVersionFinding(versionNumber, ds, req));
×
1745
        if (dsv == null || dsv.getId() == null) {
×
1746
            throw new WrappedResponse(notFound(String.format("Dataset version %s of dataset %d not found", versionNumber, ds.getId())));
×
1747
        }
1748
        return dsv;
×
1749
    }
1750

1751
    private Command<DatasetVersion> chooseCommandForVersionFinding(String versionId, Dataset ds, DataverseRequest req)
1752
            throws WrappedResponse {
1753
        switch (versionId) {
×
1754
            case ":latest":
1755
                return new GetLatestAccessibleDatasetVersionCommand(req, ds);
×
1756
            case ":draft":
1757
                return new GetDraftVersionIfExists(req, ds);
×
1758
            case ":latest-published":
1759
                return new GetLatestPublishedDatasetVersionCommand(req, ds);
×
1760
            default:
1761
                try {
1762
                    String[] versions = versionId.split("\\.");
×
1763
                    if (versions.length == 1) {
×
1764
                        return new GetSpecificPublishedDatasetVersionCommand(req, ds, Long.parseLong(versions[0]), 0L);
×
1765
                    } else if (versions.length == 2) {
×
1766
                        return new GetSpecificPublishedDatasetVersionCommand(req, ds, Long.parseLong(versions[0]), Long.parseLong(versions[1]));
×
1767
                    }
1768
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
1769
                } catch (NumberFormatException nfe) {
×
1770
                    throw new WrappedResponse(error(Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'"));
×
1771
                }
1772
        }
1773
    }
1774

1775
    private boolean isOriginalFormatRequested(MultivaluedMap<String, String> queryParameters) {
1776
        return queryParameters
×
1777
                .keySet().stream()
×
1778
                .filter("format"::equals)
×
1779
                .map(queryParameters::getFirst)
×
1780
                .anyMatch("original"::equals);
×
1781
    }
1782

1783
    private RoleAssignee findAssignee(String identifier) {
1784
        try {
1785
            return roleAssigneeSvc.getRoleAssignee(identifier);
×
1786
        } catch (EJBException ex) {
×
1787
            Throwable cause = ex;
×
1788
            while (cause.getCause() != null) {
×
1789
                cause = cause.getCause();
×
1790
            }
1791
            logger.log(Level.INFO, "Exception caught looking up RoleAssignee based on identifier ''{0}'': {1}",
×
1792
                    new Object[] {identifier, cause.getMessage()});
×
1793
            return null;
×
1794
        }
1795
    }
1796
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc