
DataBiosphere / consent — build #5238 (push · web-flow)
31 Jul 2024 10:12AM UTC — coverage: 77.089% (-0.007% from 77.096%)

DCJ-539: New API for returning dataset + study summaries (#2368)
Co-authored-by: Olivia Kotsopoulos <okotsopo@broadinstitute.org>

13 of 16 new or added lines in 4 files covered (81.25%).
1 existing line in 1 file is now uncovered.
10030 of 13011 relevant lines covered (77.09%).
0.77 hits per line.
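(For reference, the headline figures follow directly from the counts above: 10030 / 13011 ≈ 0.77089, i.e. 77.089% overall coverage, and 13 / 16 = 81.25% for the new or added lines in this change.)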

Source File

52.47% — /src/main/java/org/broadinstitute/consent/http/service/DatasetService.java
package org.broadinstitute.consent.http.service;

import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;

import com.google.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.NotAuthorizedException;
import jakarta.ws.rs.NotFoundException;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.broadinstitute.consent.http.db.DaaDAO;
import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
import org.broadinstitute.consent.http.enumeration.PropertyType;
import org.broadinstitute.consent.http.enumeration.UserRoles;
import org.broadinstitute.consent.http.models.ApprovedDataset;
import org.broadinstitute.consent.http.models.Dac;
import org.broadinstitute.consent.http.models.DataUse;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.models.DatasetProperty;
import org.broadinstitute.consent.http.models.DatasetStudySummary;
import org.broadinstitute.consent.http.models.DatasetSummary;
import org.broadinstitute.consent.http.models.Dictionary;
import org.broadinstitute.consent.http.models.Study;
import org.broadinstitute.consent.http.models.StudyConversion;
import org.broadinstitute.consent.http.models.StudyProperty;
import org.broadinstitute.consent.http.models.User;
import org.broadinstitute.consent.http.models.dataset_registration_v1.ConsentGroup.AccessManagement;
import org.broadinstitute.consent.http.models.dto.DatasetDTO;
import org.broadinstitute.consent.http.models.dto.DatasetPropertyDTO;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
import org.broadinstitute.consent.http.util.ConsentLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class DatasetService implements ConsentLogger {

  private final Logger logger = LoggerFactory.getLogger(this.getClass());
  public static final String DATASET_NAME_KEY = "Dataset Name";
  private final DatasetDAO datasetDAO;
  private final DaaDAO daaDAO;
  private final DacDAO dacDAO;
  private final EmailService emailService;
  private final OntologyService ontologyService;
  private final StudyDAO studyDAO;
  private final DatasetServiceDAO datasetServiceDAO;
  private final UserDAO userDAO;

  @Inject
  public DatasetService(DatasetDAO dataSetDAO, DaaDAO daaDAO, DacDAO dacDAO, EmailService emailService,
      OntologyService ontologyService, StudyDAO studyDAO,
      DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
    this.datasetDAO = dataSetDAO;
    this.daaDAO = daaDAO;
    this.dacDAO = dacDAO;
    this.emailService = emailService;
    this.ontologyService = ontologyService;
    this.studyDAO = studyDAO;
    this.datasetServiceDAO = datasetServiceDAO;
    this.userDAO = userDAO;
  }

  public Collection<DatasetDTO> describeDataSetsByReceiveOrder(List<Integer> dataSetId) {
    return datasetDAO.findDatasetsByReceiveOrder(dataSetId);
  }

  @Deprecated
  public Collection<Dictionary> describeDictionaryByReceiveOrder() {
    return datasetDAO.getMappedFieldsOrderByReceiveOrder();
  }

  public Set<DatasetDTO> findDatasetsByDacIds(List<Integer> dacIds) {
    if (CollectionUtils.isEmpty(dacIds)) {
      throw new BadRequestException("No dataset IDs provided");
    }
    return datasetDAO.findDatasetsByDacIds(dacIds);
  }

  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
    if (CollectionUtils.isEmpty(dacIds)) {
      throw new BadRequestException("No dataset IDs provided");
    }
    return datasetDAO.findDatasetListByDacIds(dacIds);
  }

  /**
   * TODO: Refactor this to throw a NotFoundException instead of returning null
   * Finds a Dataset by a formatted dataset identifier.
   *
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
   * @return the Dataset with the given identifier, if found.
   * @throws IllegalArgumentException if datasetIdentifier is invalid
   */
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
    Dataset d = datasetDAO.findDatasetByAlias(alias);
    if (d == null) {
      return null;
    }

    // technically, it is possible to have two dataset identifiers which
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
      return null;
    }
    return d;
  }
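  // Illustrative behavior of findDatasetByIdentifier (added commentary, not part of the
  // original source), assuming a dataset stored with identifier "DUOS-000007" (alias 7):
  //   findDatasetByIdentifier("DUOS-000007") -> returns the dataset
  //   findDatasetByIdentifier("DUOS-7")      -> parses to the same alias, but the stored
  //                                             identifier differs, so null is returned
  //   findDatasetByIdentifier("not-an-id")   -> IllegalArgumentException per the javadoc above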

  public DatasetDTO createDatasetFromDatasetDTO(DatasetDTO dataset, String name, Integer userId) {
    if (getDatasetByName(name) != null) {
      throw new IllegalArgumentException("Dataset name: " + name + " is already in use");
    }
    Timestamp now = new Timestamp(new Date().getTime());
    Integer createdDatasetId = datasetDAO.inTransaction(h -> {
      try {
        Integer id = h.insertDataset(name, now, userId, dataset.getObjectId(),
            dataset.getDataUse().toString(), dataset.getDacId());
        List<DatasetProperty> propertyList = processDatasetProperties(id, dataset.getProperties());
        h.insertDatasetProperties(propertyList);
        return id;
      } catch (Exception e) {
        if (h != null) {
          h.rollback();
        }
        logger.error("Exception creating dataset with consent: " + e.getMessage());
        throw e;
      }
    });
    dataset.setDataSetId(createdDatasetId);
    return getDatasetDTO(createdDatasetId);
  }

  public Dataset getDatasetByName(String name) {
    String lowercaseName = name.toLowerCase();
    return datasetDAO.getDatasetByName(lowercaseName);
  }

  public Set<String> findAllStudyNames() {
    return datasetDAO.findAllStudyNames();
  }

  public List<String> findAllDatasetNames() {
    return datasetDAO.findAllDatasetNames();
  }

  public Study findStudyById(Integer id) {
    return studyDAO.findStudyById(id);
  }

  public Dataset findDatasetById(Integer id) {
    return datasetDAO.findDatasetById(id);
  }

  public Optional<Dataset> updateDataset(DatasetDTO dataset, Integer datasetId, Integer userId) {
    Timestamp now = new Timestamp(new Date().getTime());

    if (dataset.getDatasetName() == null) {
      throw new IllegalArgumentException("Dataset 'Name' cannot be null");
    }

    Dataset old = findDatasetById(datasetId);
    Set<DatasetProperty> oldProperties = old.getProperties();

    List<DatasetPropertyDTO> updateDatasetPropertyDTOs = dataset.getProperties();
    List<DatasetProperty> updateDatasetProperties = processDatasetProperties(datasetId,
        updateDatasetPropertyDTOs);

    List<DatasetProperty> propertiesToAdd = updateDatasetProperties.stream()
        .filter(p -> oldProperties.stream()
            .noneMatch(op -> op.getPropertyName().equals(p.getPropertyName())))
        .toList();

    List<DatasetProperty> propertiesToUpdate = updateDatasetProperties.stream()
        .filter(p -> oldProperties.stream()
            .noneMatch(p::equals))
        .toList();

    if (propertiesToAdd.isEmpty() && propertiesToUpdate.isEmpty() &&
        dataset.getDatasetName().equals(old.getName())) {
      return Optional.empty();
    }

    updateDatasetProperties(propertiesToUpdate, List.of(), propertiesToAdd);
    datasetDAO.updateDataset(datasetId, dataset.getDatasetName(), now, userId,
        dataset.getDacId());
    Dataset updatedDataset = findDatasetById(datasetId);
    return Optional.of(updatedDataset);
  }
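  // Note (added commentary, not part of the original source): updateDataset above returns
  // Optional.empty() as a no-op signal when neither the dataset name nor any property differs
  // from what is already stored; otherwise it writes the changes and returns the re-fetched
  // dataset.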

  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
    Dataset d = datasetDAO.findDatasetById(datasetId);
    if (d == null) {
      throw new NotFoundException("Dataset not found: " + datasetId);
    }
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new IllegalArgumentException("Admin use only");
    }
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
    return datasetDAO.findDatasetById(datasetId);
  }

  public Dataset syncDatasetDataUseTranslation(Integer datasetId) {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset == null) {
      throw new NotFoundException("Dataset not found");
    }

    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
        DataUseTranslationType.DATASET);
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);

    return datasetDAO.findDatasetById(datasetId);
  }

  private void updateDatasetProperties(List<DatasetProperty> updateProperties,
      List<DatasetProperty> deleteProperties, List<DatasetProperty> addProperties) {
    updateProperties.forEach(p -> datasetDAO
        .updateDatasetProperty(p.getDataSetId(), p.getPropertyKey(),
            p.getPropertyValue().toString()));
    deleteProperties.forEach(
        p -> datasetDAO.deleteDatasetPropertyByKey(p.getDataSetId(), p.getPropertyKey()));
    datasetDAO.insertDatasetProperties(addProperties);
  }

  public DatasetDTO getDatasetDTO(Integer datasetId) {
    Set<DatasetDTO> dataset = datasetDAO.findDatasetDTOWithPropertiesByDatasetId(datasetId);
    DatasetDTO result = new DatasetDTO();
    if (dataset != null && !dataset.isEmpty()) {
      result = dataset.iterator().next();
    }
    if (result.getDataSetId() == null) {
      throw new NotFoundException("Unable to find dataset with id: " + datasetId);
    }
    return result;
  }


  @Deprecated // Use synchronizeDatasetProperties() instead
  public List<DatasetProperty> processDatasetProperties(Integer datasetId,
      List<DatasetPropertyDTO> properties) {
    Date now = new Date();
    List<Dictionary> dictionaries = datasetDAO.getMappedFieldsOrderByReceiveOrder();
    List<String> keys = dictionaries.stream().map(Dictionary::getKey)
        .collect(Collectors.toList());

    return properties.stream()
        .filter(p -> keys.contains(p.getPropertyName()) && !p.getPropertyName()
            .equals(DATASET_NAME_KEY))
        .map(p ->
            new DatasetProperty(datasetId,
                dictionaries.get(keys.indexOf(p.getPropertyName())).getKeyId(),
                p.getPropertyValue(),
                PropertyType.String,
                now)
        )
        .collect(Collectors.toList());
  }

  public List<DatasetPropertyDTO> findInvalidProperties(List<DatasetPropertyDTO> properties) {
    List<Dictionary> dictionaries = datasetDAO.getMappedFieldsOrderByReceiveOrder();
    List<String> keys = dictionaries.stream().map(Dictionary::getKey)
        .collect(Collectors.toList());

    return properties.stream()
        .filter(p -> !keys.contains(p.getPropertyName()))
        .collect(Collectors.toList());
  }

  public List<DatasetPropertyDTO> findDuplicateProperties(List<DatasetPropertyDTO> properties) {
    Set<String> uniqueKeys = properties.stream()
        .map(DatasetPropertyDTO::getPropertyName)
        .collect(Collectors.toSet());
    if (uniqueKeys.size() != properties.size()) {
      List<DatasetPropertyDTO> allDuplicateProperties = new ArrayList<>();
      uniqueKeys.forEach(key -> {
        List<DatasetPropertyDTO> propertiesPerKey = properties.stream()
            .filter(property -> property.getPropertyName().equals(key))
            .collect(Collectors.toList());
        if (propertiesPerKey.size() > 1) {
          allDuplicateProperties.addAll(propertiesPerKey);
        }
      });
      return allDuplicateProperties;
    }
    return Collections.emptyList();
  }

  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset != null) {
      datasetServiceDAO.deleteDataset(dataset, userId);
    }
  }

  public void deleteStudy(Study study, User user) throws Exception {
    datasetServiceDAO.deleteStudy(study, user);
  }

  public List<Dataset> searchDatasets(String query, AccessManagement accessManagement, User user) {
    List<Dataset> datasets = findAllDatasetsByUser(user);
    return datasets.stream().filter(ds -> ds.isDatasetMatch(query, accessManagement)).toList();
  }

  public List<DatasetSummary> searchDatasetSummaries(String query) {
    return datasetDAO.findDatasetSummariesByQuery(query);
  }

  public List<DatasetStudySummary> findAllDatasetStudySummaries() {
    return datasetDAO.findAllDatasetStudySummaries();
  }
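  // Note (added commentary, not part of the original source): findAllDatasetStudySummaries is
  // the new method introduced by this change (DCJ-539, #2368); its body is reported as not yet
  // covered in build #5238.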

  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
    Boolean currentApprovalState = dataset.getDacApproval();
    Integer datasetId = dataset.getDataSetId();
    Dataset datasetReturn = dataset;
    // Only update and fetch the dataset if it hasn't already been approved.
    // If it has, simply return the dataset passed in (which was already queried for in the resource).
    if (currentApprovalState == null || !currentApprovalState) {
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
      datasetReturn = datasetDAO.findDatasetById(datasetId);
    } else {
      if (approval == null || !approval) {
        throw new IllegalArgumentException("Dataset is already approved");
      }
    }

    try {
      // if approval state changed
      if (currentApprovalState != datasetReturn.getDacApproval()) {
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
      }
    } catch (Exception e) {
      logger.error("Unable to notify Data Submitter of dataset approval status: "
          + dataset.getDatasetIdentifier());
    }
    return datasetReturn;
  }
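  // Illustrative behavior of approveDataset (added commentary, not part of the original source):
  //   dacApproval null/false, approval true  -> approval is recorded and the dataset re-fetched
  //   dacApproval null/false, approval false -> denial is recorded and the dataset re-fetched
  //   dacApproval true,       approval true  -> the dataset passed in is returned unchanged
  //   dacApproval true,       approval false -> IllegalArgumentException ("Dataset is already approved")
  // A notification email is attempted whenever the stored approval state changes.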

  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, Boolean approval)
      throws Exception {
    Dac dac = dacDAO.findById(dataset.getDacId());
    if (approval) {
      emailService.sendDatasetApprovedMessage(
          user,
          dac.getName(),
          dataset.getDatasetIdentifier());
    } else {
      if (dac.getEmail() != null) {
        String dacEmail = dac.getEmail();
        emailService.sendDatasetDeniedMessage(
            user,
            dac.getName(),
            dataset.getDatasetIdentifier(),
            dacEmail);
      }
      else {
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
      }
    }

  }

  public List<Dataset> findAllDatasetsByUser(User user) {
    if (user.hasUserRole(UserRoles.ADMIN)) {
      return datasetDAO.findAllDatasets();
    } else {
      List<Dataset> datasets = datasetDAO.getDatasets();
      if (user.hasUserRole(UserRoles.CHAIRPERSON)) {
        List<Dataset> chairDatasets = datasetDAO.findDatasetsByAuthUserEmail(user.getEmail());
        return Stream
            .concat(chairDatasets.stream(), datasets.stream())
            .distinct()
            .collect(Collectors.toList());
      }
      return datasets;
    }
  }

  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
    return datasetDAO.findDatasetsByIdList(datasetIds);
  }

  public List<Dataset> findAllDatasets() {
    return datasetDAO.findAllDatasets();
  }

  public List<Dataset> findDatasetsForChairperson(User user) {
    List<Dac> dacs = dacDAO.findDacsForEmail(user.getEmail());

    return datasetDAO.findDatasetsForChairperson(dacs.stream().map(Dac::getDacId).toList());
  }

  public List<Dataset> findDatasetsByCustodian(User user) {
    return datasetDAO.findDatasetsByCustodian(user.getUserId(), user.getEmail());
  }

  public List<Dataset> findDatasetsForDataSubmitter(User user) {
    return datasetDAO.findDatasetsForDataSubmitter(user.getUserId(), user.getEmail());
  }

  public List<Dataset> findPublicDatasets() {
    return datasetDAO.findPublicDatasets();
  }

  public Study getStudyWithDatasetsById(Integer studyId) {
    try {
      Study study = studyDAO.findStudyById(studyId);
      if (study == null) {
        throw new NotFoundException("Study not found");
      }
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
        study.addDatasets(datasets);
      }
      return study;
    } catch (Exception e) {
      logger.error(e.getMessage());
      throw e;
    }

  }

  public List<ApprovedDataset> getApprovedDatasets(User user) {
    try {
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
      return approvedDatasets;
    } catch (Exception e) {
      logger.error(e.getMessage());
      throw e;
    }
  }

  /**
   * This method is used to convert a dataset into a study if none exists, or if one does, to update
   * the dataset, study, and associated properties with new values. This is an admin function only.
   *
   * @param dataset         The dataset
   * @param studyConversion Study Conversion object
   * @return Updated/created study
   */
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new NotAuthorizedException("Admin use only");
    }
    // Study updates:
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);

    // Dataset updates
    if (studyConversion.getDacId() != null) {
      datasetDAO.updateDatasetDacId(dataset.getDataSetId(), studyConversion.getDacId());
    }
    if (studyConversion.getDataUse() != null) {
      datasetDAO.updateDatasetDataUse(dataset.getDataSetId(),
          studyConversion.getDataUse().toString());
    }
    if (studyConversion.getDataUse() != null) {
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
          DataUseTranslationType.DATASET);
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDataSetId(), translation);
    }
    if (studyConversion.getDatasetName() != null) {
      datasetDAO.updateDatasetName(dataset.getDataSetId(), studyConversion.getDatasetName());
    }

    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
    // Handle "Phenotype/Indication"
    if (studyConversion.getPhenotype() != null) {
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
          studyConversion.getPhenotype());
    }

    // Handle "Species"
    if (studyConversion.getSpecies() != null) {
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
          studyConversion.getSpecies());
    }

    if (studyConversion.getNumberOfParticipants() != null) {
      // Handle "# of participants"
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants", PropertyType.Number,
          studyConversion.getNumberOfParticipants().toString());
    }

    // Handle "Data Location"
    if (studyConversion.getDataLocation() != null) {
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
          studyConversion.getDataLocation());
    }

    if (studyConversion.getUrl() != null) {
      // Handle "URL"
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
          studyConversion.getUrl());
    }

    // Handle "Data Submitter User ID"
    if (studyConversion.getDataSubmitterEmail() != null) {
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
      if (submitter != null) {
        datasetDAO.updateDatasetCreateUserId(dataset.getDataSetId(), user.getUserId());
      }
    }

    return studyDAO.findStudyById(studyId);
  }
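  // Illustrative usage of convertDatasetToStudy (added commentary, not part of the original
  // source), assuming an admin user and a populated StudyConversion:
  //   convertDatasetToStudy(adminUser, dataset, conversion)
  //     -> creates the study named in the conversion (or updates an existing study with that
  //        name), re-points the dataset at it, and applies any DAC, data use, name, and
  //        property values carried by the conversion
  //   convertDatasetToStudy(nonAdminUser, dataset, conversion)
  //     -> throws NotAuthorizedException("Admin use only")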

  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s", user.getEmail(), studyId, custodians));
    Study study = studyDAO.findStudyById(studyId);
    if (study == null) {
      throw new NotFoundException("Study not found");
    }
    Optional<StudyProperty> optionalProp = study.getProperties() == null ?
        Optional.empty() :
        study
        .getProperties()
        .stream()
        .filter(p -> p.getKey().equals(dataCustodianEmail))
        .findFirst();
    if (optionalProp.isPresent()) {
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
    } else {
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
    }
    return studyDAO.findStudyById(studyId);
  }

  /**
   * Ensure that all requested datasetIds exist in the user's list of accepted DAAs
   * @param user The requesting User
   * @param datasetIds The list of dataset ids the user is requesting access to
   */
  public void enforceDAARestrictions(User user, List<Integer> datasetIds) {
    List<Integer> userDaaDatasetIds = daaDAO.findDaaDatasetIdsByUserId(user.getUserId());
    boolean containsAll = new HashSet<>(userDaaDatasetIds).containsAll(datasetIds);
    if (!containsAll) {
      throw new BadRequestException("User does not have appropriate Data Access Agreements for provided datasets");
    }
  }
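  // Illustrative behavior of enforceDAARestrictions (added commentary, not part of the original
  // source), assuming the user's accepted DAAs cover dataset ids [1, 2, 3]:
  //   enforceDAARestrictions(user, List.of(1, 3)) -> returns normally
  //   enforceDAARestrictions(user, List.of(2, 4)) -> BadRequestException, since id 4 is not
  //                                                  covered by any of the user's DAAs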

  /**
   * This method is used to synchronize a new dataset property with values from the study
   * conversion.
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to look for in properties
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
        .findFirst();
    if (maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDataSetId(), maybeProp.get().getPropertyKey(),
          propValue);
    } else {
      dictionaries.stream()
          .filter(d -> d.getKey().equals(dictionaryName))
          .findFirst()
          .ifPresent(dictionary -> {
            DatasetProperty prop = new DatasetProperty();
            prop.setDataSetId(dataset.getDataSetId());
            prop.setPropertyKey(dictionary.getKeyId());
            prop.setSchemaProperty(schemaProperty);
            prop.setPropertyValue(propValue);
            prop.setPropertyType(propertyType);
            prop.setCreateDate(new Date());
            datasetDAO.insertDatasetProperties(List.of(prop));
          });
    }
  }

  /**
   * This method is used to synchronize a legacy dataset property with values from the study
   * conversion.
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to update if necessary
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> p.getPropertyName().equals(dictionaryName))
        .findFirst();
    Optional<Dictionary> dictionary = dictionaries.stream()
        .filter(d -> d.getKey().equals(dictionaryName))
        .findFirst();
    // Legacy property exists, update it.
    if (dictionary.isPresent() && maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDataSetId(), dictionary.get().getKeyId(),
          propValue);
    }
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
    else if (dictionary.isPresent()) {
      DatasetProperty prop = new DatasetProperty();
      prop.setDataSetId(dataset.getDataSetId());
      prop.setPropertyKey(dictionary.get().getKeyId());
      prop.setSchemaProperty(schemaProperty);
      prop.setPropertyValue(propValue);
      prop.setPropertyType(propertyType);
      prop.setCreateDate(new Date());
      datasetDAO.insertDatasetProperties(List.of(prop));
    }
    // Neither the legacy property nor the dictionary term exists, so log a warning.
    else {
      logWarn("Unable to find dictionary term: " + dictionaryName);
    }
  }
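  // Note (added commentary, not part of the original source): the two helpers above differ only
  // in how they locate an existing dataset property to update. newPropConversion matches on the
  // property's schemaProperty, while legacyPropConversion matches on the legacy property name via
  // the dictionary term. Both fall back to inserting a new DatasetProperty when a matching
  // dictionary term exists and no existing property was found; legacyPropConversion additionally
  // logs a warning when the dictionary term is missing.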

  private Integer updateStudyFromConversion(User user, Dataset dataset,
      StudyConversion studyConversion) {
    // Ensure that we are not trying to create a new study with an existing name
    Study study = studyDAO.findStudyByName(studyConversion.getName());
    Integer studyId;
    Integer userId =
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
    // Create or update the study:
    if (study == null) {
      study = studyConversion.createNewStudyStub();
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
          UUID.randomUUID());
      study.setStudyId(studyId);
    } else {
      studyId = study.getStudyId();
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
          studyConversion.getDescription(), studyConversion.getPiName(),
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
          Instant.now());
    }
    datasetDAO.updateStudyId(dataset.getDataSetId(), studyId);

    // Create or update study properties:
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
    // If we don't have any props, we need to add all of the new ones
    if (existingProps == null || existingProps.isEmpty()) {
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    } else {
      // Study props to add:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
      // Study props to update:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    }
    return studyId;
  }

}