DataBiosphere / consent / build #5075 (push, via web-flow)

03 May 2024 08:00PM UTC. Coverage: 76.256% (+0.03%) from 76.226%.

Commit: [DCJ-302][risk=no] Remove duplicate data submitter property (#2321)

0 of 3 new or added lines in 1 file covered (0.0%).
1 existing line in 1 file is now uncovered.
9667 of 12677 relevant lines covered (76.26%).
0.76 hits per line.
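As a quick check, the headline figure follows directly from the line counts above: 9667 / 12677 ≈ 0.76256, i.e. 76.256% of relevant lines covered, shown rounded as 76.26%.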

Source File: /src/main/java/org/broadinstitute/consent/http/service/DatasetService.java (coverage: 51.74%)

The uncovered new lines and the newly uncovered existing line reported for this build all fall in updateStudyFromConversion(), near the end of the file.
package org.broadinstitute.consent.http.service;

import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;

import com.google.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.NotAuthorizedException;
import jakarta.ws.rs.NotFoundException;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
import org.broadinstitute.consent.http.enumeration.PropertyType;
import org.broadinstitute.consent.http.enumeration.UserRoles;
import org.broadinstitute.consent.http.models.ApprovedDataset;
import org.broadinstitute.consent.http.models.Dac;
import org.broadinstitute.consent.http.models.DataUse;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.models.DatasetProperty;
import org.broadinstitute.consent.http.models.DatasetSummary;
import org.broadinstitute.consent.http.models.Dictionary;
import org.broadinstitute.consent.http.models.Study;
import org.broadinstitute.consent.http.models.StudyConversion;
import org.broadinstitute.consent.http.models.StudyProperty;
import org.broadinstitute.consent.http.models.User;
import org.broadinstitute.consent.http.models.dataset_registration_v1.ConsentGroup.AccessManagement;
import org.broadinstitute.consent.http.models.dto.DatasetDTO;
import org.broadinstitute.consent.http.models.dto.DatasetPropertyDTO;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
import org.broadinstitute.consent.http.util.ConsentLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class DatasetService implements ConsentLogger {

  private final Logger logger = LoggerFactory.getLogger(this.getClass());
  public static final String DATASET_NAME_KEY = "Dataset Name";
  private final DatasetDAO datasetDAO;
  private final DacDAO dacDAO;
  private final EmailService emailService;
  private final OntologyService ontologyService;
  private final StudyDAO studyDAO;
  private final DatasetServiceDAO datasetServiceDAO;
  private final UserDAO userDAO;

  @Inject
  public DatasetService(DatasetDAO dataSetDAO, DacDAO dacDAO,
      EmailService emailService, OntologyService ontologyService, StudyDAO studyDAO,
      DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
    this.datasetDAO = dataSetDAO;
    this.dacDAO = dacDAO;
    this.emailService = emailService;
    this.ontologyService = ontologyService;
    this.studyDAO = studyDAO;
    this.datasetServiceDAO = datasetServiceDAO;
    this.userDAO = userDAO;
  }

  public Collection<DatasetDTO> describeDataSetsByReceiveOrder(List<Integer> dataSetId) {
    return datasetDAO.findDatasetsByReceiveOrder(dataSetId);
  }

  @Deprecated
  public Collection<Dictionary> describeDictionaryByReceiveOrder() {
    return datasetDAO.getMappedFieldsOrderByReceiveOrder();
  }

  public Set<DatasetDTO> findDatasetsByDacIds(List<Integer> dacIds) {
    if (CollectionUtils.isEmpty(dacIds)) {
      throw new BadRequestException("No dataset IDs provided");
    }
    return datasetDAO.findDatasetsByDacIds(dacIds);
  }

  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
    if (CollectionUtils.isEmpty(dacIds)) {
      throw new BadRequestException("No dataset IDs provided");
    }
    return datasetDAO.findDatasetListByDacIds(dacIds);
  }

  /**
   * TODO: Refactor this to throw a NotFoundException instead of returning null
   * Finds a Dataset by a formatted dataset identifier.
   *
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
   * @return the Dataset with the given identifier, if found.
   * @throws IllegalArgumentException if datasetIdentifier is invalid
   */
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
    Dataset d = datasetDAO.findDatasetByAlias(alias);
    if (d == null) {
      return null;
    }

    // technically, it is possible to have two dataset identifiers which
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
      return null;
    }
    return d;
  }

  public DatasetDTO createDatasetFromDatasetDTO(DatasetDTO dataset, String name, Integer userId) {
    if (getDatasetByName(name) != null) {
      throw new IllegalArgumentException("Dataset name: " + name + " is already in use");
    }
    Timestamp now = new Timestamp(new Date().getTime());
    Integer createdDatasetId = datasetDAO.inTransaction(h -> {
      try {
        Integer id = h.insertDataset(name, now, userId, dataset.getObjectId(),
            dataset.getDataUse().toString(), dataset.getDacId());
        List<DatasetProperty> propertyList = processDatasetProperties(id, dataset.getProperties());
        h.insertDatasetProperties(propertyList);
        return id;
      } catch (Exception e) {
        if (h != null) {
          h.rollback();
        }
        logger.error("Exception creating dataset with consent: " + e.getMessage());
        throw e;
      }
    });
    dataset.setDataSetId(createdDatasetId);
    return getDatasetDTO(createdDatasetId);
  }

  public Dataset getDatasetByName(String name) {
    String lowercaseName = name.toLowerCase();
    return datasetDAO.getDatasetByName(lowercaseName);
  }

  public Set<String> findAllStudyNames() {
    return datasetDAO.findAllStudyNames();
  }

  public List<String> findAllDatasetNames() {
    return datasetDAO.findAllDatasetNames();
  }

  public Study findStudyById(Integer id) {
    return studyDAO.findStudyById(id);
  }

  public Dataset findDatasetById(Integer id) {
    return datasetDAO.findDatasetById(id);
  }

  public Optional<Dataset> updateDataset(DatasetDTO dataset, Integer datasetId, Integer userId) {
    Timestamp now = new Timestamp(new Date().getTime());

    if (dataset.getDatasetName() == null) {
      throw new IllegalArgumentException("Dataset 'Name' cannot be null");
    }

    Dataset old = findDatasetById(datasetId);
    Set<DatasetProperty> oldProperties = old.getProperties();

    List<DatasetPropertyDTO> updateDatasetPropertyDTOs = dataset.getProperties();
    List<DatasetProperty> updateDatasetProperties = processDatasetProperties(datasetId,
        updateDatasetPropertyDTOs);

    List<DatasetProperty> propertiesToAdd = updateDatasetProperties.stream()
        .filter(p -> oldProperties.stream()
            .noneMatch(op -> op.getPropertyName().equals(p.getPropertyName())))
        .toList();

    List<DatasetProperty> propertiesToUpdate = updateDatasetProperties.stream()
        .filter(p -> oldProperties.stream()
            .noneMatch(p::equals))
        .toList();

    if (propertiesToAdd.isEmpty() && propertiesToUpdate.isEmpty() &&
        dataset.getDatasetName().equals(old.getName())) {
      return Optional.empty();
    }

    updateDatasetProperties(propertiesToUpdate, List.of(), propertiesToAdd);
    datasetDAO.updateDataset(datasetId, dataset.getDatasetName(), now, userId,
        dataset.getDacId());
    Dataset updatedDataset = findDatasetById(datasetId);
    return Optional.of(updatedDataset);
  }

  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
    Dataset d = datasetDAO.findDatasetById(datasetId);
    if (d == null) {
      throw new NotFoundException("Dataset not found: " + datasetId);
    }
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new IllegalArgumentException("Admin use only");
    }
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
    return datasetDAO.findDatasetById(datasetId);
  }

  public Dataset syncDatasetDataUseTranslation(Integer datasetId) {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset == null) {
      throw new NotFoundException("Dataset not found");
    }

    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
        DataUseTranslationType.DATASET);
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);

    return datasetDAO.findDatasetById(datasetId);
  }

  private void updateDatasetProperties(List<DatasetProperty> updateProperties,
      List<DatasetProperty> deleteProperties, List<DatasetProperty> addProperties) {
    updateProperties.forEach(p -> datasetDAO
        .updateDatasetProperty(p.getDataSetId(), p.getPropertyKey(),
            p.getPropertyValue().toString()));
    deleteProperties.forEach(
        p -> datasetDAO.deleteDatasetPropertyByKey(p.getDataSetId(), p.getPropertyKey()));
    datasetDAO.insertDatasetProperties(addProperties);
  }

  public DatasetDTO getDatasetDTO(Integer datasetId) {
    Set<DatasetDTO> dataset = datasetDAO.findDatasetDTOWithPropertiesByDatasetId(datasetId);
    DatasetDTO result = new DatasetDTO();
    if (dataset != null && !dataset.isEmpty()) {
      result = dataset.iterator().next();
    }
    if (result.getDataSetId() == null) {
      throw new NotFoundException("Unable to find dataset with id: " + datasetId);
    }
    return result;
  }


  @Deprecated // Use synchronizeDatasetProperties() instead
  public List<DatasetProperty> processDatasetProperties(Integer datasetId,
      List<DatasetPropertyDTO> properties) {
    Date now = new Date();
    List<Dictionary> dictionaries = datasetDAO.getMappedFieldsOrderByReceiveOrder();
    List<String> keys = dictionaries.stream().map(Dictionary::getKey)
        .collect(Collectors.toList());

    return properties.stream()
        .filter(p -> keys.contains(p.getPropertyName()) && !p.getPropertyName()
            .equals(DATASET_NAME_KEY))
        .map(p ->
            new DatasetProperty(datasetId,
                dictionaries.get(keys.indexOf(p.getPropertyName())).getKeyId(),
                p.getPropertyValue(),
                PropertyType.String,
                now)
        )
        .collect(Collectors.toList());
  }

  public List<DatasetPropertyDTO> findInvalidProperties(List<DatasetPropertyDTO> properties) {
    List<Dictionary> dictionaries = datasetDAO.getMappedFieldsOrderByReceiveOrder();
    List<String> keys = dictionaries.stream().map(Dictionary::getKey)
        .collect(Collectors.toList());

    return properties.stream()
        .filter(p -> !keys.contains(p.getPropertyName()))
        .collect(Collectors.toList());
  }

  public List<DatasetPropertyDTO> findDuplicateProperties(List<DatasetPropertyDTO> properties) {
    Set<String> uniqueKeys = properties.stream()
        .map(DatasetPropertyDTO::getPropertyName)
        .collect(Collectors.toSet());
    if (uniqueKeys.size() != properties.size()) {
      List<DatasetPropertyDTO> allDuplicateProperties = new ArrayList<>();
      uniqueKeys.forEach(key -> {
        List<DatasetPropertyDTO> propertiesPerKey = properties.stream()
            .filter(property -> property.getPropertyName().equals(key))
            .collect(Collectors.toList());
        if (propertiesPerKey.size() > 1) {
          allDuplicateProperties.addAll(propertiesPerKey);
        }
      });
      return allDuplicateProperties;
    }
    return Collections.emptyList();
  }

  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset != null) {
      datasetServiceDAO.deleteDataset(dataset, userId);
    }
  }

  public void deleteStudy(Study study, User user) throws Exception {
    datasetServiceDAO.deleteStudy(study, user);
  }

  public List<Dataset> searchDatasets(String query, AccessManagement accessManagement, User user) {
    List<Dataset> datasets = findAllDatasetsByUser(user);
    return datasets.stream().filter(ds -> ds.isDatasetMatch(query, accessManagement)).toList();
  }

  public List<DatasetSummary> searchDatasetSummaries(String query) {
    return datasetDAO.findDatasetSummariesByQuery(query);
  }

  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
    Boolean currentApprovalState = dataset.getDacApproval();
    Integer datasetId = dataset.getDataSetId();
    Dataset datasetReturn = dataset;
    //Only update and fetch the dataset if it hasn't already been approved
    //If it has, simply return the dataset in the argument (which was already queried for in the resource)
    if (currentApprovalState == null || !currentApprovalState) {
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
      datasetReturn = datasetDAO.findDatasetById(datasetId);
    } else {
      if (approval == null || !approval) {
        throw new IllegalArgumentException("Dataset is already approved");
      }
    }

    try {
      // if approval state changed
      if (currentApprovalState != datasetReturn.getDacApproval()) {
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
      }
    } catch (Exception e) {
      logger.error("Unable to notify Data Submitter of dataset approval status: "
          + dataset.getDatasetIdentifier());
    }
    return datasetReturn;
  }

  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, Boolean approval)
      throws Exception {
    Dac dac = dacDAO.findById(dataset.getDacId());
    if (approval) {
      emailService.sendDatasetApprovedMessage(
          user,
          dac.getName(),
          dataset.getDatasetIdentifier());
    } else {
      if (dac.getEmail() != null) {
        String dacEmail = dac.getEmail();
        emailService.sendDatasetDeniedMessage(
            user,
            dac.getName(),
            dataset.getDatasetIdentifier(),
            dacEmail);
      }
      else {
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
      }
    }

  }

  public List<Dataset> findAllDatasetsByUser(User user) {
    if (user.hasUserRole(UserRoles.ADMIN)) {
      return datasetDAO.findAllDatasets();
    } else {
      List<Dataset> datasets = datasetDAO.getDatasets();
      if (user.hasUserRole(UserRoles.CHAIRPERSON)) {
        List<Dataset> chairDatasets = datasetDAO.findDatasetsByAuthUserEmail(user.getEmail());
        return Stream
            .concat(chairDatasets.stream(), datasets.stream())
            .distinct()
            .collect(Collectors.toList());
      }
      return datasets;
    }
  }

  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
    return datasetDAO.findDatasetsByIdList(datasetIds);
  }

  public List<Dataset> findAllDatasets() {
    return datasetDAO.findAllDatasets();
  }

  public List<Dataset> findDatasetsForChairperson(User user) {
    List<Dac> dacs = dacDAO.findDacsForEmail(user.getEmail());

    return datasetDAO.findDatasetsForChairperson(dacs.stream().map(Dac::getDacId).toList());
  }

  public List<Dataset> findDatasetsByCustodian(User user) {
    return datasetDAO.findDatasetsByCustodian(user.getUserId(), user.getEmail());
  }

  public List<Dataset> findDatasetsForDataSubmitter(User user) {
    return datasetDAO.findDatasetsForDataSubmitter(user.getUserId(), user.getEmail());
  }

  public List<Dataset> findPublicDatasets() {
    return datasetDAO.findPublicDatasets();
  }

  public Study getStudyWithDatasetsById(Integer studyId) {
    try {
      Study study = studyDAO.findStudyById(studyId);
      if (study == null) {
        throw new NotFoundException("Study not found");
      }
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
        study.addDatasets(datasets);
      }
      return study;
    } catch (Exception e) {
      logger.error(e.getMessage());
      throw e;
    }

  }

  public List<ApprovedDataset> getApprovedDatasets(User user) {
    try {
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
      return approvedDatasets;
    } catch (Exception e) {
      logger.error(e.getMessage());
      throw e;
    }
  }

  /**
   * This method is used to convert a dataset into a study if none exists, or if one does, to update
   * the dataset, study, and associated properties with new values. This is an admin function only.
   *
   * @param dataset         The dataset
   * @param studyConversion Study Conversion object
   * @return Updated/created study
   */
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new NotAuthorizedException("Admin use only");
    }
    // Study updates:
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);

    // Dataset updates
    if (studyConversion.getDacId() != null) {
      datasetDAO.updateDatasetDacId(dataset.getDataSetId(), studyConversion.getDacId());
    }
    if (studyConversion.getDataUse() != null) {
      datasetDAO.updateDatasetDataUse(dataset.getDataSetId(),
          studyConversion.getDataUse().toString());
    }
    if (studyConversion.getDataUse() != null) {
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
          DataUseTranslationType.DATASET);
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDataSetId(), translation);
    }
    if (studyConversion.getDatasetName() != null) {
      datasetDAO.updateDatasetName(dataset.getDataSetId(), studyConversion.getDatasetName());
    }

    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
    // Handle "Phenotype/Indication"
    if (studyConversion.getPhenotype() != null) {
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
          studyConversion.getPhenotype());
    }

    // Handle "Species"
    if (studyConversion.getSpecies() != null) {
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
          studyConversion.getSpecies());
    }

    if (studyConversion.getNumberOfParticipants() != null) {
      // Handle "# of participants"
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants", PropertyType.Number,
          studyConversion.getNumberOfParticipants().toString());
    }

    // Handle "Data Location"
    if (studyConversion.getDataLocation() != null) {
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
          studyConversion.getDataLocation());
    }

    if (studyConversion.getUrl() != null) {
      // Handle "URL"
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
          studyConversion.getUrl());
    }

    // Handle "Data Submitter User ID"
    if (studyConversion.getDataSubmitterEmail() != null) {
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
      if (submitter != null) {
        datasetDAO.updateDatasetCreateUserId(dataset.getDataSetId(), user.getUserId());
      }
    }

    return studyDAO.findStudyById(studyId);
  }

  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s", user.getEmail(), studyId, custodians));
    Study study = studyDAO.findStudyById(studyId);
    if (study == null) {
      throw new NotFoundException("Study not found");
    }
    Optional<StudyProperty> optionalProp = study.getProperties() == null ?
        Optional.empty() :
        study
        .getProperties()
        .stream()
        .filter(p -> p.getKey().equals(dataCustodianEmail))
        .findFirst();
    if (optionalProp.isPresent()) {
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
    } else {
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
    }
    return studyDAO.findStudyById(studyId);
  }

  /**
   * This method is used to synchronize a new dataset property with values from the study
   * conversion
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to look for in properties
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
        .findFirst();
    if (maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDataSetId(), maybeProp.get().getPropertyKey(),
          propValue);
    } else {
      dictionaries.stream()
          .filter(d -> d.getKey().equals(dictionaryName))
          .findFirst()
          .ifPresent(dictionary -> {
            DatasetProperty prop = new DatasetProperty();
            prop.setDataSetId(dataset.getDataSetId());
            prop.setPropertyKey(dictionary.getKeyId());
            prop.setSchemaProperty(schemaProperty);
            prop.setPropertyValue(propValue);
            prop.setPropertyType(propertyType);
            prop.setCreateDate(new Date());
            datasetDAO.insertDatasetProperties(List.of(prop));
          });
    }
  }

  /**
   * This method is used to synchronize a legacy dataset property with values from the study
   * conversion
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to update if necessary
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> p.getPropertyName().equals(dictionaryName))
        .findFirst();
    Optional<Dictionary> dictionary = dictionaries.stream()
        .filter(d -> d.getKey().equals(dictionaryName))
        .findFirst();
    // Legacy property exists, update it.
    if (dictionary.isPresent() && maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDataSetId(), dictionary.get().getKeyId(),
          propValue);
    }
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
    else if (dictionary.isPresent()) {
      DatasetProperty prop = new DatasetProperty();
      prop.setDataSetId(dataset.getDataSetId());
      prop.setPropertyKey(dictionary.get().getKeyId());
      prop.setSchemaProperty(schemaProperty);
      prop.setPropertyValue(propValue);
      prop.setPropertyType(propertyType);
      prop.setCreateDate(new Date());
      datasetDAO.insertDatasetProperties(List.of(prop));
    }
    // Neither the legacy property nor the dictionary term exists; log a warning.
    else {
      logWarn("Unable to find dictionary term: " + dictionaryName);
    }
  }

  private Integer updateStudyFromConversion(User user, Dataset dataset,
      StudyConversion studyConversion) {
    // Ensure that we are not trying to create a new study with an existing name
    Study study = studyDAO.findStudyByName(studyConversion.getName());
    Integer studyId;
    Integer userId =
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
    // Create or update the study:
    if (study == null) {
      study = studyConversion.createNewStudyStub();
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
          UUID.randomUUID());
      study.setStudyId(studyId);
    } else {
      studyId = study.getStudyId();
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
          studyConversion.getDescription(), studyConversion.getPiName(),
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
          Instant.now());
    }
    datasetDAO.updateStudyId(dataset.getDataSetId(), studyId);

    // Create or update study properties:
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
    // If we don't have any props, we need to add all of the new ones
    if (existingProps == null || existingProps.isEmpty()) {
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    } else {
      // Study props to add:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
      // Study props to update:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    }
    return studyId;
  }

}
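For orientation, the snippet below is a minimal usage sketch of the service shown above; it is not part of the file under coverage. It assumes the collaborators (the DAOs, EmailService, and OntologyService) are supplied by the application's existing Guice wiring, as the @Inject constructor suggests, and the sketch class and its helper method are hypothetical.

// Hypothetical example class; it only mirrors DatasetService's @Inject constructor and
// exercises one read method from the file above.
package org.broadinstitute.consent.http.service;

import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;

public class DatasetServiceUsageSketch {

  // The caller passes in already-constructed collaborators (e.g. resolved from Guice).
  public static Dataset lookupByIdentifier(
      DatasetDAO datasetDAO, DacDAO dacDAO, EmailService emailService,
      OntologyService ontologyService, StudyDAO studyDAO,
      DatasetServiceDAO datasetServiceDAO, UserDAO userDAO, String identifier) {
    DatasetService service = new DatasetService(
        datasetDAO, dacDAO, emailService, ontologyService, studyDAO, datasetServiceDAO, userDAO);
    // Per findDatasetByIdentifier above, this returns null when the alias resolves but the
    // zero-padded identifier does not match exactly (e.g. DUOS-5 vs DUOS-00005).
    return service.findDatasetByIdentifier(identifier);
  }
}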