• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

DataBiosphere / consent / #5344

03 Sep 2024 10:52AM UTC coverage: 78.293% (-0.03%) from 78.324%
#5344

push

web-flow
DCJ-633: Remove unused dataset search API and supporting code (#2394)

9879 of 12618 relevant lines covered (78.29%)

0.78 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

54.4
/src/main/java/org/broadinstitute/consent/http/service/DatasetService.java
1
package org.broadinstitute.consent.http.service;
2

3
import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;
4

5
import com.google.common.collect.Lists;
6
import com.google.gson.Gson;
7
import com.google.inject.Inject;
8
import jakarta.ws.rs.BadRequestException;
9
import jakarta.ws.rs.NotAuthorizedException;
10
import jakarta.ws.rs.NotFoundException;
11
import jakarta.ws.rs.core.StreamingOutput;
12
import java.io.IOException;
13
import java.sql.Timestamp;
14
import java.time.Instant;
15
import java.util.ArrayList;
16
import java.util.Collections;
17
import java.util.Date;
18
import java.util.HashSet;
19
import java.util.List;
20
import java.util.Objects;
21
import java.util.Optional;
22
import java.util.Set;
23
import java.util.UUID;
24
import java.util.stream.Collectors;
25
import org.apache.commons.collections4.CollectionUtils;
26
import org.broadinstitute.consent.http.db.DaaDAO;
27
import org.broadinstitute.consent.http.db.DacDAO;
28
import org.broadinstitute.consent.http.db.DatasetDAO;
29
import org.broadinstitute.consent.http.db.StudyDAO;
30
import org.broadinstitute.consent.http.db.UserDAO;
31
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
32
import org.broadinstitute.consent.http.enumeration.PropertyType;
33
import org.broadinstitute.consent.http.enumeration.UserRoles;
34
import org.broadinstitute.consent.http.models.ApprovedDataset;
35
import org.broadinstitute.consent.http.models.Dac;
36
import org.broadinstitute.consent.http.models.DataUse;
37
import org.broadinstitute.consent.http.models.Dataset;
38
import org.broadinstitute.consent.http.models.DatasetProperty;
39
import org.broadinstitute.consent.http.models.DatasetStudySummary;
40
import org.broadinstitute.consent.http.models.DatasetSummary;
41
import org.broadinstitute.consent.http.models.Dictionary;
42
import org.broadinstitute.consent.http.models.Study;
43
import org.broadinstitute.consent.http.models.StudyConversion;
44
import org.broadinstitute.consent.http.models.StudyProperty;
45
import org.broadinstitute.consent.http.models.User;
46
import org.broadinstitute.consent.http.models.dto.DatasetDTO;
47
import org.broadinstitute.consent.http.models.dto.DatasetPropertyDTO;
48
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
49
import org.broadinstitute.consent.http.util.ConsentLogger;
50
import org.broadinstitute.consent.http.util.gson.GsonUtil;
51
import org.slf4j.Logger;
52
import org.slf4j.LoggerFactory;
53

54

55
public class DatasetService implements ConsentLogger {
56

57
  // SLF4J logger for this service; the class also inherits log helpers
  // (logWarn / logInfo / logException) from the ConsentLogger interface.
  private final Logger logger = LoggerFactory.getLogger(this.getClass());
  // Dictionary key under which a dataset's display name is stored; property lists
  // exclude this key because the name is updated through datasetDAO.updateDataset().
  public static final String DATASET_NAME_KEY = "Dataset Name";
  private final DatasetDAO datasetDAO;
  private final DaaDAO daaDAO;
  private final DacDAO dacDAO;
  private final EmailService emailService;
  private final OntologyService ontologyService;
  private final StudyDAO studyDAO;
  private final DatasetServiceDAO datasetServiceDAO;
  private final UserDAO userDAO;
  // Batch size used by findAllDatasetsAsStreamingOutput(); overridable via
  // setDatasetBatchSize() (primarily for tests).
  public Integer datasetBatchSize = 50;

  /**
   * Guice-injected constructor wiring the DAOs and collaborating services.
   */
  @Inject
  public DatasetService(DatasetDAO dataSetDAO, DaaDAO daaDAO, DacDAO dacDAO, EmailService emailService,
      OntologyService ontologyService, StudyDAO studyDAO,
      DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
    this.datasetDAO = dataSetDAO;
    this.daaDAO = daaDAO;
    this.dacDAO = dacDAO;
    this.emailService = emailService;
    this.ontologyService = ontologyService;
    this.studyDAO = studyDAO;
    this.datasetServiceDAO = datasetServiceDAO;
    this.userDAO = userDAO;
  }
82

83
  public Set<DatasetDTO> findDatasetsByDacIds(List<Integer> dacIds) {
84
    if (CollectionUtils.isEmpty(dacIds)) {
1✔
85
      throw new BadRequestException("No dataset IDs provided");
1✔
86
    }
87
    return datasetDAO.findDatasetsByDacIds(dacIds);
1✔
88
  }
89

90
  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
91
    if (CollectionUtils.isEmpty(dacIds)) {
1✔
92
      throw new BadRequestException("No dataset IDs provided");
1✔
93
    }
94
    return datasetDAO.findDatasetListByDacIds(dacIds);
1✔
95
  }
96

97
  /**
98
   * TODO: Refactor this to throw a NotFoundException instead of returning null
99
   * Finds a Dataset by a formatted dataset identifier.
100
   *
101
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
102
   * @return the Dataset with the given identifier, if found.
103
   * @throws IllegalArgumentException if datasetIdentifier is invalid
104
   */
105
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
106
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
1✔
107
    Dataset d = datasetDAO.findDatasetByAlias(alias);
1✔
108
    if (d == null) {
1✔
109
      return null;
1✔
110
    }
111

112
    // technically, it is possible to have two dataset identifiers which
113
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
114
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
1✔
115
      return null;
1✔
116
    }
117
    return d;
1✔
118
  }
119

120
  public Dataset getDatasetByName(String name) {
121
    String lowercaseName = name.toLowerCase();
1✔
122
    return datasetDAO.getDatasetByName(lowercaseName);
1✔
123
  }
124

125
  // Returns the names of all studies known to the dataset store.
  public Set<String> findAllStudyNames() {
    return datasetDAO.findAllStudyNames();
  }

  // Returns the names of all datasets.
  public List<String> findAllDatasetNames() {
    return datasetDAO.findAllDatasetNames();
  }

  // Looks up a study by id; presumably returns null when not found (callers
  // elsewhere null-check this DAO) — TODO confirm DAO contract.
  public Study findStudyById(Integer id) {
    return studyDAO.findStudyById(id);
  }

  // Looks up a dataset by id; returns null when not found (see null checks in
  // updateDatasetDataUse and syncDatasetDataUseTranslation).
  public Dataset findDatasetById(Integer id) {
    return datasetDAO.findDatasetById(id);
  }
140

141
  public Optional<Dataset> updateDataset(DatasetDTO dataset, Integer datasetId, Integer userId) {
142
    Timestamp now = new Timestamp(new Date().getTime());
1✔
143

144
    if (dataset.getDatasetName() == null) {
1✔
145
      throw new IllegalArgumentException("Dataset 'Name' cannot be null");
×
146
    }
147

148
    Dataset old = findDatasetById(datasetId);
1✔
149
    Set<DatasetProperty> oldProperties = old.getProperties();
1✔
150

151
    List<DatasetPropertyDTO> updateDatasetPropertyDTOs = dataset.getProperties();
1✔
152
    List<DatasetProperty> updateDatasetProperties = processDatasetProperties(datasetId,
1✔
153
        updateDatasetPropertyDTOs);
154

155
    List<DatasetProperty> propertiesToAdd = updateDatasetProperties.stream()
1✔
156
        .filter(p -> oldProperties.stream()
1✔
157
            .noneMatch(op -> op.getPropertyName().equals(p.getPropertyName())))
1✔
158
        .toList();
1✔
159

160
    List<DatasetProperty> propertiesToUpdate = updateDatasetProperties.stream()
1✔
161
        .filter(p -> oldProperties.stream()
1✔
162
            .noneMatch(p::equals))
1✔
163
        .toList();
1✔
164

165
    if (propertiesToAdd.isEmpty() && propertiesToUpdate.isEmpty() &&
1✔
166
        dataset.getDatasetName().equals(old.getName())) {
1✔
167
      return Optional.empty();
1✔
168
    }
169

170
    updateDatasetProperties(propertiesToUpdate, List.of(), propertiesToAdd);
1✔
171
    datasetDAO.updateDataset(datasetId, dataset.getDatasetName(), now, userId,
1✔
172
        dataset.getDacId());
1✔
173
    Dataset updatedDataset = findDatasetById(datasetId);
1✔
174
    return Optional.of(updatedDataset);
1✔
175
  }
176

177
  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
178
    Dataset d = datasetDAO.findDatasetById(datasetId);
1✔
179
    if (d == null) {
1✔
180
      throw new NotFoundException("Dataset not found: " + datasetId);
×
181
    }
182
    if (!user.hasUserRole(UserRoles.ADMIN)) {
1✔
183
      throw new IllegalArgumentException("Admin use only");
1✔
184
    }
185
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
1✔
186
    return datasetDAO.findDatasetById(datasetId);
1✔
187
  }
188

189
  public Dataset syncDatasetDataUseTranslation(Integer datasetId) {
190
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
1✔
191
    if (dataset == null) {
1✔
192
      throw new NotFoundException("Dataset not found");
1✔
193
    }
194

195
    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
1✔
196
        DataUseTranslationType.DATASET);
197
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);
1✔
198

199
    return datasetDAO.findDatasetById(datasetId);
1✔
200
  }
201

202
  private void updateDatasetProperties(List<DatasetProperty> updateProperties,
203
      List<DatasetProperty> deleteProperties, List<DatasetProperty> addProperties) {
204
    updateProperties.forEach(p -> datasetDAO
1✔
205
        .updateDatasetProperty(p.getDataSetId(), p.getPropertyKey(),
1✔
206
            p.getPropertyValue().toString()));
1✔
207
    deleteProperties.forEach(
1✔
208
        p -> datasetDAO.deleteDatasetPropertyByKey(p.getDataSetId(), p.getPropertyKey()));
×
209
    datasetDAO.insertDatasetProperties(addProperties);
1✔
210
  }
1✔
211

212
  @Deprecated // Use synchronizeDatasetProperties() instead
213
  public List<DatasetProperty> processDatasetProperties(Integer datasetId,
214
      List<DatasetPropertyDTO> properties) {
215
    Date now = new Date();
1✔
216
    List<Dictionary> dictionaries = datasetDAO.getMappedFieldsOrderByReceiveOrder();
1✔
217
    List<String> keys = dictionaries.stream().map(Dictionary::getKey)
1✔
218
        .collect(Collectors.toList());
1✔
219

220
    return properties.stream()
1✔
221
        .filter(p -> keys.contains(p.getPropertyName()) && !p.getPropertyName()
1✔
222
            .equals(DATASET_NAME_KEY))
1✔
223
        .map(p ->
1✔
224
            new DatasetProperty(datasetId,
1✔
225
                dictionaries.get(keys.indexOf(p.getPropertyName())).getKeyId(),
1✔
226
                p.getPropertyValue(),
1✔
227
                PropertyType.String,
228
                now)
229
        )
230
        .collect(Collectors.toList());
1✔
231
  }
232

233
  public List<DatasetPropertyDTO> findInvalidProperties(List<DatasetPropertyDTO> properties) {
234
    List<Dictionary> dictionaries = datasetDAO.getMappedFieldsOrderByReceiveOrder();
1✔
235
    List<String> keys = dictionaries.stream().map(Dictionary::getKey)
1✔
236
        .collect(Collectors.toList());
1✔
237

238
    return properties.stream()
1✔
239
        .filter(p -> !keys.contains(p.getPropertyName()))
1✔
240
        .collect(Collectors.toList());
1✔
241
  }
242

243
  public List<DatasetPropertyDTO> findDuplicateProperties(List<DatasetPropertyDTO> properties) {
244
    Set<String> uniqueKeys = properties.stream()
1✔
245
        .map(DatasetPropertyDTO::getPropertyName)
1✔
246
        .collect(Collectors.toSet());
1✔
247
    if (uniqueKeys.size() != properties.size()) {
1✔
248
      List<DatasetPropertyDTO> allDuplicateProperties = new ArrayList<>();
1✔
249
      uniqueKeys.forEach(key -> {
1✔
250
        List<DatasetPropertyDTO> propertiesPerKey = properties.stream()
1✔
251
            .filter(property -> property.getPropertyName().equals(key))
1✔
252
            .collect(Collectors.toList());
1✔
253
        if (propertiesPerKey.size() > 1) {
1✔
254
          allDuplicateProperties.addAll(propertiesPerKey);
1✔
255
        }
256
      });
1✔
257
      return allDuplicateProperties;
1✔
258
    }
259
    return Collections.emptyList();
×
260
  }
261

262
  /**
   * Deletes a dataset via the service DAO if it exists; silently no-ops for an
   * unknown dataset id.
   *
   * @param datasetId id of the dataset to delete
   * @param userId    id of the user performing the deletion
   * @throws Exception propagated from the service DAO delete
   */
  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset != null) {
      datasetServiceDAO.deleteDataset(dataset, userId);
    }
  }

  /**
   * Deletes a study via the service DAO on behalf of the given user.
   *
   * @throws Exception propagated from the service DAO delete
   */
  public void deleteStudy(Study study, User user) throws Exception {
    datasetServiceDAO.deleteStudy(study, user);
  }
272

273
  // Returns dataset summaries matching the given free-text query.
  public List<DatasetSummary> searchDatasetSummaries(String query) {
    return datasetDAO.findDatasetSummariesByQuery(query);
  }

  // Returns summaries of all datasets joined with their studies.
  public List<DatasetStudySummary> findAllDatasetStudySummaries() {
    return datasetDAO.findAllDatasetStudySummaries();
  }
280

281
  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
282
    Boolean currentApprovalState = dataset.getDacApproval();
1✔
283
    Integer datasetId = dataset.getDataSetId();
1✔
284
    Dataset datasetReturn = dataset;
1✔
285
    //Only update and fetch the dataset if it hasn't already been approved
286
    //If it has, simply returned the dataset in the argument (which was already queried for in the resource)
287
    if (currentApprovalState == null || !currentApprovalState) {
1✔
288
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
1✔
289
      datasetReturn = datasetDAO.findDatasetById(datasetId);
1✔
290
    } else {
291
      if (approval == null || !approval) {
1✔
292
        throw new IllegalArgumentException("Dataset is already approved");
1✔
293
      }
294
    }
295

296
    try {
297
      // if approval state changed
298
      if (currentApprovalState != datasetReturn.getDacApproval()) {
1✔
299
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
1✔
300
      }
301
    } catch (Exception e) {
×
302
      logger.error("Unable to notifier Data Submitter of dataset approval status: "
×
303
          + dataset.getDatasetIdentifier());
×
304
    }
1✔
305
    return datasetReturn;
1✔
306
  }
307

308
  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, Boolean approval)
309
      throws Exception {
310
    Dac dac = dacDAO.findById(dataset.getDacId());
1✔
311
    if (approval) {
1✔
312
      emailService.sendDatasetApprovedMessage(
1✔
313
          user,
314
          dac.getName(),
1✔
315
          dataset.getDatasetIdentifier());
1✔
316
    } else {
317
      if (dac.getEmail() != null) {
1✔
318
        String dacEmail = dac.getEmail();
1✔
319
        emailService.sendDatasetDeniedMessage(
1✔
320
            user,
321
            dac.getName(),
1✔
322
            dataset.getDatasetIdentifier(),
1✔
323
            dacEmail);
324
      }
1✔
325
      else {
326
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
1✔
327
      }
328
    }
329

330
  }
1✔
331

332
  // Fetches the datasets whose ids appear in the given list.
  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
    return datasetDAO.findDatasetsByIdList(datasetIds);
  }

  // Loads every dataset in one query; prefer findAllDatasetsAsStreamingOutput(),
  // which fetches in batches, for large result sets.
  @Deprecated
  public List<Dataset> findAllDatasets() {
    return datasetDAO.findAllDatasets();
  }

  // Returns the ids of all datasets.
  public List<Integer> findAllDatasetIds() {
    return datasetDAO.findAllDatasetIds();
  }
344

345
  public StreamingOutput findAllDatasetsAsStreamingOutput() {
346
    List<Integer> datasetIds = datasetDAO.findAllDatasetIds();
1✔
347
    final List<List<Integer>> datasetIdSubLists = Lists.partition(datasetIds, datasetBatchSize);
1✔
348
    final List<Integer> lastSubList = datasetIdSubLists.get(datasetIdSubLists.size() - 1);
1✔
349
    final Integer lastIndex = lastSubList.get(lastSubList.size() - 1);
1✔
350
    Gson gson = GsonUtil.buildGson();
1✔
351
    return output -> {
1✔
352
      output.write("[".getBytes());
1✔
353
      datasetIdSubLists.forEach(subList -> {
1✔
354
        List<Dataset> datasets = findDatasetsByIds(subList);
1✔
355
        datasets.forEach(d -> {
1✔
356
          try {
357
            output.write(gson.toJson(d).getBytes());
1✔
358
            if (!Objects.equals(d.getDataSetId(), lastIndex)) {
1✔
359
              output.write(",".getBytes());
1✔
360
            }
361
            output.write("\n".getBytes());
1✔
362
          } catch (IOException e) {
×
363
            logException("Error writing dataset to streaming output, dataset id: " + d.getDataSetId(), e);
×
364
          }
1✔
365
        });
1✔
366
      });
1✔
367
      output.write("]".getBytes());
1✔
368
    };
1✔
369
  }
370

371
  public Study getStudyWithDatasetsById(Integer studyId) {
372
    try {
373
      Study study = studyDAO.findStudyById(studyId);
×
374
      if (study == null) {
×
375
        throw new NotFoundException("Study not found");
×
376
      }
377
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
×
378
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
×
379
        study.addDatasets(datasets);
×
380
      }
381
      return study;
×
382
    } catch (Exception e) {
×
383
      logger.error(e.getMessage());
×
384
      throw e;
×
385
    }
386

387
  }
388

389
  public List<ApprovedDataset> getApprovedDatasets(User user) {
390
    try {
391
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
1✔
392
      return approvedDatasets;
1✔
393
    } catch (Exception e) {
×
394
      logger.error(e.getMessage());
×
395
      throw e;
×
396
    }
397
  }
398

399
  /**
400
   * This method is used to convert a dataset into a study if none exist, or if one does, to update
401
   * the dataset, study, and associated properties with new values. This is an admin function only.
402
   *
403
   * @param dataset         The dataset
404
   * @param studyConversion Study Conversion object
405
   * @return Updated/created study
406
   */
407
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
408
    if (!user.hasUserRole(UserRoles.ADMIN)) {
×
409
      throw new NotAuthorizedException("Admin use only");
×
410
    }
411
    // Study updates:
412
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);
×
413

414
    // Dataset updates
415
    if (studyConversion.getDacId() != null) {
×
416
      datasetDAO.updateDatasetDacId(dataset.getDataSetId(), studyConversion.getDacId());
×
417
    }
418
    if (studyConversion.getDataUse() != null) {
×
419
      datasetDAO.updateDatasetDataUse(dataset.getDataSetId(),
×
420
          studyConversion.getDataUse().toString());
×
421
    }
422
    if (studyConversion.getDataUse() != null) {
×
423
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
×
424
          DataUseTranslationType.DATASET);
425
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDataSetId(), translation);
×
426
    }
427
    if (studyConversion.getDatasetName() != null) {
×
428
      datasetDAO.updateDatasetName(dataset.getDataSetId(), studyConversion.getDatasetName());
×
429
    }
430

431
    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
×
432
    // Handle "Phenotype/Indication"
433
    if (studyConversion.getPhenotype() != null) {
×
434
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
×
435
          studyConversion.getPhenotype());
×
436
    }
437

438
    // Handle "Species"
439
    if (studyConversion.getSpecies() != null) {
×
440
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
×
441
          studyConversion.getSpecies());
×
442
    }
443

444
    if (studyConversion.getNumberOfParticipants() != null) {
×
445
      // Handle "# of participants"
446
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants", PropertyType.Number,
×
447
          studyConversion.getNumberOfParticipants().toString());
×
448
    }
449

450
    // Handle "Data Location"
451
    if (studyConversion.getDataLocation() != null) {
×
452
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
×
453
          studyConversion.getDataLocation());
×
454
    }
455

456
    if (studyConversion.getUrl() != null) {
×
457
      // Handle "URL"
458
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
×
459
          studyConversion.getUrl());
×
460
    }
461

462
    // Handle "Data Submitter User ID"
463
    if (studyConversion.getDataSubmitterEmail() != null) {
×
464
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
×
465
      if (submitter != null) {
×
466
        datasetDAO.updateDatasetCreateUserId(dataset.getDataSetId(), user.getUserId());
×
467
      }
468
    }
469

470
    return studyDAO.findStudyById(studyId);
×
471
  }
472

473
  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
474
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s", user.getEmail(), studyId, custodians));
1✔
475
    Study study = studyDAO.findStudyById(studyId);
1✔
476
    if (study == null) {
1✔
477
      throw new NotFoundException("Study not found");
×
478
    }
479
    Optional<StudyProperty> optionalProp = study.getProperties() == null ?
1✔
480
        Optional.empty() :
1✔
481
        study
482
        .getProperties()
1✔
483
        .stream()
1✔
484
        .filter(p -> p.getKey().equals(dataCustodianEmail))
1✔
485
        .findFirst();
1✔
486
    if (optionalProp.isPresent()) {
1✔
487
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
1✔
488
    } else {
489
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
1✔
490
    }
491
    return studyDAO.findStudyById(studyId);
1✔
492
  }
493

494
  /**
495
   * Ensure that all requested datasetIds exist in the user's list of accepted DAAs
496
   * @param user The requesting User
497
   * @param datasetIds The list of dataset ids the user is requesting access to
498
   */
499
  public void enforceDAARestrictions(User user, List<Integer> datasetIds) {
500
    List<Integer> userDaaDatasetIds = daaDAO.findDaaDatasetIdsByUserId(user.getUserId());
1✔
501
    boolean containsAll = new HashSet<>(userDaaDatasetIds).containsAll(datasetIds);
1✔
502
    if (!containsAll) {
1✔
503
      throw new BadRequestException("User does not have appropriate Data Access Agreements for provided datasets");
1✔
504
    }
505
  }
1✔
506

507
  /**
508
   * This method is used to synchronize a new dataset property with values from the study
509
   * conversion
510
   *
511
   * @param dictionaries   List<Dictionary>
512
   * @param dataset        Dataset
513
   * @param dictionaryName Name to look for in dictionaries
514
   * @param schemaProperty Schema Property to look for in properties
515
   * @param propertyType   Property Type of new value
516
   * @param propValue      New property value
517
   */
518
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
519
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
520
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
×
521
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
×
522
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
×
523
        .findFirst();
×
524
    if (maybeProp.isPresent()) {
×
525
      datasetDAO.updateDatasetProperty(dataset.getDataSetId(), maybeProp.get().getPropertyKey(),
×
526
          propValue);
527
    } else {
528
      dictionaries.stream()
×
529
          .filter(d -> d.getKey().equals(dictionaryName))
×
530
          .findFirst()
×
531
          .ifPresent(dictionary -> {
×
532
            DatasetProperty prop = new DatasetProperty();
×
533
            prop.setDataSetId(dataset.getDataSetId());
×
534
            prop.setPropertyKey(dictionary.getKeyId());
×
535
            prop.setSchemaProperty(schemaProperty);
×
536
            prop.setPropertyValue(propValue);
×
537
            prop.setPropertyType(propertyType);
×
538
            prop.setCreateDate(new Date());
×
539
            datasetDAO.insertDatasetProperties(List.of(prop));
×
540
          });
×
541
    }
542
  }
×
543

544
  /**
   * This method is used to synchronize a legacy dataset property with values from the study
   * conversion
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to update if necessary
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    // Legacy properties are matched by display name rather than schema property.
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> p.getPropertyName().equals(dictionaryName))
        .findFirst();
    Optional<Dictionary> dictionary = dictionaries.stream()
        .filter(d -> d.getKey().equals(dictionaryName))
        .findFirst();
    // Legacy property exists, update it.
    if (dictionary.isPresent() && maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDataSetId(), dictionary.get().getKeyId(),
          propValue);
    }
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
    else if (dictionary.isPresent()) {
      DatasetProperty prop = new DatasetProperty();
      prop.setDataSetId(dataset.getDataSetId());
      prop.setPropertyKey(dictionary.get().getKeyId());
      prop.setSchemaProperty(schemaProperty);
      prop.setPropertyValue(propValue);
      prop.setPropertyType(propertyType);
      prop.setCreateDate(new Date());
      datasetDAO.insertDatasetProperties(List.of(prop));
    }
    // Neither the legacy property nor the dictionary term exists; log a warning.
    else {
      logWarn("Unable to find dictionary term: " + dictionaryName);
    }
  }
584

585
  /**
   * Creates or updates the study targeted by a dataset-to-study conversion, links the
   * dataset to it, and reconciles study properties from the conversion payload.
   *
   * @param user            the admin performing the conversion (fallback creator)
   * @param dataset         the dataset being converted
   * @param studyConversion the conversion payload
   * @return the id of the created or updated study
   */
  private Integer updateStudyFromConversion(User user, Dataset dataset,
      StudyConversion studyConversion) {
    // Ensure that we are not trying to create a new study with an existing name
    Study study = studyDAO.findStudyByName(studyConversion.getName());
    Integer studyId;
    // Prefer the dataset's original creator as the study owner; fall back to the admin.
    Integer userId =
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
    // Create or update the study:
    if (study == null) {
      study = studyConversion.createNewStudyStub();
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
          UUID.randomUUID());
      study.setStudyId(studyId);
    } else {
      studyId = study.getStudyId();
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
          studyConversion.getDescription(), studyConversion.getPiName(),
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
          Instant.now());
    }
    // Point the dataset at its (new or existing) study.
    datasetDAO.updateStudyId(dataset.getDataSetId(), studyId);

    // Create or update study properties:
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
    // If we don't have any props, we need to add all of the new ones
    if (existingProps == null || existingProps.isEmpty()) {
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    } else {
      // Study props to add: conversion props whose key is not present yet.
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
      // Study props to update:
      // NOTE(review): this only "updates" props fully equal to an existing one. If
      // StudyProperty.equals compares value as well as key, a prop with the same key
      // but a changed value is neither added nor updated — confirm intended behavior.
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    }
    return studyId;
  }
632

633
  // Overrides the batch size used by findAllDatasetsAsStreamingOutput()
  // (primarily for tests).
  public void setDatasetBatchSize(Integer datasetBatchSize) {
    this.datasetBatchSize = datasetBatchSize;
  }
636

637
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc