• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

DataBiosphere / consent / #6185

08 Jul 2025 07:15PM UTC coverage: 80.365% (+0.9%) from 79.433%
#6185

push

web-flow
DT-247: Remove/replace all usages of DatasetDTO with Dataset (#2602)

6 of 13 new or added lines in 3 files covered. (46.15%)

1 existing line in 1 file now uncovered.

10384 of 12921 relevant lines covered (80.37%)

0.8 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

45.17
/src/main/java/org/broadinstitute/consent/http/service/DatasetService.java
1
package org.broadinstitute.consent.http.service;
2

3
import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;

import com.google.api.client.http.HttpStatusCodes;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.NotAuthorizedException;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.broadinstitute.consent.http.db.DaaDAO;
import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
import org.broadinstitute.consent.http.enumeration.PropertyType;
import org.broadinstitute.consent.http.enumeration.UserRoles;
import org.broadinstitute.consent.http.models.ApprovedDataset;
import org.broadinstitute.consent.http.models.Dac;
import org.broadinstitute.consent.http.models.DataUse;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.models.DatasetProperty;
import org.broadinstitute.consent.http.models.DatasetStudySummary;
import org.broadinstitute.consent.http.models.DatasetSummary;
import org.broadinstitute.consent.http.models.Dictionary;
import org.broadinstitute.consent.http.models.Study;
import org.broadinstitute.consent.http.models.StudyConversion;
import org.broadinstitute.consent.http.models.StudyProperty;
import org.broadinstitute.consent.http.models.User;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
import org.broadinstitute.consent.http.util.ConsentLogger;
import org.broadinstitute.consent.http.util.gson.GsonUtil;
47

48
/**
 * Service-layer operations for Datasets and Studies: lookups, updates, deletion,
 * DAC approval handling, dataset-to-study conversion, and keeping the
 * ElasticSearch index synchronized with database changes.
 * Implements ConsentLogger for the logInfo/logWarn/logException helpers.
 */
public class DatasetService implements ConsentLogger {

  private final DatasetDAO datasetDAO;
  private final DaaDAO daaDAO;
  private final DacDAO dacDAO;
  private final ElasticSearchService elasticSearchService;
  private final EmailService emailService;
  private final OntologyService ontologyService;
  private final StudyDAO studyDAO;
  private final DatasetServiceDAO datasetServiceDAO;
  private final UserDAO userDAO;
  // Number of dataset ids fetched per DB round trip when streaming all datasets
  // (see findAllDatasetsAsStreamingOutput); public/mutable so tests can override
  // it via setDatasetBatchSize.
  public Integer datasetBatchSize = 50;

  @Inject
  public DatasetService(DatasetDAO dataSetDAO, DaaDAO daaDAO, DacDAO dacDAO, ElasticSearchService
      elasticSearchService, EmailService emailService, OntologyService ontologyService, StudyDAO
      studyDAO, DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
    this.datasetDAO = dataSetDAO;
    this.daaDAO = daaDAO;
    this.dacDAO = dacDAO;
    this.elasticSearchService = elasticSearchService;
    this.emailService = emailService;
    this.ontologyService = ontologyService;
    this.studyDAO = studyDAO;
    this.datasetServiceDAO = datasetServiceDAO;
    this.userDAO = userDAO;
  }
1✔
75

76
  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
77
    if (CollectionUtils.isEmpty(dacIds)) {
1✔
78
      throw new BadRequestException("No dataset IDs provided");
1✔
79
    }
80
    return datasetDAO.findDatasetListByDacIds(dacIds);
1✔
81
  }
82

83
  /**
84
   * TODO: Refactor this to throw a NotFoundException instead of returning null
85
   * Finds a Dataset by a formatted dataset identifier.
86
   *
87
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
88
   * @return the Dataset with the given identifier, if found.
89
   * @throws IllegalArgumentException if datasetIdentifier is invalid
90
   */
91
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
92
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
1✔
93
    Dataset d = datasetDAO.findDatasetByAlias(alias);
1✔
94
    if (d == null) {
1✔
95
      return null;
1✔
96
    }
97

98
    // technically, it is possible to have two dataset identifiers which
99
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
100
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
1✔
101
      return null;
1✔
102
    }
103
    return d;
1✔
104
  }
105

106
  public Dataset getDatasetByName(String name) {
107
    String lowercaseName = name.toLowerCase();
1✔
108
    return datasetDAO.getDatasetByName(lowercaseName);
1✔
109
  }
110

111
  /** Returns all distinct study names known to the dataset store. */
  public Set<String> findAllStudyNames() {
    return datasetDAO.findAllStudyNames();
  }

  /** Returns all dataset names. */
  public List<String> findAllDatasetNames() {
    return datasetDAO.findAllDatasetNames();
  }

  /** Returns the study with the given id (null if absent per DAO contract — TODO confirm). */
  public Study findStudyById(Integer id) {
    return studyDAO.findStudyById(id);
  }

  /** Returns the dataset with the given id (null if absent per DAO contract — TODO confirm). */
  public Dataset findDatasetById(Integer id) {
    return datasetDAO.findDatasetById(id);
  }
126

127
  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
128
    Dataset d = datasetDAO.findDatasetById(datasetId);
1✔
129
    if (d == null) {
1✔
130
      throw new NotFoundException("Dataset not found: " + datasetId);
×
131
    }
132
    if (!user.hasUserRole(UserRoles.ADMIN)) {
1✔
133
      throw new IllegalArgumentException("Admin use only");
1✔
134
    }
135
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
1✔
136
    elasticSearchService.synchronizeDatasetInESIndex(d, user, false);
1✔
137
    return datasetDAO.findDatasetById(datasetId);
1✔
138
  }
139

140
  public Dataset syncDatasetDataUseTranslation(Integer datasetId, User user) {
141
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
1✔
142
    if (dataset == null) {
1✔
143
      throw new NotFoundException("Dataset not found");
1✔
144
    }
145

146
    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
1✔
147
        DataUseTranslationType.DATASET);
148
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);
1✔
149
    elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false);
1✔
150
    return datasetDAO.findDatasetById(datasetId);
1✔
151
  }
152

153
  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
154
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
×
155
    if (dataset != null) {
×
156
      try (var response = elasticSearchService.deleteIndex(datasetId, userId)) {
×
157
        if (!HttpStatusCodes.isSuccess(response.getStatus())) {
×
158
          logWarn("Response error, unable to delete dataset from index: %s".formatted(datasetId));
×
159
        }
160
      }
161
      datasetServiceDAO.deleteDataset(dataset, userId);
×
162
    }
163
  }
×
164

165
  public void deleteStudy(Study study, User user) throws Exception {
166
    study.getDatasetIds().forEach(datasetId -> {
×
167
      try (var response = elasticSearchService.deleteIndex(datasetId, user.getUserId())) {
×
168
        if (!HttpStatusCodes.isSuccess(response.getStatus())) {
×
169
          logWarn("Response error, unable to delete dataset from index: %s".formatted(datasetId));
×
170
        }
171
      } catch (IOException e) {
×
172
        throw new RuntimeException(e);
×
173
      }
×
174
    });
×
175
    datasetServiceDAO.deleteStudy(study, user);
×
176
  }
×
177

178
  /** Returns dataset summaries matching the given free-text query (DAO-defined matching). */
  public List<DatasetSummary> searchDatasetSummaries(String query) {
    return datasetDAO.findDatasetSummariesByQuery(query);
  }

  /** Returns summary rows for all datasets and their studies, straight from the DAO. */
  public List<DatasetStudySummary> findAllDatasetStudySummaries() {
    return datasetDAO.findAllDatasetStudySummaries();
  }
185

186
  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
187
    Boolean currentApprovalState = dataset.getDacApproval();
1✔
188
    Integer datasetId = dataset.getDatasetId();
1✔
189
    Dataset datasetReturn = dataset;
1✔
190
    //Only update and fetch the dataset if it hasn't already been approved
191
    //If it has, simply returned the dataset in the argument (which was already queried for in the resource)
192
    if (currentApprovalState == null || !currentApprovalState) {
1✔
193
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
1✔
194
      elasticSearchService.synchronizeDatasetInESIndex(dataset, user, true);
1✔
195
      datasetReturn = datasetDAO.findDatasetById(datasetId);
1✔
196
    } else {
197
      if (approval == null || !approval) {
1✔
198
        throw new IllegalArgumentException("Dataset is already approved");
1✔
199
      }
200
    }
201

202
    try {
203
      // if approval state changed
204
      if (currentApprovalState != datasetReturn.getDacApproval()) {
1✔
205
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
1✔
206
      }
207
    } catch (Exception e) {
×
NEW
208
      logException("Unable to notifier Data Submitter of dataset approval status: %s".formatted(
×
NEW
209
          dataset.getDatasetIdentifier()), e);
×
210
    }
1✔
211
    return datasetReturn;
1✔
212
  }
213

214
  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, boolean approval)
215
      throws Exception {
216
    Dac dac = dacDAO.findById(dataset.getDacId());
1✔
217
    if (approval) {
1✔
218
      emailService.sendDatasetApprovedMessage(
1✔
219
          user,
220
          dac.getName(),
1✔
221
          dataset.getDatasetIdentifier());
1✔
222
    } else {
223
      if (dac.getEmail() != null) {
1✔
224
        String dacEmail = dac.getEmail();
1✔
225
        emailService.sendDatasetDeniedMessage(
1✔
226
            user,
227
            dac.getName(),
1✔
228
            dataset.getDatasetIdentifier(),
1✔
229
            dacEmail);
230
      } else {
1✔
231
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
1✔
232
      }
233
    }
234

235
  }
1✔
236

237
  /** Fetches the datasets with the given ids. */
  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
    return datasetDAO.findDatasetsByIdList(datasetIds);
  }

  /** Returns the ids of all datasets. */
  public List<Integer> findAllDatasetIds() {
    return datasetDAO.findAllDatasetIds();
  }
244

245
  public StreamingOutput findAllDatasetsAsStreamingOutput() {
246
    List<Integer> datasetIds = datasetDAO.findAllDatasetIds();
1✔
247
    final List<List<Integer>> datasetIdSubLists = Lists.partition(datasetIds, datasetBatchSize);
1✔
248
    final List<Integer> lastSubList = datasetIdSubLists.get(datasetIdSubLists.size() - 1);
1✔
249
    final Integer lastIndex = lastSubList.get(lastSubList.size() - 1);
1✔
250
    Gson gson = GsonUtil.buildGson();
1✔
251
    return output -> {
1✔
252
      output.write("[".getBytes());
1✔
253
      datasetIdSubLists.forEach(subList -> {
1✔
254
        List<Dataset> datasets = findDatasetsByIds(subList);
1✔
255
        datasets.forEach(d -> {
1✔
256
          try {
257
            output.write(gson.toJson(d).getBytes());
1✔
258
            if (!Objects.equals(d.getDatasetId(), lastIndex)) {
1✔
259
              output.write(",".getBytes());
1✔
260
            }
261
            output.write("\n".getBytes());
1✔
262
          } catch (IOException e) {
×
NEW
263
            logException(
×
NEW
264
                "Error writing dataset to streaming output, dataset id: " + d.getDatasetId(), e);
×
265
          }
1✔
266
        });
1✔
267
      });
1✔
268
      output.write("]".getBytes());
1✔
269
    };
1✔
270
  }
271

272
  public Study getStudyWithDatasetsById(Integer studyId) {
273
    try {
274
      Study study = studyDAO.findStudyById(studyId);
1✔
275
      if (study == null) {
1✔
276
        throw new NotFoundException("Study not found");
1✔
277
      }
278
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
1✔
279
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
×
280
        study.addDatasets(datasets);
×
281
      }
282
      return study;
1✔
283
    } catch (NotFoundException nfe) {
1✔
284
      throw nfe;
1✔
285
    } catch (Exception e) {
1✔
286
      logException(e);
1✔
287
      throw e;
1✔
288
    }
289
  }
290

291
  public List<ApprovedDataset> getApprovedDatasets(User user) {
292
    try {
293
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
1✔
294
      return approvedDatasets;
1✔
295
    } catch (Exception e) {
×
296
      logException(e);
×
297
      throw e;
×
298
    }
299
  }
300

301
  /**
302
   * This method is used to convert a dataset into a study if none exist, or if one does, to update
303
   * the dataset, study, and associated properties with new values. This is an admin function only.
304
   *
305
   * @param dataset         The dataset
306
   * @param studyConversion Study Conversion object
307
   * @return Updated/created study
308
   */
309
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
310
    if (!user.hasUserRole(UserRoles.ADMIN)) {
×
311
      throw new NotAuthorizedException("Admin use only");
×
312
    }
313
    // Study updates:
314
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);
×
315

316
    // Dataset updates
317
    if (studyConversion.getDacId() != null) {
×
318
      datasetDAO.updateDatasetDacId(dataset.getDatasetId(), studyConversion.getDacId());
×
319
    }
320
    if (studyConversion.getDataUse() != null) {
×
321
      datasetDAO.updateDatasetDataUse(dataset.getDatasetId(),
×
322
          studyConversion.getDataUse().toString());
×
323
    }
324
    if (studyConversion.getDataUse() != null) {
×
325
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
×
326
          DataUseTranslationType.DATASET);
327
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDatasetId(), translation);
×
328
    }
329
    if (studyConversion.getDatasetName() != null) {
×
330
      datasetDAO.updateDatasetName(dataset.getDatasetId(), studyConversion.getDatasetName());
×
331
    }
332
    elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false);
×
333
    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
×
334
    // Handle "Phenotype/Indication"
335
    if (studyConversion.getPhenotype() != null) {
×
336
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
×
337
          studyConversion.getPhenotype());
×
338
    }
339

340
    // Handle "Species"
341
    if (studyConversion.getSpecies() != null) {
×
342
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
×
343
          studyConversion.getSpecies());
×
344
    }
345

346
    if (studyConversion.getNumberOfParticipants() != null) {
×
347
      // Handle "# of participants"
NEW
348
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants",
×
349
          PropertyType.Number,
UNCOV
350
          studyConversion.getNumberOfParticipants().toString());
×
351
    }
352

353
    // Handle "Data Location"
354
    if (studyConversion.getDataLocation() != null) {
×
355
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
×
356
          studyConversion.getDataLocation());
×
357
    }
358

359
    if (studyConversion.getUrl() != null) {
×
360
      // Handle "URL"
361
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
×
362
          studyConversion.getUrl());
×
363
    }
364

365
    // Handle "Data Submitter User ID"
366
    if (studyConversion.getDataSubmitterEmail() != null) {
×
367
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
×
368
      if (submitter != null) {
×
369
        datasetDAO.updateDatasetCreateUserId(dataset.getDatasetId(), user.getUserId());
×
370
      }
371
    }
372

373
    return studyDAO.findStudyById(studyId);
×
374
  }
375

376
  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
377
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s",
1✔
378
        user.getEmail(), studyId, custodians));
1✔
379
    Study study = studyDAO.findStudyById(studyId);
1✔
380
    if (study == null) {
1✔
381
      throw new NotFoundException("Study not found");
×
382
    }
383
    boolean propPresent = study.getProperties().stream()
1✔
384
        .anyMatch(prop -> prop.getKey().equals(dataCustodianEmail));
1✔
385
    if (propPresent) {
1✔
386
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(),
1✔
387
          custodians);
388
    } else {
389
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(),
1✔
390
          custodians);
391
    }
392
    List<Dataset> datasets = datasetDAO.findDatasetsByIdList(study.getDatasetIds());
1✔
393
    datasets.forEach(
1✔
NEW
394
        dataset -> elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false));
×
395
    return studyDAO.findStudyById(studyId);
1✔
396
  }
397

398
  /**
399
   * Ensure that all requested datasetIds exist in the user's list of accepted DAAs
400
   *
401
   * @param user       The requesting User
402
   * @param datasetIds The list of dataset ids the user is requesting access to
403
   */
404
  public void enforceDAARestrictions(User user, List<Integer> datasetIds) {
405
    List<Integer> userDaaDatasetIds = daaDAO.findDaaDatasetIdsByUserId(user.getUserId());
1✔
406
    boolean containsAll = new HashSet<>(userDaaDatasetIds).containsAll(datasetIds);
1✔
407
    if (!containsAll) {
1✔
408
      throw new BadRequestException(
1✔
409
          "User does not have appropriate Data Access Agreements for provided datasets");
410
    }
411
  }
1✔
412

413
  /**
414
   * This method is used to synchronize a new dataset property with values from the study
415
   * conversion
416
   *
417
   * @param dictionaries   List<Dictionary>
418
   * @param dataset        Dataset
419
   * @param dictionaryName Name to look for in dictionaries
420
   * @param schemaProperty Schema Property to look for in properties
421
   * @param propertyType   Property Type of new value
422
   * @param propValue      New property value
423
   */
424
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
425
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
426
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
×
427
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
×
428
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
×
429
        .findFirst();
×
430
    if (maybeProp.isPresent()) {
×
431
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), maybeProp.get().getPropertyKey(),
×
432
          propValue);
433
    } else {
434
      dictionaries.stream()
×
435
          .filter(d -> d.getKey().equals(dictionaryName))
×
436
          .findFirst()
×
437
          .ifPresent(dictionary -> {
×
438
            DatasetProperty prop = new DatasetProperty();
×
439
            prop.setDatasetId(dataset.getDatasetId());
×
440
            prop.setPropertyKey(dictionary.getKeyId());
×
441
            prop.setSchemaProperty(schemaProperty);
×
442
            prop.setPropertyValue(propValue);
×
443
            prop.setPropertyType(propertyType);
×
444
            prop.setCreateDate(new Date());
×
445
            datasetDAO.insertDatasetProperties(List.of(prop));
×
446
          });
×
447
    }
448
  }
×
449

450
  /**
451
   * This method is used to synchronize a legacy dataset property with values from the study
452
   * conversion
453
   *
454
   * @param dictionaries   List<Dictionary>
455
   * @param dataset        Dataset
456
   * @param dictionaryName Name to look for in dictionaries
457
   * @param schemaProperty Schema Property to update if necessary
458
   * @param propertyType   Property Type of new value
459
   * @param propValue      New property value
460
   */
461
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
462
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
463
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
×
464
        .filter(p -> p.getPropertyName().equals(dictionaryName))
×
465
        .findFirst();
×
466
    Optional<Dictionary> dictionary = dictionaries.stream()
×
467
        .filter(d -> d.getKey().equals(dictionaryName))
×
468
        .findFirst();
×
469
    // Legacy property exists, update it.
470
    if (dictionary.isPresent() && maybeProp.isPresent()) {
×
471
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), dictionary.get().getKeyId(),
×
472
          propValue);
473
    }
474
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
475
    else if (dictionary.isPresent()) {
×
476
      DatasetProperty prop = new DatasetProperty();
×
477
      prop.setDatasetId(dataset.getDatasetId());
×
478
      prop.setPropertyKey(dictionary.get().getKeyId());
×
479
      prop.setSchemaProperty(schemaProperty);
×
480
      prop.setPropertyValue(propValue);
×
481
      prop.setPropertyType(propertyType);
×
482
      prop.setCreateDate(new Date());
×
483
      datasetDAO.insertDatasetProperties(List.of(prop));
×
484
    }
×
485
    // Neither legacy property nor dictionary term does not exist, log a warning.
486
    else {
487
      logWarn("Unable to find dictionary term: " + dictionaryName);
×
488
    }
489
  }
×
490

491
  /**
   * Creates or updates the Study described by a StudyConversion, links the dataset
   * to it, and synchronizes the study's properties with the conversion's values.
   *
   * @param user            the admin performing the conversion
   * @param dataset         the dataset being linked to the study
   * @param studyConversion the conversion payload
   * @return the id of the created or updated study
   */
  private Integer updateStudyFromConversion(User user, Dataset dataset,
      StudyConversion studyConversion) {
    // Ensure that we are not trying to create a new study with an existing name
    Study study = studyDAO.findStudyByName(studyConversion.getName());
    Integer studyId;
    // Attribute the study to the dataset's original creator when known,
    // otherwise to the user performing the conversion.
    Integer userId =
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
    // Create or update the study:
    if (study == null) {
      study = studyConversion.createNewStudyStub();
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
          UUID.randomUUID());
      study.setStudyId(studyId);
    } else {
      studyId = study.getStudyId();
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
          studyConversion.getDescription(), studyConversion.getPiName(),
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
          Instant.now());
    }
    datasetDAO.updateStudyId(dataset.getDatasetId(), studyId);

    // Create or update study properties:
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
    // If we don't have any props, we need to add all of the new ones
    if (existingProps == null || existingProps.isEmpty()) {
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    } else {
      // Study props to add:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
      // Study props to update:
      // NOTE(review): this filter matches on full StudyProperty equality — if
      // StudyProperty.equals covers the value as well as the key, only
      // already-identical properties are "updated" (a no-op) and changed values
      // are never written. A key-based match looks intended here; confirm
      // StudyProperty.equals semantics before changing.
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    }
    return studyId;
  }
538

539
  /**
   * Overrides the streaming batch size used by findAllDatasetsAsStreamingOutput
   * (primarily useful for tests).
   *
   * @param datasetBatchSize the new batch size
   */
  public void setDatasetBatchSize(Integer datasetBatchSize) {
    this.datasetBatchSize = datasetBatchSize;
  }
1✔
542

543
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc