• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

DataBiosphere / consent / #6333

15 Aug 2025 12:17PM UTC coverage: 83.37% (+0.03%) from 83.338%
#6333

push

web-flow
DT-2091: Update DAC Approve Dataset performance (#2636)

Co-authored-by: otchet-broad <111771148+otchet-broad@users.noreply.github.com>

83 of 105 new or added lines in 5 files covered. (79.05%)

4 existing lines in 1 file now uncovered.

10949 of 13133 relevant lines covered (83.37%)

0.83 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

45.0
/src/main/java/org/broadinstitute/consent/http/service/DatasetService.java
1
package org.broadinstitute.consent.http.service;
2

3
import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;

import com.google.api.client.http.HttpStatusCodes;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.NotAuthorizedException;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.broadinstitute.consent.http.db.DaaDAO;
import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
import org.broadinstitute.consent.http.enumeration.PropertyType;
import org.broadinstitute.consent.http.enumeration.UserRoles;
import org.broadinstitute.consent.http.models.ApprovedDataset;
import org.broadinstitute.consent.http.models.Dac;
import org.broadinstitute.consent.http.models.DataUse;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.models.DatasetProperty;
import org.broadinstitute.consent.http.models.DatasetStudySummary;
import org.broadinstitute.consent.http.models.DatasetSummary;
import org.broadinstitute.consent.http.models.Dictionary;
import org.broadinstitute.consent.http.models.Study;
import org.broadinstitute.consent.http.models.StudyConversion;
import org.broadinstitute.consent.http.models.StudyProperty;
import org.broadinstitute.consent.http.models.User;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
import org.broadinstitute.consent.http.util.ConsentLogger;
import org.broadinstitute.consent.http.util.gson.GsonUtil;
47

48
public class DatasetService implements ConsentLogger {
49

50
  // DAO and service collaborators, all injected via Guice.
  private final DatasetDAO datasetDAO;
  private final DaaDAO daaDAO;
  private final DacDAO dacDAO;
  private final ElasticSearchService elasticSearchService;
  private final EmailService emailService;
  private final OntologyService ontologyService;
  private final StudyDAO studyDAO;
  private final DatasetServiceDAO datasetServiceDAO;
  private final UserDAO userDAO;
  // Batch size used when streaming all datasets (see findAllDatasetsAsStreamingOutput).
  // NOTE(review): public mutable field — presumably exposed for tests via setDatasetBatchSize;
  // confirm before narrowing visibility.
  public Integer datasetBatchSize = 50;

  /**
   * Guice-injected constructor wiring all DAO and service dependencies.
   */
  @Inject
  public DatasetService(DatasetDAO dataSetDAO, DaaDAO daaDAO, DacDAO dacDAO, ElasticSearchService
      elasticSearchService, EmailService emailService, OntologyService ontologyService, StudyDAO
      studyDAO, DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
    this.datasetDAO = dataSetDAO;
    this.daaDAO = daaDAO;
    this.dacDAO = dacDAO;
    this.elasticSearchService = elasticSearchService;
    this.emailService = emailService;
    this.ontologyService = ontologyService;
    this.studyDAO = studyDAO;
    this.datasetServiceDAO = datasetServiceDAO;
    this.userDAO = userDAO;
  }
75

76
  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
77
    if (CollectionUtils.isEmpty(dacIds)) {
1✔
78
      throw new BadRequestException("No dataset IDs provided");
1✔
79
    }
80
    return datasetDAO.findDatasetListByDacIds(dacIds);
1✔
81
  }
82

83
  /**
84
   * TODO: Refactor this to throw a NotFoundException instead of returning null
85
   * Finds a Dataset by a formatted dataset identifier.
86
   *
87
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
88
   * @return the Dataset with the given identifier, if found.
89
   * @throws IllegalArgumentException if datasetIdentifier is invalid
90
   */
91
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
92
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
1✔
93
    Dataset d = datasetDAO.findDatasetByAlias(alias);
1✔
94
    if (d == null) {
1✔
95
      return null;
1✔
96
    }
97

98
    // technically, it is possible to have two dataset identifiers which
99
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
100
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
1✔
101
      return null;
1✔
102
    }
103
    return d;
1✔
104
  }
105

106
  public Dataset getDatasetByName(String name) {
107
    String lowercaseName = name.toLowerCase();
1✔
108
    return datasetDAO.getDatasetByName(lowercaseName);
1✔
109
  }
110

111
  /**
   * @return The set of all study names, delegated to DatasetDAO.
   */
  public Set<String> findAllStudyNames() {
    return datasetDAO.findAllStudyNames();
  }
114

115
  /**
   * @return The names of all datasets, delegated to DatasetDAO.
   */
  public List<String> findAllDatasetNames() {
    return datasetDAO.findAllDatasetNames();
  }
118

119
  /**
   * Looks up a study by id, delegated to StudyDAO.
   *
   * @param id Study ID
   * @return The matching study as returned by the DAO
   */
  public Study findStudyById(Integer id) {
    return studyDAO.findStudyById(id);
  }
122

123
  /**
   * Looks up a dataset by id, delegated to DatasetDAO.
   *
   * @param id Dataset ID
   * @return The matching dataset as returned by the DAO
   */
  public Dataset findDatasetById(Integer id) {
    return datasetDAO.findDatasetById(id);
  }
126

127
  /**
   * Find the dataset without files by its ID. This method is intended to return a minimal
   * dataset for performance reasons, avoiding the retrieval of full FSO information.
   *
   * @param id Dataset ID
   * @return The dataset without file storage object details (read-only lookup; nothing is
   *         updated by this call)
   */
  public Dataset findDatasetWithoutFSOInformation(Integer id) {
    return datasetDAO.findDatasetWithoutFSOInformation(id);
  }
137

138
  /**
   * Updates the data use of a dataset and re-synchronizes it in the search index. Admin only.
   *
   * @param user      The user performing the update; must have the ADMIN role
   * @param datasetId ID of the dataset to update
   * @param dataUse   The new data use, persisted via its toString() form
   * @return The freshly re-fetched dataset reflecting the update
   * @throws NotFoundException        if no dataset exists with the given id
   * @throws IllegalArgumentException if the user is not an admin
   */
  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
    Dataset d = datasetDAO.findDatasetById(datasetId);
    if (d == null) {
      throw new NotFoundException("Dataset not found: " + datasetId);
    }
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new IllegalArgumentException("Admin use only");
    }
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
    // NOTE(review): `d` was fetched before the update, so the ES sync receives the pre-update
    // object — presumably the sync re-reads by id; confirm against ElasticSearchService.
    elasticSearchService.synchronizeDatasetInESIndex(d, user, false);
    return datasetDAO.findDatasetById(datasetId);
  }
150

151
  /**
   * Regenerates the translated data use text for a dataset from its current data use, persists
   * it, and re-synchronizes the dataset in the search index.
   *
   * @param datasetId ID of the dataset to translate
   * @param user      The user triggering the sync (passed through to the index update)
   * @return The freshly re-fetched dataset reflecting the new translation
   * @throws NotFoundException if no dataset exists with the given id
   */
  public Dataset syncDatasetDataUseTranslation(Integer datasetId, User user) {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset == null) {
      throw new NotFoundException("Dataset not found");
    }

    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
        DataUseTranslationType.DATASET);
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);
    // NOTE(review): `dataset` was fetched before the translation update, so the ES sync receives
    // the pre-update object — presumably the sync re-reads by id; confirm.
    elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false);
    return datasetDAO.findDatasetById(datasetId);
  }
163

164
  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
165
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
×
166
    if (dataset != null) {
×
167
      try (var response = elasticSearchService.deleteIndex(datasetId, userId)) {
×
168
        if (!HttpStatusCodes.isSuccess(response.getStatus())) {
×
169
          logWarn("Response error, unable to delete dataset from index: %s".formatted(datasetId));
×
170
        }
171
      }
172
      datasetServiceDAO.deleteDataset(dataset, userId);
×
173
    }
174
  }
×
175

176
  public void deleteStudy(Study study, User user) throws Exception {
177
    study.getDatasetIds().forEach(datasetId -> {
×
178
      try (var response = elasticSearchService.deleteIndex(datasetId, user.getUserId())) {
×
179
        if (!HttpStatusCodes.isSuccess(response.getStatus())) {
×
180
          logWarn("Response error, unable to delete dataset from index: %s".formatted(datasetId));
×
181
        }
182
      } catch (IOException e) {
×
183
        throw new RuntimeException(e);
×
184
      }
×
185
    });
×
186
    datasetServiceDAO.deleteStudy(study, user);
×
187
  }
×
188

189
  /**
   * Searches dataset summaries by a free-text query, delegated to DatasetDAO.
   *
   * @param query Search query text
   * @return Matching dataset summaries
   */
  public List<DatasetSummary> searchDatasetSummaries(String query) {
    return datasetDAO.findDatasetSummariesByQuery(query);
  }
192

193
  /**
   * @return All dataset/study summary rows, delegated to DatasetDAO.
   */
  public List<DatasetStudySummary> findAllDatasetStudySummaries() {
    return datasetDAO.findAllDatasetStudySummaries();
  }
196

197
  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
198
    Boolean currentApprovalState = dataset.getDacApproval();
1✔
199
    Integer datasetId = dataset.getDatasetId();
1✔
200
    Dataset datasetReturn = dataset;
1✔
201
    //Only update and fetch the dataset if it hasn't already been approved
202
    //If it has, simply returned the dataset in the argument (which was already queried for in the resource)
203
    if (currentApprovalState == null || !currentApprovalState) {
1✔
204
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
1✔
205
      elasticSearchService.asyncDatasetInESIndex(datasetId, user, true);
1✔
206
      datasetReturn = datasetDAO.findDatasetWithoutFSOInformation(datasetId);
1✔
207
    } else {
208
      if (approval == null || !approval) {
1✔
209
        throw new IllegalArgumentException("Dataset is already approved");
1✔
210
      }
211
    }
212

213
    try {
214
      // if approval state changed
215
      if (currentApprovalState != datasetReturn.getDacApproval()) {
1✔
216
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
1✔
217
      }
218
    } catch (Exception e) {
×
219
      logException("Unable to notifier Data Submitter of dataset approval status: %s".formatted(
×
220
          dataset.getDatasetIdentifier()), e);
×
221
    }
1✔
222
    return datasetReturn;
1✔
223
  }
224

225
  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, boolean approval)
226
      throws Exception {
227
    Dac dac = dacDAO.findById(dataset.getDacId());
1✔
228
    if (approval) {
1✔
229
      emailService.sendDatasetApprovedMessage(
1✔
230
          user,
231
          dac.getName(),
1✔
232
          dataset.getDatasetIdentifier());
1✔
233
    } else {
234
      if (dac.getEmail() != null) {
1✔
235
        String dacEmail = dac.getEmail();
1✔
236
        emailService.sendDatasetDeniedMessage(
1✔
237
            user,
238
            dac.getName(),
1✔
239
            dataset.getDatasetIdentifier(),
1✔
240
            dacEmail);
241
      } else {
1✔
242
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
1✔
243
      }
244
    }
245

246
  }
1✔
247

248
  /**
   * Fetches the datasets with the given ids, delegated to DatasetDAO.
   *
   * @param datasetIds Dataset ids to look up
   * @return Matching datasets
   */
  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
    return datasetDAO.findDatasetsByIdList(datasetIds);
  }
251

252
  /**
   * @return All dataset ids, delegated to DatasetDAO.
   */
  public List<Integer> findAllDatasetIds() {
    return datasetDAO.findAllDatasetIds();
  }
255

256
  public StreamingOutput findAllDatasetsAsStreamingOutput() {
257
    List<Integer> datasetIds = datasetDAO.findAllDatasetIds();
1✔
258
    final List<List<Integer>> datasetIdSubLists = Lists.partition(datasetIds, datasetBatchSize);
1✔
259
    final List<Integer> lastSubList = datasetIdSubLists.get(datasetIdSubLists.size() - 1);
1✔
260
    final Integer lastIndex = lastSubList.get(lastSubList.size() - 1);
1✔
261
    Gson gson = GsonUtil.buildGson();
1✔
262
    return output -> {
1✔
263
      output.write("[".getBytes());
1✔
264
      datasetIdSubLists.forEach(subList -> {
1✔
265
        List<Dataset> datasets = findDatasetsByIds(subList);
1✔
266
        datasets.forEach(d -> {
1✔
267
          try {
268
            output.write(gson.toJson(d).getBytes());
1✔
269
            if (!Objects.equals(d.getDatasetId(), lastIndex)) {
1✔
270
              output.write(",".getBytes());
1✔
271
            }
272
            output.write("\n".getBytes());
1✔
273
          } catch (IOException e) {
×
274
            logException(
×
275
                "Error writing dataset to streaming output, dataset id: " + d.getDatasetId(), e);
×
276
          }
1✔
277
        });
1✔
278
      });
1✔
279
      output.write("]".getBytes());
1✔
280
    };
1✔
281
  }
282

283
  public Study getStudyWithDatasetsById(Integer studyId) {
284
    try {
285
      Study study = studyDAO.findStudyById(studyId);
1✔
286
      if (study == null) {
1✔
287
        throw new NotFoundException("Study not found");
1✔
288
      }
289
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
1✔
290
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
×
291
        study.addDatasets(datasets);
×
292
      }
293
      return study;
1✔
294
    } catch (NotFoundException nfe) {
1✔
295
      throw nfe;
1✔
296
    } catch (Exception e) {
1✔
297
      logException(e);
1✔
298
      throw e;
1✔
299
    }
300
  }
301

302
  public List<ApprovedDataset> getApprovedDatasets(User user) {
303
    try {
304
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
1✔
305
      return approvedDatasets;
1✔
306
    } catch (Exception e) {
×
307
      logException(e);
×
308
      throw e;
×
309
    }
310
  }
311

312
  /**
313
   * This method is used to convert a dataset into a study if none exist, or if one does, to update
314
   * the dataset, study, and associated properties with new values. This is an admin function only.
315
   *
316
   * @param dataset         The dataset
317
   * @param studyConversion Study Conversion object
318
   * @return Updated/created study
319
   */
320
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
321
    if (!user.hasUserRole(UserRoles.ADMIN)) {
×
322
      throw new NotAuthorizedException("Admin use only");
×
323
    }
324
    // Study updates:
325
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);
×
326

327
    // Dataset updates
328
    if (studyConversion.getDacId() != null) {
×
329
      datasetDAO.updateDatasetDacId(dataset.getDatasetId(), studyConversion.getDacId());
×
330
    }
331
    if (studyConversion.getDataUse() != null) {
×
332
      datasetDAO.updateDatasetDataUse(dataset.getDatasetId(),
×
333
          studyConversion.getDataUse().toString());
×
334
    }
335
    if (studyConversion.getDataUse() != null) {
×
336
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
×
337
          DataUseTranslationType.DATASET);
338
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDatasetId(), translation);
×
339
    }
340
    if (studyConversion.getDatasetName() != null) {
×
341
      datasetDAO.updateDatasetName(dataset.getDatasetId(), studyConversion.getDatasetName());
×
342
    }
343
    elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false);
×
344
    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
×
345
    // Handle "Phenotype/Indication"
346
    if (studyConversion.getPhenotype() != null) {
×
347
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
×
348
          studyConversion.getPhenotype());
×
349
    }
350

351
    // Handle "Species"
352
    if (studyConversion.getSpecies() != null) {
×
353
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
×
354
          studyConversion.getSpecies());
×
355
    }
356

357
    if (studyConversion.getNumberOfParticipants() != null) {
×
358
      // Handle "# of participants"
359
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants",
×
360
          PropertyType.Number,
361
          studyConversion.getNumberOfParticipants().toString());
×
362
    }
363

364
    // Handle "Data Location"
365
    if (studyConversion.getDataLocation() != null) {
×
366
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
×
367
          studyConversion.getDataLocation());
×
368
    }
369

370
    if (studyConversion.getUrl() != null) {
×
371
      // Handle "URL"
372
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
×
373
          studyConversion.getUrl());
×
374
    }
375

376
    // Handle "Data Submitter User ID"
377
    if (studyConversion.getDataSubmitterEmail() != null) {
×
378
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
×
379
      if (submitter != null) {
×
380
        datasetDAO.updateDatasetCreateUserId(dataset.getDatasetId(), user.getUserId());
×
381
      }
382
    }
383

384
    return studyDAO.findStudyById(studyId);
×
385
  }
386

387
  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
388
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s",
1✔
389
        user.getEmail(), studyId, custodians));
1✔
390
    Study study = studyDAO.findStudyById(studyId);
1✔
391
    if (study == null) {
1✔
392
      throw new NotFoundException("Study not found");
×
393
    }
394
    boolean propPresent = study.getProperties().stream()
1✔
395
        .anyMatch(prop -> prop.getKey().equals(dataCustodianEmail));
1✔
396
    if (propPresent) {
1✔
397
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(),
1✔
398
          custodians);
399
    } else {
400
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(),
1✔
401
          custodians);
402
    }
403
    List<Dataset> datasets = datasetDAO.findDatasetsByIdList(study.getDatasetIds());
1✔
404
    datasets.forEach(
1✔
405
        dataset -> elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false));
×
406
    return studyDAO.findStudyById(studyId);
1✔
407
  }
408

409
  /**
410
   * Ensure that all requested datasetIds exist in the user's list of accepted DAAs
411
   *
412
   * @param user       The requesting User
413
   * @param datasetIds The list of dataset ids the user is requesting access to
414
   */
415
  public void enforceDAARestrictions(User user, List<Integer> datasetIds) {
416
    List<Integer> userDaaDatasetIds = daaDAO.findDaaDatasetIdsByUserId(user.getUserId());
1✔
417
    boolean containsAll = new HashSet<>(userDaaDatasetIds).containsAll(datasetIds);
1✔
418
    if (!containsAll) {
1✔
419
      throw new BadRequestException(
1✔
420
          "User does not have appropriate Data Access Agreements for provided datasets");
421
    }
422
  }
1✔
423

424
  /**
425
   * This method is used to synchronize a new dataset property with values from the study
426
   * conversion
427
   *
428
   * @param dictionaries   List<Dictionary>
429
   * @param dataset        Dataset
430
   * @param dictionaryName Name to look for in dictionaries
431
   * @param schemaProperty Schema Property to look for in properties
432
   * @param propertyType   Property Type of new value
433
   * @param propValue      New property value
434
   */
435
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
436
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
437
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
×
438
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
×
439
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
×
440
        .findFirst();
×
441
    if (maybeProp.isPresent()) {
×
442
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), maybeProp.get().getPropertyKey(),
×
443
          propValue);
444
    } else {
445
      dictionaries.stream()
×
446
          .filter(d -> d.getKey().equals(dictionaryName))
×
447
          .findFirst()
×
448
          .ifPresent(dictionary -> {
×
449
            DatasetProperty prop = new DatasetProperty();
×
450
            prop.setDatasetId(dataset.getDatasetId());
×
451
            prop.setPropertyKey(dictionary.getKeyId());
×
452
            prop.setSchemaProperty(schemaProperty);
×
453
            prop.setPropertyValue(propValue);
×
454
            prop.setPropertyType(propertyType);
×
455
            prop.setCreateDate(new Date());
×
456
            datasetDAO.insertDatasetProperties(List.of(prop));
×
457
          });
×
458
    }
459
  }
×
460

461
  /**
   * This method is used to synchronize a legacy dataset property with values from the study
   * conversion
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to update if necessary
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    // NOTE(review): assumes getPropertyName() is non-null for every legacy property (unlike
    // newPropConversion, which null-guards the schema property) — confirm nullability.
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> p.getPropertyName().equals(dictionaryName))
        .findFirst();
    Optional<Dictionary> dictionary = dictionaries.stream()
        .filter(d -> d.getKey().equals(dictionaryName))
        .findFirst();
    // Legacy property exists, update it.
    if (dictionary.isPresent() && maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), dictionary.get().getKeyId(),
          propValue);
    }
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
    else if (dictionary.isPresent()) {
      DatasetProperty prop = new DatasetProperty();
      prop.setDatasetId(dataset.getDatasetId());
      prop.setPropertyKey(dictionary.get().getKeyId());
      prop.setSchemaProperty(schemaProperty);
      prop.setPropertyValue(propValue);
      prop.setPropertyType(propertyType);
      prop.setCreateDate(new Date());
      datasetDAO.insertDatasetProperties(List.of(prop));
    }
    // No matching dictionary term exists, log a warning.
    else {
      logWarn("Unable to find dictionary term: " + dictionaryName);
    }
  }
501

502
  private Integer updateStudyFromConversion(User user, Dataset dataset,
503
      StudyConversion studyConversion) {
504
    // Ensure that we are not trying to create a new study with an existing name
505
    Study study = studyDAO.findStudyByName(studyConversion.getName());
×
506
    Integer studyId;
507
    Integer userId =
508
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
×
509
    // Create or update the study:
510
    if (study == null) {
×
511
      study = studyConversion.createNewStudyStub();
×
512
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
×
513
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
×
514
          UUID.randomUUID());
×
515
      study.setStudyId(studyId);
×
516
    } else {
517
      studyId = study.getStudyId();
×
518
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
×
519
          studyConversion.getDescription(), studyConversion.getPiName(),
×
520
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
×
521
          Instant.now());
×
522
    }
523
    datasetDAO.updateStudyId(dataset.getDatasetId(), studyId);
×
524

525
    // Create or update study properties:
526
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
×
527
    // If we don't have any props, we need to add all of the new ones
528
    if (existingProps == null || existingProps.isEmpty()) {
×
529
      studyConversion.getStudyProperties().stream()
×
530
          .filter(Objects::nonNull)
×
531
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
×
532
              p.getValue().toString()));
×
533
    } else {
534
      // Study props to add:
535
      studyConversion.getStudyProperties().stream()
×
536
          .filter(Objects::nonNull)
×
537
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
×
538
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
×
539
              p.getValue().toString()));
×
540
      // Study props to update:
541
      studyConversion.getStudyProperties().stream()
×
542
          .filter(Objects::nonNull)
×
543
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
×
544
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
×
545
              p.getValue().toString()));
×
546
    }
547
    return studyId;
×
548
  }
549

550
  /**
   * Overrides the batch size used when streaming all datasets (see
   * findAllDatasetsAsStreamingOutput).
   *
   * @param datasetBatchSize The new batch size
   */
  public void setDatasetBatchSize(Integer datasetBatchSize) {
    this.datasetBatchSize = datasetBatchSize;
  }
553

554
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc