• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

DataBiosphere / consent / #5636

08 Apr 2025 02:19PM UTC coverage: 79.133% (-0.1%) from 79.273%
#5636

push

web-flow
DT-1424: Mark datasets as indexed/deindexed when indexing operations are called (#2470)

58 of 83 new or added lines in 10 files covered. (69.88%)

10 existing lines in 3 files now uncovered.

10277 of 12987 relevant lines covered (79.13%)

0.79 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

46.33
/src/main/java/org/broadinstitute/consent/http/service/DatasetService.java
1
package org.broadinstitute.consent.http.service;
2

3
import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;

import com.google.api.client.http.HttpStatusCodes;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.NotAuthorizedException;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.broadinstitute.consent.http.db.DaaDAO;
import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
import org.broadinstitute.consent.http.enumeration.PropertyType;
import org.broadinstitute.consent.http.enumeration.UserRoles;
import org.broadinstitute.consent.http.models.ApprovedDataset;
import org.broadinstitute.consent.http.models.Dac;
import org.broadinstitute.consent.http.models.DataUse;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.models.DatasetProperty;
import org.broadinstitute.consent.http.models.DatasetStudySummary;
import org.broadinstitute.consent.http.models.DatasetSummary;
import org.broadinstitute.consent.http.models.Dictionary;
import org.broadinstitute.consent.http.models.Study;
import org.broadinstitute.consent.http.models.StudyConversion;
import org.broadinstitute.consent.http.models.StudyProperty;
import org.broadinstitute.consent.http.models.User;
import org.broadinstitute.consent.http.models.dto.DatasetDTO;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
import org.broadinstitute.consent.http.util.ConsentLogger;
import org.broadinstitute.consent.http.util.gson.GsonUtil;
48

49

50
public class DatasetService implements ConsentLogger {
51

52
  private final DatasetDAO datasetDAO;
53
  private final DaaDAO daaDAO;
54
  private final DacDAO dacDAO;
55
  private final ElasticSearchService elasticSearchService;
56
  private final EmailService emailService;
57
  private final OntologyService ontologyService;
58
  private final StudyDAO studyDAO;
59
  private final DatasetServiceDAO datasetServiceDAO;
60
  private final UserDAO userDAO;
61
  public Integer datasetBatchSize = 50;
1✔
62

63
  @Inject
64
  public DatasetService(DatasetDAO dataSetDAO, DaaDAO daaDAO, DacDAO dacDAO, ElasticSearchService
65
      elasticSearchService, EmailService emailService, OntologyService ontologyService, StudyDAO
66
      studyDAO, DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
1✔
67
    this.datasetDAO = dataSetDAO;
1✔
68
    this.daaDAO = daaDAO;
1✔
69
    this.dacDAO = dacDAO;
1✔
70
    this.elasticSearchService = elasticSearchService;
1✔
71
    this.emailService = emailService;
1✔
72
    this.ontologyService = ontologyService;
1✔
73
    this.studyDAO = studyDAO;
1✔
74
    this.datasetServiceDAO = datasetServiceDAO;
1✔
75
    this.userDAO = userDAO;
1✔
76
  }
1✔
77

78
  public Set<DatasetDTO> findDatasetsByDacIds(List<Integer> dacIds) {
79
    if (CollectionUtils.isEmpty(dacIds)) {
1✔
80
      throw new BadRequestException("No dataset IDs provided");
1✔
81
    }
82
    return datasetDAO.findDatasetsByDacIds(dacIds);
1✔
83
  }
84

85
  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
86
    if (CollectionUtils.isEmpty(dacIds)) {
1✔
87
      throw new BadRequestException("No dataset IDs provided");
1✔
88
    }
89
    return datasetDAO.findDatasetListByDacIds(dacIds);
1✔
90
  }
91

92
  /**
93
   * TODO: Refactor this to throw a NotFoundException instead of returning null
94
   * Finds a Dataset by a formatted dataset identifier.
95
   *
96
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
97
   * @return the Dataset with the given identifier, if found.
98
   * @throws IllegalArgumentException if datasetIdentifier is invalid
99
   */
100
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
101
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
1✔
102
    Dataset d = datasetDAO.findDatasetByAlias(alias);
1✔
103
    if (d == null) {
1✔
104
      return null;
1✔
105
    }
106

107
    // technically, it is possible to have two dataset identifiers which
108
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
109
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
1✔
110
      return null;
1✔
111
    }
112
    return d;
1✔
113
  }
114

115
  public Dataset getDatasetByName(String name) {
116
    String lowercaseName = name.toLowerCase();
1✔
117
    return datasetDAO.getDatasetByName(lowercaseName);
1✔
118
  }
119

120
  public Set<String> findAllStudyNames() {
121
    return datasetDAO.findAllStudyNames();
1✔
122
  }
123

124
  public List<String> findAllDatasetNames() {
125
    return datasetDAO.findAllDatasetNames();
×
126
  }
127

128
  public Study findStudyById(Integer id) {
129
    return studyDAO.findStudyById(id);
×
130
  }
131

132
  public Dataset findDatasetById(Integer id) {
133
    return datasetDAO.findDatasetById(id);
1✔
134
  }
135

136
  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
137
    Dataset d = datasetDAO.findDatasetById(datasetId);
1✔
138
    if (d == null) {
1✔
139
      throw new NotFoundException("Dataset not found: " + datasetId);
×
140
    }
141
    if (!user.hasUserRole(UserRoles.ADMIN)) {
1✔
142
      throw new IllegalArgumentException("Admin use only");
1✔
143
    }
144
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
1✔
145
    elasticSearchService.synchronizeDatasetInESIndex(d, user, false);
1✔
146
    return datasetDAO.findDatasetById(datasetId);
1✔
147
  }
148

149
  public Dataset syncDatasetDataUseTranslation(Integer datasetId, User user) {
150
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
1✔
151
    if (dataset == null) {
1✔
152
      throw new NotFoundException("Dataset not found");
1✔
153
    }
154

155
    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
1✔
156
        DataUseTranslationType.DATASET);
157
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);
1✔
158
    elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false);
1✔
159
    return datasetDAO.findDatasetById(datasetId);
1✔
160
  }
161

162
  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
163
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
×
164
    if (dataset != null) {
×
NEW
165
      try (var response = elasticSearchService.deleteIndex(datasetId, userId)) {
×
NEW
166
        if (!HttpStatusCodes.isSuccess(response.getStatus())) {
×
NEW
167
          logWarn("Response error, unable to delete dataset from index: %s".formatted(datasetId));
×
168
        }
169
      }
UNCOV
170
      datasetServiceDAO.deleteDataset(dataset, userId);
×
171
    }
172
  }
×
173

174
  public void deleteStudy(Study study, User user) throws Exception {
NEW
175
    study.getDatasetIds().forEach(datasetId -> {
×
NEW
176
      try (var response = elasticSearchService.deleteIndex(datasetId, user.getUserId())) {
×
NEW
177
        if (!HttpStatusCodes.isSuccess(response.getStatus())) {
×
NEW
178
          logWarn("Response error, unable to delete dataset from index: %s".formatted(datasetId));
×
179
        }
NEW
180
      } catch (IOException e) {
×
NEW
181
        throw new RuntimeException(e);
×
NEW
182
      }
×
NEW
183
    });
×
184
    datasetServiceDAO.deleteStudy(study, user);
×
185
  }
×
186

187
  public List<DatasetSummary> searchDatasetSummaries(String query) {
188
    return datasetDAO.findDatasetSummariesByQuery(query);
×
189
  }
190

191
  public List<DatasetStudySummary> findAllDatasetStudySummaries() {
192
    return datasetDAO.findAllDatasetStudySummaries();
×
193
  }
194

195
  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
196
    Boolean currentApprovalState = dataset.getDacApproval();
1✔
197
    Integer datasetId = dataset.getDatasetId();
1✔
198
    Dataset datasetReturn = dataset;
1✔
199
    //Only update and fetch the dataset if it hasn't already been approved
200
    //If it has, simply returned the dataset in the argument (which was already queried for in the resource)
201
    if (currentApprovalState == null || !currentApprovalState) {
1✔
202
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
1✔
203
      elasticSearchService.synchronizeDatasetInESIndex(dataset, user, true);
1✔
204
      datasetReturn = datasetDAO.findDatasetById(datasetId);
1✔
205
    } else {
206
      if (approval == null || !approval) {
1✔
207
        throw new IllegalArgumentException("Dataset is already approved");
1✔
208
      }
209
    }
210

211
    try {
212
      // if approval state changed
213
      if (currentApprovalState != datasetReturn.getDacApproval()) {
1✔
214
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
1✔
215
      }
216
    } catch (Exception e) {
×
217
      logException("Unable to notifier Data Submitter of dataset approval status: %s".formatted(dataset.getDatasetIdentifier()), e);
×
218
    }
1✔
219
    return datasetReturn;
1✔
220
  }
221

222
  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, boolean approval)
223
      throws Exception {
224
    Dac dac = dacDAO.findById(dataset.getDacId());
1✔
225
    if (approval) {
1✔
226
      emailService.sendDatasetApprovedMessage(
1✔
227
          user,
228
          dac.getName(),
1✔
229
          dataset.getDatasetIdentifier());
1✔
230
    } else {
231
      if (dac.getEmail() != null) {
1✔
232
        String dacEmail = dac.getEmail();
1✔
233
        emailService.sendDatasetDeniedMessage(
1✔
234
            user,
235
            dac.getName(),
1✔
236
            dataset.getDatasetIdentifier(),
1✔
237
            dacEmail);
238
      }
1✔
239
      else {
240
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
1✔
241
      }
242
    }
243

244
  }
1✔
245

246
  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
247
    return datasetDAO.findDatasetsByIdList(datasetIds);
1✔
248
  }
249

250
  public List<Integer> findAllDatasetIds() {
251
    return datasetDAO.findAllDatasetIds();
×
252
  }
253

254
  public StreamingOutput findAllDatasetsAsStreamingOutput() {
255
    List<Integer> datasetIds = datasetDAO.findAllDatasetIds();
1✔
256
    final List<List<Integer>> datasetIdSubLists = Lists.partition(datasetIds, datasetBatchSize);
1✔
257
    final List<Integer> lastSubList = datasetIdSubLists.get(datasetIdSubLists.size() - 1);
1✔
258
    final Integer lastIndex = lastSubList.get(lastSubList.size() - 1);
1✔
259
    Gson gson = GsonUtil.buildGson();
1✔
260
    return output -> {
1✔
261
      output.write("[".getBytes());
1✔
262
      datasetIdSubLists.forEach(subList -> {
1✔
263
        List<Dataset> datasets = findDatasetsByIds(subList);
1✔
264
        datasets.forEach(d -> {
1✔
265
          try {
266
            output.write(gson.toJson(d).getBytes());
1✔
267
            if (!Objects.equals(d.getDatasetId(), lastIndex)) {
1✔
268
              output.write(",".getBytes());
1✔
269
            }
270
            output.write("\n".getBytes());
1✔
271
          } catch (IOException e) {
×
272
            logException("Error writing dataset to streaming output, dataset id: " + d.getDatasetId(), e);
×
273
          }
1✔
274
        });
1✔
275
      });
1✔
276
      output.write("]".getBytes());
1✔
277
    };
1✔
278
  }
279

280
  public Study getStudyWithDatasetsById(Integer studyId) {
281
    try {
282
      Study study = studyDAO.findStudyById(studyId);
1✔
283
      if (study == null) {
1✔
284
        throw new NotFoundException("Study not found");
1✔
285
      }
286
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
1✔
287
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
×
288
        study.addDatasets(datasets);
×
289
      }
290
      return study;
1✔
291
    } catch (NotFoundException nfe) {
1✔
292
      throw nfe;
1✔
293
    } catch (Exception e) {
1✔
294
      logException(e);
1✔
295
      throw e;
1✔
296
    }
297
  }
298

299
  public List<ApprovedDataset> getApprovedDatasets(User user) {
300
    try {
301
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
1✔
302
      return approvedDatasets;
1✔
303
    } catch (Exception e) {
×
304
      logException(e);
×
305
      throw e;
×
306
    }
307
  }
308

309
  /**
310
   * This method is used to convert a dataset into a study if none exist, or if one does, to update
311
   * the dataset, study, and associated properties with new values. This is an admin function only.
312
   *
313
   * @param dataset         The dataset
314
   * @param studyConversion Study Conversion object
315
   * @return Updated/created study
316
   */
317
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
318
    if (!user.hasUserRole(UserRoles.ADMIN)) {
×
319
      throw new NotAuthorizedException("Admin use only");
×
320
    }
321
    // Study updates:
322
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);
×
323

324
    // Dataset updates
325
    if (studyConversion.getDacId() != null) {
×
326
      datasetDAO.updateDatasetDacId(dataset.getDatasetId(), studyConversion.getDacId());
×
327
    }
328
    if (studyConversion.getDataUse() != null) {
×
329
      datasetDAO.updateDatasetDataUse(dataset.getDatasetId(),
×
330
          studyConversion.getDataUse().toString());
×
331
    }
332
    if (studyConversion.getDataUse() != null) {
×
333
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
×
334
          DataUseTranslationType.DATASET);
335
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDatasetId(), translation);
×
336
    }
337
    if (studyConversion.getDatasetName() != null) {
×
338
      datasetDAO.updateDatasetName(dataset.getDatasetId(), studyConversion.getDatasetName());
×
339
    }
NEW
340
    elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false);
×
341
    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
×
342
    // Handle "Phenotype/Indication"
343
    if (studyConversion.getPhenotype() != null) {
×
344
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
×
345
          studyConversion.getPhenotype());
×
346
    }
347

348
    // Handle "Species"
349
    if (studyConversion.getSpecies() != null) {
×
350
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
×
351
          studyConversion.getSpecies());
×
352
    }
353

354
    if (studyConversion.getNumberOfParticipants() != null) {
×
355
      // Handle "# of participants"
356
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants", PropertyType.Number,
×
357
          studyConversion.getNumberOfParticipants().toString());
×
358
    }
359

360
    // Handle "Data Location"
361
    if (studyConversion.getDataLocation() != null) {
×
362
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
×
363
          studyConversion.getDataLocation());
×
364
    }
365

366
    if (studyConversion.getUrl() != null) {
×
367
      // Handle "URL"
368
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
×
369
          studyConversion.getUrl());
×
370
    }
371

372
    // Handle "Data Submitter User ID"
373
    if (studyConversion.getDataSubmitterEmail() != null) {
×
374
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
×
375
      if (submitter != null) {
×
376
        datasetDAO.updateDatasetCreateUserId(dataset.getDatasetId(), user.getUserId());
×
377
      }
378
    }
379

380
    return studyDAO.findStudyById(studyId);
×
381
  }
382

383
  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
384
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s",
1✔
385
        user.getEmail(), studyId, custodians));
1✔
386
    Study study = studyDAO.findStudyById(studyId);
1✔
387
    if (study == null) {
1✔
388
      throw new NotFoundException("Study not found");
×
389
    }
390
    boolean propPresent = study.getProperties().stream()
1✔
391
        .anyMatch(prop -> prop.getKey().equals(dataCustodianEmail));
1✔
392
    if (propPresent) {
1✔
393
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(),
1✔
394
          custodians);
395
    } else {
396
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(),
1✔
397
          custodians);
398
    }
399
    List<Dataset> datasets = datasetDAO.findDatasetsByIdList(study.getDatasetIds());
1✔
400
    datasets.forEach(dataset -> elasticSearchService.synchronizeDatasetInESIndex(dataset, user, false));
1✔
401
    return studyDAO.findStudyById(studyId);
1✔
402
  }
403

404
  /**
405
   * Ensure that all requested datasetIds exist in the user's list of accepted DAAs
406
   * @param user The requesting User
407
   * @param datasetIds The list of dataset ids the user is requesting access to
408
   */
409
  public void enforceDAARestrictions(User user, List<Integer> datasetIds) {
410
    List<Integer> userDaaDatasetIds = daaDAO.findDaaDatasetIdsByUserId(user.getUserId());
1✔
411
    boolean containsAll = new HashSet<>(userDaaDatasetIds).containsAll(datasetIds);
1✔
412
    if (!containsAll) {
1✔
413
      throw new BadRequestException("User does not have appropriate Data Access Agreements for provided datasets");
1✔
414
    }
415
  }
1✔
416

417
  /**
418
   * This method is used to synchronize a new dataset property with values from the study
419
   * conversion
420
   *
421
   * @param dictionaries   List<Dictionary>
422
   * @param dataset        Dataset
423
   * @param dictionaryName Name to look for in dictionaries
424
   * @param schemaProperty Schema Property to look for in properties
425
   * @param propertyType   Property Type of new value
426
   * @param propValue      New property value
427
   */
428
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
429
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
430
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
×
431
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
×
432
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
×
433
        .findFirst();
×
434
    if (maybeProp.isPresent()) {
×
435
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), maybeProp.get().getPropertyKey(),
×
436
          propValue);
437
    } else {
438
      dictionaries.stream()
×
439
          .filter(d -> d.getKey().equals(dictionaryName))
×
440
          .findFirst()
×
441
          .ifPresent(dictionary -> {
×
442
            DatasetProperty prop = new DatasetProperty();
×
443
            prop.setDatasetId(dataset.getDatasetId());
×
444
            prop.setPropertyKey(dictionary.getKeyId());
×
445
            prop.setSchemaProperty(schemaProperty);
×
446
            prop.setPropertyValue(propValue);
×
447
            prop.setPropertyType(propertyType);
×
448
            prop.setCreateDate(new Date());
×
449
            datasetDAO.insertDatasetProperties(List.of(prop));
×
450
          });
×
451
    }
452
  }
×
453

454
  /**
455
   * This method is used to synchronize a legacy dataset property with values from the study
456
   * conversion
457
   *
458
   * @param dictionaries   List<Dictionary>
459
   * @param dataset        Dataset
460
   * @param dictionaryName Name to look for in dictionaries
461
   * @param schemaProperty Schema Property to update if necessary
462
   * @param propertyType   Property Type of new value
463
   * @param propValue      New property value
464
   */
465
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
466
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
467
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
×
468
        .filter(p -> p.getPropertyName().equals(dictionaryName))
×
469
        .findFirst();
×
470
    Optional<Dictionary> dictionary = dictionaries.stream()
×
471
        .filter(d -> d.getKey().equals(dictionaryName))
×
472
        .findFirst();
×
473
    // Legacy property exists, update it.
474
    if (dictionary.isPresent() && maybeProp.isPresent()) {
×
475
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), dictionary.get().getKeyId(),
×
476
          propValue);
477
    }
478
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
479
    else if (dictionary.isPresent()) {
×
480
      DatasetProperty prop = new DatasetProperty();
×
481
      prop.setDatasetId(dataset.getDatasetId());
×
482
      prop.setPropertyKey(dictionary.get().getKeyId());
×
483
      prop.setSchemaProperty(schemaProperty);
×
484
      prop.setPropertyValue(propValue);
×
485
      prop.setPropertyType(propertyType);
×
486
      prop.setCreateDate(new Date());
×
487
      datasetDAO.insertDatasetProperties(List.of(prop));
×
488
    }
×
489
    // Neither legacy property nor dictionary term does not exist, log a warning.
490
    else {
491
      logWarn("Unable to find dictionary term: " + dictionaryName);
×
492
    }
493
  }
×
494

495
  /**
   * Creates or updates the study backing a dataset from a StudyConversion, links the
   * dataset to it, and reconciles the study's properties with the conversion's.
   *
   * @param user            acting user; used as fallback create/update user id
   * @param dataset         dataset being converted; its create user id is preferred
   * @param studyConversion source of the new study values and properties
   * @return the id of the created or updated study
   */
  private Integer updateStudyFromConversion(User user, Dataset dataset,
      StudyConversion studyConversion) {
    // Ensure that we are not trying to create a new study with an existing name
    Study study = studyDAO.findStudyByName(studyConversion.getName());
    Integer studyId;
    // Prefer the dataset's original creator; fall back to the acting user.
    Integer userId =
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
    // Create or update the study:
    if (study == null) {
      // No study with this name yet: insert a stub built from the conversion values.
      study = studyConversion.createNewStudyStub();
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
          UUID.randomUUID());
      study.setStudyId(studyId);
    } else {
      // Study exists: overwrite its fields with the conversion values.
      studyId = study.getStudyId();
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
          studyConversion.getDescription(), studyConversion.getPiName(),
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
          Instant.now());
    }
    datasetDAO.updateStudyId(dataset.getDatasetId(), studyId);

    // Create or update study properties:
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
    // If we don't have any props, we need to add all of the new ones
    if (existingProps == null || existingProps.isEmpty()) {
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    } else {
      // Study props to add:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
      // Study props to update:
      // NOTE(review): this filter matches on full equality (ep.equals(p)), so an "update"
      // only fires when the existing property already equals the incoming one — likely a
      // no-op. Presumably the intent was to match on key (like the insert branch above)
      // and update when values differ; confirm against StudyProperty.equals before changing.
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    }
    return studyId;
  }
542

543
  public void setDatasetBatchSize(Integer datasetBatchSize) {
544
    this.datasetBatchSize = datasetBatchSize;
1✔
545
  }
1✔
546

547
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc