
DataBiosphere / consent, build #5580 (push, via web-flow)
07 Mar 2025 08:35 PM UTC. Coverage: 79.278% (+0.004% from 79.274%)

Commit: DT-1284: Improve dataset query performance (#2464)

9 of 10 new or added lines in 4 files covered (90.0%).
2 existing lines in 2 files are now uncovered.
10253 of 12933 relevant lines covered (79.28%).
0.79 hits per line.

Source File

/src/main/java/org/broadinstitute/consent/http/service/DatasetService.java (44.21% of lines covered)

package org.broadinstitute.consent.http.service;

import static org.broadinstitute.consent.http.models.dataset_registration_v1.builder.DatasetRegistrationSchemaV1Builder.dataCustodianEmail;

import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.inject.Inject;
import jakarta.ws.rs.BadRequestException;
import jakarta.ws.rs.NotAuthorizedException;
import jakarta.ws.rs.NotFoundException;
import jakarta.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.broadinstitute.consent.http.db.DaaDAO;
import org.broadinstitute.consent.http.db.DacDAO;
import org.broadinstitute.consent.http.db.DatasetDAO;
import org.broadinstitute.consent.http.db.StudyDAO;
import org.broadinstitute.consent.http.db.UserDAO;
import org.broadinstitute.consent.http.enumeration.DataUseTranslationType;
import org.broadinstitute.consent.http.enumeration.PropertyType;
import org.broadinstitute.consent.http.enumeration.UserRoles;
import org.broadinstitute.consent.http.models.ApprovedDataset;
import org.broadinstitute.consent.http.models.Dac;
import org.broadinstitute.consent.http.models.DataUse;
import org.broadinstitute.consent.http.models.Dataset;
import org.broadinstitute.consent.http.models.DatasetProperty;
import org.broadinstitute.consent.http.models.DatasetStudySummary;
import org.broadinstitute.consent.http.models.DatasetSummary;
import org.broadinstitute.consent.http.models.Dictionary;
import org.broadinstitute.consent.http.models.Study;
import org.broadinstitute.consent.http.models.StudyConversion;
import org.broadinstitute.consent.http.models.StudyProperty;
import org.broadinstitute.consent.http.models.User;
import org.broadinstitute.consent.http.models.dto.DatasetDTO;
import org.broadinstitute.consent.http.service.dao.DatasetServiceDAO;
import org.broadinstitute.consent.http.util.ConsentLogger;
import org.broadinstitute.consent.http.util.gson.GsonUtil;


public class DatasetService implements ConsentLogger {

  private final DatasetDAO datasetDAO;
  private final DaaDAO daaDAO;
  private final DacDAO dacDAO;
  private final EmailService emailService;
  private final OntologyService ontologyService;
  private final StudyDAO studyDAO;
  private final DatasetServiceDAO datasetServiceDAO;
  private final UserDAO userDAO;
  public Integer datasetBatchSize = 50;

  @Inject
  public DatasetService(DatasetDAO dataSetDAO, DaaDAO daaDAO, DacDAO dacDAO, EmailService emailService,
      OntologyService ontologyService, StudyDAO studyDAO,
      DatasetServiceDAO datasetServiceDAO, UserDAO userDAO) {
    this.datasetDAO = dataSetDAO;
    this.daaDAO = daaDAO;
    this.dacDAO = dacDAO;
    this.emailService = emailService;
    this.ontologyService = ontologyService;
    this.studyDAO = studyDAO;
    this.datasetServiceDAO = datasetServiceDAO;
    this.userDAO = userDAO;
  }

  public Set<DatasetDTO> findDatasetsByDacIds(List<Integer> dacIds) {
    if (CollectionUtils.isEmpty(dacIds)) {
      throw new BadRequestException("No dataset IDs provided");
    }
    return datasetDAO.findDatasetsByDacIds(dacIds);
  }

  public List<Dataset> findDatasetListByDacIds(List<Integer> dacIds) {
    if (CollectionUtils.isEmpty(dacIds)) {
      throw new BadRequestException("No dataset IDs provided");
    }
    return datasetDAO.findDatasetListByDacIds(dacIds);
  }

  /**
   * TODO: Refactor this to throw a NotFoundException instead of returning null
   * Finds a Dataset by a formatted dataset identifier.
   *
   * @param datasetIdentifier The formatted identifier, e.g. DUOS-123456
   * @return the Dataset with the given identifier, if found.
   * @throws IllegalArgumentException if datasetIdentifier is invalid
   */
  public Dataset findDatasetByIdentifier(String datasetIdentifier) throws IllegalArgumentException {
    Integer alias = Dataset.parseIdentifierToAlias(datasetIdentifier);
    Dataset d = datasetDAO.findDatasetByAlias(alias);
    if (d == null) {
      return null;
    }

    // technically, it is possible to have two dataset identifiers which
    // have the same alias but are not the same: e.g., DUOS-5 and DUOS-00005
    if (!Objects.equals(d.getDatasetIdentifier(), datasetIdentifier)) {
      return null;
    }
    return d;
  }
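
  // Note on the method above: both "DUOS-5" and "DUOS-00005" parse to the same alias (5), so the
  // alias lookup alone is not sufficient; a dataset is only returned when its stored identifier
  // string matches the requested identifier exactly, and null is returned otherwise (see TODO).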

  public Dataset getDatasetByName(String name) {
    String lowercaseName = name.toLowerCase();
    return datasetDAO.getDatasetByName(lowercaseName);
  }

  public Set<String> findAllStudyNames() {
    return datasetDAO.findAllStudyNames();
  }

  public List<String> findAllDatasetNames() {
    return datasetDAO.findAllDatasetNames();
  }

  public Study findStudyById(Integer id) {
    return studyDAO.findStudyById(id);
  }

  public Dataset findDatasetById(Integer id) {
    return datasetDAO.findDatasetById(id);
  }

  public Dataset updateDatasetDataUse(User user, Integer datasetId, DataUse dataUse) {
    Dataset d = datasetDAO.findDatasetById(datasetId);
    if (d == null) {
      throw new NotFoundException("Dataset not found: " + datasetId);
    }
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new IllegalArgumentException("Admin use only");
    }
    datasetDAO.updateDatasetDataUse(datasetId, dataUse.toString());
    return datasetDAO.findDatasetById(datasetId);
  }

  public Dataset syncDatasetDataUseTranslation(Integer datasetId) {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset == null) {
      throw new NotFoundException("Dataset not found");
    }

    String translation = ontologyService.translateDataUse(dataset.getDataUse(),
        DataUseTranslationType.DATASET);
    datasetDAO.updateDatasetTranslatedDataUse(datasetId, translation);

    return datasetDAO.findDatasetById(datasetId);
  }

  public void deleteDataset(Integer datasetId, Integer userId) throws Exception {
    Dataset dataset = datasetDAO.findDatasetById(datasetId);
    if (dataset != null) {
      datasetServiceDAO.deleteDataset(dataset, userId);
    }
  }

  public void deleteStudy(Study study, User user) throws Exception {
    datasetServiceDAO.deleteStudy(study, user);
  }

  public List<DatasetSummary> searchDatasetSummaries(String query) {
    return datasetDAO.findDatasetSummariesByQuery(query);
  }

  public List<DatasetStudySummary> findAllDatasetStudySummaries() {
    return datasetDAO.findAllDatasetStudySummaries();
  }

  public Dataset approveDataset(Dataset dataset, User user, Boolean approval) {
    Boolean currentApprovalState = dataset.getDacApproval();
    Integer datasetId = dataset.getDatasetId();
    Dataset datasetReturn = dataset;
    // Only update and fetch the dataset if it hasn't already been approved.
    // If it has, simply return the dataset passed in as the argument (which was already queried for in the resource).
    if (currentApprovalState == null || !currentApprovalState) {
      datasetDAO.updateDatasetApproval(approval, Instant.now(), user.getUserId(), datasetId);
      datasetReturn = datasetDAO.findDatasetById(datasetId);
    } else {
      if (approval == null || !approval) {
        throw new IllegalArgumentException("Dataset is already approved");
      }
    }

    try {
      // if approval state changed
      if (currentApprovalState != datasetReturn.getDacApproval()) {
        sendDatasetApprovalNotificationEmail(dataset, user, approval);
      }
    } catch (Exception e) {
      logException("Unable to notify Data Submitter of dataset approval status: %s".formatted(dataset.getDatasetIdentifier()), e);
    }
    return datasetReturn;
  }

  private void sendDatasetApprovalNotificationEmail(Dataset dataset, User user, Boolean approval)
      throws Exception {
    Dac dac = dacDAO.findById(dataset.getDacId());
    if (approval) {
      emailService.sendDatasetApprovedMessage(
          user,
          dac.getName(),
          dataset.getDatasetIdentifier());
    } else {
      if (dac.getEmail() != null) {
        String dacEmail = dac.getEmail();
        emailService.sendDatasetDeniedMessage(
            user,
            dac.getName(),
            dataset.getDatasetIdentifier(),
            dacEmail);
      }
      else {
        logWarn("Unable to send dataset denied email to DAC: " + dac.getDacId());
      }
    }

  }

  public List<Dataset> findDatasetsByIds(List<Integer> datasetIds) {
    return datasetDAO.findDatasetsByIdList(datasetIds);
  }

  public List<Integer> findAllDatasetIds() {
    return datasetDAO.findAllDatasetIds();
  }
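
  /**
   * Streams all datasets as one JSON array: the full list of dataset ids is partitioned into
   * batches of {@code datasetBatchSize}, each batch is fetched and serialized as it is written,
   * so the complete set of Dataset objects is never held in memory at once. {@code lastIndex}
   * (the final dataset id in the final batch) is used to avoid a trailing comma after the last
   * element.
   */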
  public StreamingOutput findAllDatasetsAsStreamingOutput() {
    List<Integer> datasetIds = datasetDAO.findAllDatasetIds();
    final List<List<Integer>> datasetIdSubLists = Lists.partition(datasetIds, datasetBatchSize);
    final List<Integer> lastSubList = datasetIdSubLists.get(datasetIdSubLists.size() - 1);
    final Integer lastIndex = lastSubList.get(lastSubList.size() - 1);
    Gson gson = GsonUtil.buildGson();
    return output -> {
      output.write("[".getBytes());
      datasetIdSubLists.forEach(subList -> {
        List<Dataset> datasets = findDatasetsByIds(subList);
        datasets.forEach(d -> {
          try {
            output.write(gson.toJson(d).getBytes());
            if (!Objects.equals(d.getDatasetId(), lastIndex)) {
              output.write(",".getBytes());
            }
            output.write("\n".getBytes());
          } catch (IOException e) {
            logException("Error writing dataset to streaming output, dataset id: " + d.getDatasetId(), e);
          }
        });
      });
      output.write("]".getBytes());
    };
  }

  public Study getStudyWithDatasetsById(Integer studyId) {
    try {
      Study study = studyDAO.findStudyById(studyId);
      if (study == null) {
        throw new NotFoundException("Study not found");
      }
      if (study.getDatasetIds() != null && !study.getDatasetIds().isEmpty()) {
        List<Dataset> datasets = findDatasetsByIds(new ArrayList<>(study.getDatasetIds()));
        study.addDatasets(datasets);
      }
      return study;
    } catch (Exception e) {
      logException(e);
      throw e;
    }

  }

  public List<ApprovedDataset> getApprovedDatasets(User user) {
    try {
      List<ApprovedDataset> approvedDatasets = datasetDAO.getApprovedDatasets(user.getUserId());
      return approvedDatasets;
    } catch (Exception e) {
      logException(e);
      throw e;
    }
  }

  /**
   * This method is used to convert a dataset into a study if none exists, or if one does, to update
   * the dataset, study, and associated properties with new values. This is an admin function only.
   *
   * @param dataset         The dataset
   * @param studyConversion Study Conversion object
   * @return Updated/created study
   */
  public Study convertDatasetToStudy(User user, Dataset dataset, StudyConversion studyConversion) {
    if (!user.hasUserRole(UserRoles.ADMIN)) {
      throw new NotAuthorizedException("Admin use only");
    }
    // Study updates:
    Integer studyId = updateStudyFromConversion(user, dataset, studyConversion);

    // Dataset updates
    if (studyConversion.getDacId() != null) {
      datasetDAO.updateDatasetDacId(dataset.getDatasetId(), studyConversion.getDacId());
    }
    if (studyConversion.getDataUse() != null) {
      datasetDAO.updateDatasetDataUse(dataset.getDatasetId(),
          studyConversion.getDataUse().toString());
    }
    if (studyConversion.getDataUse() != null) {
      String translation = ontologyService.translateDataUse(studyConversion.getDataUse(),
          DataUseTranslationType.DATASET);
      datasetDAO.updateDatasetTranslatedDataUse(dataset.getDatasetId(), translation);
    }
    if (studyConversion.getDatasetName() != null) {
      datasetDAO.updateDatasetName(dataset.getDatasetId(), studyConversion.getDatasetName());
    }

    List<Dictionary> dictionaries = datasetDAO.getDictionaryTerms();
    // Handle "Phenotype/Indication"
    if (studyConversion.getPhenotype() != null) {
      legacyPropConversion(dictionaries, dataset, "Phenotype/Indication", null, PropertyType.String,
          studyConversion.getPhenotype());
    }

    // Handle "Species"
    if (studyConversion.getSpecies() != null) {
      legacyPropConversion(dictionaries, dataset, "Species", null, PropertyType.String,
          studyConversion.getSpecies());
    }

    if (studyConversion.getNumberOfParticipants() != null) {
      // Handle "# of participants"
      legacyPropConversion(dictionaries, dataset, "# of participants", "numberOfParticipants", PropertyType.Number,
          studyConversion.getNumberOfParticipants().toString());
    }

    // Handle "Data Location"
    if (studyConversion.getDataLocation() != null) {
      newPropConversion(dictionaries, dataset, "Data Location", "dataLocation", PropertyType.String,
          studyConversion.getDataLocation());
    }

    if (studyConversion.getUrl() != null) {
      // Handle "URL"
      newPropConversion(dictionaries, dataset, "URL", "url", PropertyType.String,
          studyConversion.getUrl());
    }

    // Handle "Data Submitter User ID"
    if (studyConversion.getDataSubmitterEmail() != null) {
      User submitter = userDAO.findUserByEmail(studyConversion.getDataSubmitterEmail());
      if (submitter != null) {
        datasetDAO.updateDatasetCreateUserId(dataset.getDatasetId(), user.getUserId());
      }
    }

    return studyDAO.findStudyById(studyId);
  }

  public Study updateStudyCustodians(User user, Integer studyId, String custodians) {
    logInfo(String.format("User %s is updating custodians for study id: %s; custodians: %s", user.getEmail(), studyId, custodians));
    Study study = studyDAO.findStudyById(studyId);
    if (study == null) {
      throw new NotFoundException("Study not found");
    }
    Optional<StudyProperty> optionalProp = study.getProperties() == null ?
        Optional.empty() :
        study
        .getProperties()
        .stream()
        .filter(p -> p.getKey().equals(dataCustodianEmail))
        .findFirst();
    if (optionalProp.isPresent()) {
      studyDAO.updateStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
    } else {
      studyDAO.insertStudyProperty(studyId, dataCustodianEmail, PropertyType.Json.toString(), custodians);
    }
    return studyDAO.findStudyById(studyId);
  }

  /**
   * Ensure that all requested datasetIds exist in the user's list of accepted DAAs
   * @param user The requesting User
   * @param datasetIds The list of dataset ids the user is requesting access to
   */
  public void enforceDAARestrictions(User user, List<Integer> datasetIds) {
    List<Integer> userDaaDatasetIds = daaDAO.findDaaDatasetIdsByUserId(user.getUserId());
    boolean containsAll = new HashSet<>(userDaaDatasetIds).containsAll(datasetIds);
    if (!containsAll) {
      throw new BadRequestException("User does not have appropriate Data Access Agreements for provided datasets");
    }
  }
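
  // Example of the check above (hypothetical ids): if the user's accepted DAAs cover dataset ids
  // [1, 2, 3] and the request asks for [2, 5], containsAll is false and a BadRequestException is
  // thrown; a request for [1, 3] would pass.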

  /**
   * This method is used to synchronize a new dataset property with values from the study
   * conversion
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to look for in properties
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void newPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> Objects.nonNull(p.getSchemaProperty()))
        .filter(p -> p.getSchemaProperty().equals(schemaProperty))
        .findFirst();
    if (maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), maybeProp.get().getPropertyKey(),
          propValue);
    } else {
      dictionaries.stream()
          .filter(d -> d.getKey().equals(dictionaryName))
          .findFirst()
          .ifPresent(dictionary -> {
            DatasetProperty prop = new DatasetProperty();
            prop.setDatasetId(dataset.getDatasetId());
            prop.setPropertyKey(dictionary.getKeyId());
            prop.setSchemaProperty(schemaProperty);
            prop.setPropertyValue(propValue);
            prop.setPropertyType(propertyType);
            prop.setCreateDate(new Date());
            datasetDAO.insertDatasetProperties(List.of(prop));
          });
    }
  }

  /**
   * This method is used to synchronize a legacy dataset property with values from the study
   * conversion
   *
   * @param dictionaries   List<Dictionary>
   * @param dataset        Dataset
   * @param dictionaryName Name to look for in dictionaries
   * @param schemaProperty Schema Property to update if necessary
   * @param propertyType   Property Type of new value
   * @param propValue      New property value
   */
  private void legacyPropConversion(List<Dictionary> dictionaries, Dataset dataset,
      String dictionaryName, String schemaProperty, PropertyType propertyType, String propValue) {
    Optional<DatasetProperty> maybeProp = dataset.getProperties().stream()
        .filter(p -> p.getPropertyName().equals(dictionaryName))
        .findFirst();
    Optional<Dictionary> dictionary = dictionaries.stream()
        .filter(d -> d.getKey().equals(dictionaryName))
        .findFirst();
    // Legacy property exists, update it.
    if (dictionary.isPresent() && maybeProp.isPresent()) {
      datasetDAO.updateDatasetProperty(dataset.getDatasetId(), dictionary.get().getKeyId(),
          propValue);
    }
    // Legacy property does not exist, but we have a valid dictionary term, so create it.
    else if (dictionary.isPresent()) {
      DatasetProperty prop = new DatasetProperty();
      prop.setDatasetId(dataset.getDatasetId());
      prop.setPropertyKey(dictionary.get().getKeyId());
      prop.setSchemaProperty(schemaProperty);
      prop.setPropertyValue(propValue);
      prop.setPropertyType(propertyType);
      prop.setCreateDate(new Date());
      datasetDAO.insertDatasetProperties(List.of(prop));
    }
    // Neither the legacy property nor the dictionary term exists, so log a warning.
    else {
      logWarn("Unable to find dictionary term: " + dictionaryName);
    }
  }
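
  // The two conversion helpers above differ mainly in how they locate an existing dataset
  // property: newPropConversion matches on the property's schemaProperty name and can update it
  // without a dictionary lookup, while legacyPropConversion matches on the legacy property name
  // and requires a matching dictionary term before it will update or insert a value.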

  private Integer updateStudyFromConversion(User user, Dataset dataset,
      StudyConversion studyConversion) {
    // Ensure that we are not trying to create a new study with an existing name
    Study study = studyDAO.findStudyByName(studyConversion.getName());
    Integer studyId;
    Integer userId =
        (dataset.getCreateUserId() != null) ? dataset.getCreateUserId() : user.getUserId();
    // Create or update the study:
    if (study == null) {
      study = studyConversion.createNewStudyStub();
      studyId = studyDAO.insertStudy(study.getName(), study.getDescription(), study.getPiName(),
          study.getDataTypes(), study.getPublicVisibility(), userId, Instant.now(),
          UUID.randomUUID());
      study.setStudyId(studyId);
    } else {
      studyId = study.getStudyId();
      studyDAO.updateStudy(study.getStudyId(), studyConversion.getName(),
          studyConversion.getDescription(), studyConversion.getPiName(),
          studyConversion.getDataTypes(), studyConversion.getPublicVisibility(), userId,
          Instant.now());
    }
    datasetDAO.updateStudyId(dataset.getDatasetId(), studyId);

    // Create or update study properties:
    Set<StudyProperty> existingProps = studyDAO.findStudyById(studyId).getProperties();
    // If we don't have any props, we need to add all of the new ones
    if (existingProps == null || existingProps.isEmpty()) {
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    } else {
      // Study props to add:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().noneMatch(ep -> ep.getKey().equals(p.getKey())))
          .forEach(p -> studyDAO.insertStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
      // Study props to update:
      studyConversion.getStudyProperties().stream()
          .filter(Objects::nonNull)
          .filter(p -> existingProps.stream().anyMatch(ep -> ep.equals(p)))
          .forEach(p -> studyDAO.updateStudyProperty(studyId, p.getKey(), p.getType().toString(),
              p.getValue().toString()));
    }
    return studyId;
  }

  public void setDatasetBatchSize(Integer datasetBatchSize) {
    this.datasetBatchSize = datasetBatchSize;
  }

}
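
For context, the sketch below shows one way a JAX-RS resource could return the result of findAllDatasetsAsStreamingOutput() as a streamed HTTP response. The resource class, path, and wiring here are hypothetical and are not part of the file above; the actual application may expose this differently.

package org.broadinstitute.consent.http.resources;

import com.google.inject.Inject;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.StreamingOutput;
import org.broadinstitute.consent.http.service.DatasetService;

// Hypothetical example resource, for illustration only: it hands the StreamingOutput produced by
// DatasetService.findAllDatasetsAsStreamingOutput() to JAX-RS, which writes the JSON array to the
// response body batch by batch instead of buffering every dataset in memory.
@Path("example/datasets")
public class ExampleDatasetStreamingResource {

  private final DatasetService datasetService;

  @Inject
  public ExampleDatasetStreamingResource(DatasetService datasetService) {
    this.datasetService = datasetService;
  }

  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Response listAllDatasets() {
    // Datasets are fetched and serialized in batches of datasetBatchSize as the body is written.
    StreamingOutput stream = datasetService.findAllDatasetsAsStreamingOutput();
    return Response.ok(stream).build();
  }
}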