IQSS / dataverse, build #22693

03 Jul 2024 01:09PM CUT. Coverage: 20.626% (-0.09%) from 20.716%.

Build triggered by a push, reported via GitHub; committer: web-flow.
Commit: Merge pull request #10664 from IQSS/develop ("merge develop into master for 6.3")

195 of 1852 new or added lines in 82 files covered (10.53%).
72 existing lines in 33 files are now uncovered.
17335 of 84043 relevant lines covered (20.63%).
0.21 hits per line.
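
The percentages in the summary are plain ratios of covered lines to total lines. The following is a minimal sketch, not Coveralls code, that reproduces the headline figures quoted above:

// CoverageMath.java: illustrative only; reproduces the ratios quoted in the build summary.
public class CoverageMath {
    public static void main(String[] args) {
        int newCovered = 195, newTotal = 1852;              // new or added lines in this build
        int relevantCovered = 17335, relevantTotal = 84043; // all relevant lines in the project

        System.out.printf("new-line coverage: %.2f%%%n", 100.0 * newCovered / newTotal);           // 10.53%
        System.out.printf("overall coverage:  %.3f%%%n", 100.0 * relevantCovered / relevantTotal); // 20.626%
        // "0.21 hits per line" appears to be the average number of recorded hits per relevant
        // line; it cannot be recomputed from the line counts shown here.
    }
}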

Source File: /src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java (file coverage: 0.0%)

package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileCategory;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetLock;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersionDifference;
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.util.DatasetFieldUtil;
import edu.harvard.iq.dataverse.util.FileMetadataUtil;

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import jakarta.validation.ConstraintViolationException;

/**
 *
 * @author skraffmiller
 */
@RequiredPermissions(Permission.EditDataset)
public class UpdateDatasetVersionCommand extends AbstractDatasetCommand<Dataset> {

    static final Logger logger = Logger.getLogger(UpdateDatasetVersionCommand.class.getCanonicalName());
    private final List<FileMetadata> filesToDelete;
    private boolean validateLenient = false;
    private final DatasetVersion clone;
    final FileMetadata fmVarMet;

    public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) {
        super(aRequest, theDataset);
        this.filesToDelete = new ArrayList<>();
        this.clone = null;
        this.fmVarMet = null;
    }

    public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest, List<FileMetadata> filesToDelete) {
        super(aRequest, theDataset);
        this.filesToDelete = filesToDelete;
        this.clone = null;
        this.fmVarMet = null;
    }

    public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest, List<FileMetadata> filesToDelete, DatasetVersion clone) {
        super(aRequest, theDataset);
        this.filesToDelete = filesToDelete;
        this.clone = clone;
        this.fmVarMet = null;
    }

    public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest, DataFile fileToDelete) {
        super(aRequest, theDataset);

        // get the latest file metadata for the file, ensuring that it is a draft version
        this.filesToDelete = new ArrayList<>();
        this.clone = null;
        this.fmVarMet = null;
        for (FileMetadata fmd : theDataset.getOrCreateEditVersion().getFileMetadatas()) {
            if (fmd.getDataFile().equals(fileToDelete)) {
                filesToDelete.add(fmd);
                break;
            }
        }
    }

    public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest, DatasetVersion clone) {
        super(aRequest, theDataset);
        this.filesToDelete = new ArrayList<>();
        this.clone = clone;
        this.fmVarMet = null;
    }

    public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest, FileMetadata fm) {
        super(aRequest, theDataset);
        this.filesToDelete = new ArrayList<>();
        this.clone = null;
        this.fmVarMet = fm;
    }

    public boolean isValidateLenient() {
        return validateLenient;
    }

    public void setValidateLenient(boolean validateLenient) {
        this.validateLenient = validateLenient;
    }

    @Override
    public Dataset execute(CommandContext ctxt) throws CommandException {
        if (!(getUser() instanceof AuthenticatedUser)) {
            throw new IllegalCommandException("Only authenticated users can update datasets", this);
        }

        Dataset theDataset = getDataset();
        ctxt.permissions().checkUpdateDatasetVersionLock(theDataset, getRequest(), this);
        Dataset savedDataset = null;

        DatasetVersion persistedVersion = clone;
        /*
         * Unless a pre-change clone has been provided, we need to get it from the db.
         * There are two cases: we're updating an existing draft, which has an id and
         * exists in the database, or we've created a new draft, with a null id, and we
         * need to get the latest version from the db.
         */
        if (persistedVersion == null) {
            Long id = getDataset().getLatestVersion().getId();
            persistedVersion = ctxt.datasetVersion().find(id != null ? id : getDataset().getLatestVersionForCopy().getId());
        }

        // Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied.
        checkSystemMetadataKeyIfNeeded(getDataset().getOrCreateEditVersion(fmVarMet), persistedVersion);

        getDataset().getOrCreateEditVersion().setLastUpdateTime(getTimestamp());

        registerExternalVocabValuesIfAny(ctxt, getDataset().getOrCreateEditVersion(fmVarMet));

        try {
            // Invariant: Dataset has no locks preventing the update
            String lockInfoMessage = "saving current edits";
            DatasetLock lock = ctxt.datasets().addDatasetLock(getDataset().getId(), DatasetLock.Reason.EditInProgress, ((AuthenticatedUser) getUser()).getId(), lockInfoMessage);
            if (lock != null) {
                theDataset.addLock(lock);
            } else {
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", getDataset().getId());
            }

            getDataset().getOrCreateEditVersion(fmVarMet).setDatasetFields(getDataset().getOrCreateEditVersion(fmVarMet).initDatasetFields());
            validateOrDie(getDataset().getOrCreateEditVersion(fmVarMet), isValidateLenient());

            final DatasetVersion editVersion = getDataset().getOrCreateEditVersion(fmVarMet);

            DatasetFieldUtil.tidyUpFields(editVersion.getDatasetFields(), true);

            // Merge the new version into our JPA context, if needed.
            if (editVersion.getId() == null || editVersion.getId() == 0L) {
                ctxt.em().persist(editVersion);
            } else {
                try {
                    ctxt.em().merge(editVersion);
                } catch (ConstraintViolationException e) {
                    logger.log(Level.SEVERE, "Exception: ");
                    e.getConstraintViolations().forEach(err -> logger.log(Level.SEVERE, err.toString()));
                    throw e;
                }
            }

            for (DataFile dataFile : theDataset.getFiles()) {
                if (dataFile.getCreateDate() == null) {
                    dataFile.setCreateDate(getTimestamp());
                    dataFile.setCreator((AuthenticatedUser) getUser());
                }
                dataFile.setModificationTime(getTimestamp());
            }

            // Remove / delete any files that were removed.

            // If any of the files that we are deleting has a UNF, we will need to
            // re-calculate the UNF of the version, since that is the product
            // of the UNFs of the individual files.
            boolean recalculateUNF = false;
            /*
             * This separate loop is just to make sure that the dataset record in the
             * database is updated, specifically when an image datafile that is being used
             * as the dataset thumbnail is deleted as part of a batch delete. If we don't
             * remove the thumbnail association with the dataset before the actual deletion
             * of the file, it might throw foreign key integrity violation exceptions.
             */
            for (FileMetadata fmd : filesToDelete) {
                // check if this file is being used as the default thumbnail
                if (fmd.getDataFile().equals(theDataset.getThumbnailFile())) {
                    logger.fine("deleting the dataset thumbnail designation");
                    theDataset.setThumbnailFile(null);
                }

                if (fmd.getDataFile().getUnf() != null) {
                    recalculateUNF = true;
                }
            }
            // We have to merge to update the database, but not flush, because we don't want
            // to create two draft versions! Although not completely tested, it looks like
            // this merge handles the thumbnail case: if the filemetadata is removed from the
            // context below and the dataset still references it, that could cause an issue.
            // Merging here removes any reference to it as the dataset thumbnail.
            theDataset = ctxt.em().merge(theDataset);

            /*
             * This code has to handle many cases, and anyone making changes should
             * carefully check tests and basic methods that update the dataset version. The
             * differences between the cases stem primarily from whether the files to add
             * and the files to delete, and their filemetadata, have been persisted at this
             * point, which manifests itself in whether they have id numbers or not and,
             * apparently, whether or not they exist in lists, e.g. the getFileMetadatas()
             * list of a datafile.
             *
             * To handle this, the code carefully checks that deletions are deleting the
             * right things and not, for example, doing a remove(fmd) when fmd.getId() is
             * null, which just removes the first element found.
             */
            for (FileMetadata fmd : filesToDelete) {
                logger.fine("Deleting fmd: " + fmd.getId() + " for file: " + fmd.getDataFile().getId());
                // If the file is a draft (i.e. new to this version), delete it. Otherwise just
                // remove the filemetadata object.
                // There are a few cases to handle:
                // * the fmd has an id (has been persisted) and is the one in the current
                //   (draft) version
                // * the fmd has an id (has been persisted) but it is from a published version,
                //   so we need the corresponding one from the draft version (i.e. created
                //   during a getEditVersion call)
                // * the fmd has no id (hasn't been persisted), so we have to use non-id-based
                //   means to identify it and remove it from lists

                if (fmd.getId() != null) {
                    // If the datasetversion doesn't match, we have the fmd from a published version
                    // and we need to remove the one for the newly created draft instead, so we find
                    // it here.
                    logger.fine("Edit ver: " + theDataset.getOrCreateEditVersion().getId());
                    logger.fine("fmd ver: " + fmd.getDatasetVersion().getId());
                    if (!theDataset.getOrCreateEditVersion().equals(fmd.getDatasetVersion())) {
                        fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getOrCreateEditVersion());
                    }
                }
                fmd = ctxt.em().merge(fmd);

                // There are two datafile cases as well: either the file has been released, so
                // we're just removing it from the current draft version, or it is only in the
                // draft version and we completely remove the file.
                if (!fmd.getDataFile().isReleased()) {
                    // remove the file
                    ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest()));
                    // and remove the file from the dataset's list
                    theDataset.getFiles().remove(fmd.getDataFile());
                } else {
                    // if we aren't removing the file, we need to explicitly remove the fmd from
                    // the context and then remove it from the datafile's list
                    ctxt.em().remove(fmd);
                    FileMetadataUtil.removeFileMetadataFromList(fmd.getDataFile().getFileMetadatas(), fmd);
                }
                // In either case, to fully remove the fmd, we have to remove any other possible
                // references:
                // from the datasetversion
                FileMetadataUtil.removeFileMetadataFromList(theDataset.getOrCreateEditVersion().getFileMetadatas(), fmd);
                // and from the list associated with each category
                for (DataFileCategory cat : theDataset.getCategories()) {
                    FileMetadataUtil.removeFileMetadataFromList(cat.getFileMetadatas(), fmd);
                }
            }
            for (FileMetadata fmd : theDataset.getOrCreateEditVersion().getFileMetadatas()) {
                logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + " is in final draft version");
            }

            if (recalculateUNF) {
                ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getOrCreateEditVersion());
            }

            theDataset.setModificationTime(getTimestamp());

            savedDataset = ctxt.em().merge(theDataset);
            ctxt.em().flush();

            updateDatasetUser(ctxt);
            if (clone != null) {
                DatasetVersionDifference dvd = new DatasetVersionDifference(editVersion, clone);
                AuthenticatedUser au = (AuthenticatedUser) getUser();
                ctxt.datasetVersion().writeEditVersionLog(dvd, au);
            }
        } finally {
            // We're done making changes, so remove the lock...
            // Failures above may occur before savedDataset is set, in which case we need to
            // remove the lock on theDataset instead.
            if (savedDataset != null) {
                ctxt.datasets().removeDatasetLocks(savedDataset, DatasetLock.Reason.EditInProgress);
            } else {
                ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.EditInProgress);
            }
        }

        return savedDataset;
    }

    @Override
    public boolean onSuccess(CommandContext ctxt, Object r) {
        // Async indexing significantly improves performance when updating datasets with thousands of files.
        // Indexing will be started immediately, unless an index is already busy for the given data
        // (it will then be scheduled for later indexing of the newest version).
        // See the documentation of the asyncIndexDataset method for more details.
        ctxt.index().asyncIndexDataset((Dataset) r, true);
        return true;
    }

}
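
For context: like the DeleteDataFileCommand call visible inside execute(), this command is meant to be run by submitting it to the Dataverse command engine rather than by calling execute() directly. The sketch below is a hypothetical caller, using only the constructors, the setValidateLenient() setter, and the ctxt.engine().submit(...) pattern that appear in the listing above; the example class and method names are invented for illustration.

package edu.harvard.iq.dataverse.engine.command.impl;

// Hypothetical caller-side sketch; not part of the file above.
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;

public class UpdateDatasetVersionExample {

    public Dataset saveEdits(CommandContext ctxt, Dataset dataset, DataverseRequest request)
            throws CommandException {
        // Simplest constructor: update the draft version with no files to delete and no
        // pre-change clone for difference logging.
        UpdateDatasetVersionCommand cmd = new UpdateDatasetVersionCommand(dataset, request);
        cmd.setValidateLenient(true); // optional: relax dataset-field validation (see validateOrDie above)
        // The submitting user must be an AuthenticatedUser and hold the EditDataset permission
        // (see the @RequiredPermissions annotation and the check at the top of execute()).
        return ctxt.engine().submit(cmd);
    }
}

The remaining constructors follow the same pattern, additionally passing a list of FileMetadata to delete, a pre-change DatasetVersion clone (used for the difference log written near the end of execute()), a single DataFile to delete, or a single FileMetadata.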