diff --git a/doc/release-notes/11243-editmetadata-api-extension.md b/doc/release-notes/11243-editmetadata-api-extension.md new file mode 100644 index 00000000000..3666d8bc30a --- /dev/null +++ b/doc/release-notes/11243-editmetadata-api-extension.md @@ -0,0 +1,7 @@ +### Edit Dataset Metadata API extension + +- This endpoint now allows removing fields (by sending empty values), as long as they are not required by the dataset. +- New ``sourceLastUpdateTime`` optional query parameter, which prevents inconsistencies by managing updates that + may occur from other users while a dataset is being edited. + +NOTE: This release note was updated to conform to the refactoring of the validation as part of issue #11392 diff --git a/doc/release-notes/11392-edit-file-metadata-empty-values.md b/doc/release-notes/11392-edit-file-metadata-empty-values.md new file mode 100644 index 00000000000..5839fa100af --- /dev/null +++ b/doc/release-notes/11392-edit-file-metadata-empty-values.md @@ -0,0 +1,7 @@ +### Edit File Metadata empty values should clear data + +Previously the API POST /files/{id}/metadata would ignore fields with empty values. Now the API updates the fields with the empty values essentially clearing the data. Missing fields will still be ignored. + +An optional query parameter (sourceLastUpdateTime) was added to ensure the metadata update doesn't overwrite stale data. + +See also [the guides](https://dataverse-guide--11359.org.readthedocs.build/en/11359/api/native-api.html#updating-file-metadata), #11392, and #11359. diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 46b4a9e6f00..b1eea7995e6 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -7,6 +7,11 @@ This API changelog is experimental and we would love feedback on its usefulness. 
:local: :depth: 1 +v6.8 +---- +- For POST /api/files/{id}/metadata passing an empty string ("description":"") or array ("categories":[]) will no longer be ignored. Empty fields will now clear out the values in the file's metadata. To ignore the fields simply do not include them in the JSON string. +- For PUT /api/datasets/{id}/editMetadata the query parameter "sourceInternalVersionNumber" has been removed and replaced with "sourceLastUpdateTime" to verify that the data being edited hasn't been modified and isn't stale. + v6.7 ---- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 293fc94638d..221dd2b72e4 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2156,26 +2156,26 @@ For these edits your JSON file need only include those dataset fields which you This endpoint also allows removing fields, as long as they are not required by the dataset. To remove a field, send an empty value (``""``) for individual fields. For multiple fields, send an empty array (``[]``). A sample JSON file for removing fields may be downloaded here: :download:`dataset-edit-metadata-delete-fields-sample.json <../_static/api/dataset-edit-metadata-delete-fields-sample.json>` -If another user updates the dataset version metadata before you send the update request, data inconsistencies may occur. To prevent this, you can use the optional ``sourceInternalVersionNumber`` query parameter. This parameter must include the internal version number corresponding to the dataset version being updated. Note that internal version numbers increase sequentially with each version update. +If another user updates the dataset version metadata before you send the update request, metadata inconsistencies may occur. To prevent this, you can use the optional ``sourceLastUpdateTime`` query parameter. 
This parameter must include the ``lastUpdateTime`` corresponding to the dataset version being updated. The date must be in the format ``yyyy-MM-dd'T'HH:mm:ss'Z'``. -If this parameter is provided, the update will proceed only if the internal version number remains unchanged. Otherwise, the request will fail with an error. +If this parameter is provided, the update will proceed only if the ``lastUpdateTime`` remains unchanged (meaning no one has updated the dataset metadata since you retrieved it). Otherwise, the request will fail with an error. -Example using ``sourceInternalVersionNumber``: +Example using ``sourceLastUpdateTime``: .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z - export SOURCE_INTERNAL_VERSION_NUMBER=5 + export SOURCE_LAST_UPDATE_TIME=2025-04-25T13:58:28Z - curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true&sourceInternalVersionNumber=$SOURCE_INTERNAL_VERSION_NUMBER" --upload-file dataset-update-metadata.json + curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true&sourceLastUpdateTime=$SOURCE_LAST_UPDATE_TIME" --upload-file dataset-update-metadata.json The fully expanded example above (without environment variables) looks like this: ..
code-block:: bash - curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true&sourceInternalVersionNumber=5" --upload-file dataset-update-metadata.json + curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true&sourceLastUpdateTime=2025-04-25T13:58:28Z" --upload-file dataset-update-metadata.json Delete Dataset Metadata @@ -4730,6 +4730,8 @@ Updating File Metadata Updates the file metadata for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the new metadata. No metadata from the previous version of this file will be persisted, so if you want to update a specific field first get the json with the above command and alter the fields you want. +An optional parameter, sourceLastUpdateTime=datetime (in format: ``yyyy-MM-dd'T'HH:mm:ss'Z'``), can be used to verify that the file metadata being edited has not been changed since you last retrieved it, thereby avoiding potential lost metadata updates. The value for sourceLastUpdateTime can be taken from ``lastUpdateTime`` in the response to the GET $SERVER_URL/api/files/$ID API call. + A curl example using an ``ID`` .. code-block:: bash @@ -4750,17 +4752,18 @@ The fully expanded example above (without environment variables) looks like this -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "https://demo.dataverse.org/api/files/24/metadata" -A curl example using a ``PERSISTENT_ID`` +A curl example using a ``PERSISTENT_ID`` and the sourceLastUpdateTime parameter: ..
code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export UPDATE_TIME=2025-04-25T13:58:28Z curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ - "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" + "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID&sourceLastUpdateTime=$UPDATE_TIME" The fully expanded example above (without environment variables) looks like this: @@ -4768,7 +4771,7 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ - "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" + "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000&sourceLastUpdateTime=2025-04-25T13:58:28Z" Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 018657bff4d..76ef91fbd3a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -28,6 +28,7 @@ import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.DateUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParser; @@ -52,6 +53,7 @@ import java.io.InputStream; import java.net.URI; +import java.time.Instant; import java.util.*; import java.util.concurrent.Callable; import java.util.logging.Level; @@ -447,10 +449,22 @@ public Command handleLatestPublished() { return dsv; } - protected void validateInternalVersionNumberIsNotOutdated(Dataset dataset, int internalVersion) throws WrappedResponse { - if (dataset.getLatestVersion().getVersion() > internalVersion) { + protected void validateInternalTimestampIsNotOutdated(DvObject dvObject, String sourceLastUpdateTime) throws WrappedResponse { + Date date = sourceLastUpdateTime != null ? DateUtil.parseDate(sourceLastUpdateTime, "yyyy-MM-dd'T'HH:mm:ss'Z'") : null; + if (date == null) { throw new WrappedResponse( - badRequest(BundleUtil.getStringFromBundle("abstractApiBean.error.datasetInternalVersionNumberIsOutdated", Collections.singletonList(Integer.toString(internalVersion)))) + badRequest(BundleUtil.getStringFromBundle("jsonparser.error.parsing.date", Collections.singletonList(sourceLastUpdateTime))) + ); + } + Instant instant = date.toInstant(); + Instant updateTimestamp = + (dvObject instanceof DataFile) ? 
((DataFile) dvObject).getFileMetadata().getDatasetVersion().getLastUpdateTime().toInstant() : + (dvObject instanceof Dataset) ? ((Dataset) dvObject).getLatestVersion().getLastUpdateTime().toInstant() : + instant; + // granularity is to the second since the json output only returns dates in this format to the second + if (updateTimestamp.getEpochSecond() != instant.getEpochSecond()) { + throw new WrappedResponse( + badRequest(BundleUtil.getStringFromBundle("abstractApiBean.error.internalVersionTimestampIsOutdated", Collections.singletonList(sourceLastUpdateTime))) ); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 94f51dd4ccc..dfa704fdd8c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1118,12 +1118,14 @@ private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){ @PUT @AuthRequired @Path("{id}/editMetadata") - public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") boolean replaceData, @QueryParam("sourceInternalVersionNumber") Integer sourceInternalVersionNumber) { + public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, + @QueryParam("replace") boolean replaceData, + @QueryParam("sourceLastUpdateTime") String sourceLastUpdateTime) { try { Dataset dataset = findDatasetOrDie(id); - if (sourceInternalVersionNumber != null) { - validateInternalVersionNumberIsNotOutdated(dataset, sourceInternalVersionNumber); + if (sourceLastUpdateTime != null) { + validateInternalTimestampIsNotOutdated(dataset, sourceLastUpdateTime); } JsonObject json = JsonUtil.getJsonObject(jsonBody); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 61a69236f57..5834e7e0008 100644 
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -410,8 +410,7 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP @AuthRequired @Path("{id}/metadata") public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDataParam("jsonData") String jsonData, - @PathParam("id") String fileIdOrPersistentId - ) throws DataFileTagException, CommandException { + @PathParam("id") String fileIdOrPersistentId, @QueryParam("sourceLastUpdateTime") String sourceLastUpdateTime) { FileMetadata upFmd = null; @@ -429,6 +428,13 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa return error(BAD_REQUEST, "Error attempting get the requested data file."); } + if (sourceLastUpdateTime != null) { + try { + validateInternalTimestampIsNotOutdated(df, sourceLastUpdateTime); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } //You shouldn't be trying to edit a datafile that has been replaced List result = em.createNamedQuery("DataFile.findDataFileThatReplacedId", Long.class) @@ -519,7 +525,7 @@ public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDa return error(Response.Status.INTERNAL_SERVER_ERROR, "Error adding metadata to DataFile: " + e); } - } catch (WrappedResponse wr) { + } catch (CommandException | WrappedResponse ex) { return error(BAD_REQUEST, "An error has occurred attempting to update the requested DataFile, likely related to permissions."); } diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java index 54844160163..f7df81b6386 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -194,46 +194,28 @@ public boolean getTabIngest() { return this.tabIngest; 
} - public boolean hasCategories(){ - if ((categories == null)||(this.categories.isEmpty())){ - return false; - } - return true; + public boolean hasCategories() { + return categories != null; } - public boolean hasFileDataTags(){ - if ((dataFileTags == null)||(this.dataFileTags.isEmpty())){ - return false; - } - return true; + public boolean hasFileDataTags() { + return dataFileTags != null; } public boolean hasDescription(){ - if ((description == null)||(this.description.isEmpty())){ - return false; - } - return true; + return description != null; } - public boolean hasDirectoryLabel(){ - if ((directoryLabel == null)||(this.directoryLabel.isEmpty())){ - return false; - } - return true; + public boolean hasDirectoryLabel() { + return directoryLabel != null; } - public boolean hasLabel(){ - if ((label == null)||(this.label.isEmpty())){ - return false; - } - return true; + public boolean hasLabel() { + return label != null; } - public boolean hasProvFreeform(){ - if ((provFreeForm == null)||(this.provFreeForm.isEmpty())){ - return false; - } - return true; + public boolean hasProvFreeform() { + return provFreeForm != null; } public boolean hasStorageIdentifier() { @@ -245,7 +227,7 @@ public String getStorageIdentifier() { } public boolean hasFileName() { - return ((fileName!=null)&&(!fileName.isEmpty())); + return fileName != null; } public String getFileName() { @@ -253,7 +235,7 @@ public String getFileName() { } public boolean hasMimetype() { - return ((mimeType!=null)&&(!mimeType.isEmpty())); + return mimeType != null; } public String getMimeType() { @@ -266,7 +248,7 @@ public void setCheckSum(String checkSum, ChecksumType type) { } public boolean hasCheckSum() { - return ((checkSumValue!=null)&&(!checkSumValue.isEmpty())); + return checkSumValue != null; } public String getCheckSum() { @@ -294,15 +276,10 @@ public void setFileSize(long fileSize) { * @param tags */ public void setCategories(List newCategories) { - if (newCategories != null) { newCategories = 
Util.removeDuplicatesNullsEmptyStrings(newCategories); - if (newCategories.isEmpty()) { - newCategories = null; - } + this.categories = newCategories; } - - this.categories = newCategories; } /** @@ -495,27 +472,20 @@ private void addFileDataTags(List potentialTags) throws DataFileTagExcep } potentialTags = Util.removeDuplicatesNullsEmptyStrings(potentialTags); - - if (potentialTags.isEmpty()){ - return; - } - + // Make a new list - this.dataFileTags = new ArrayList<>(); + List newList = new ArrayList<>(); // Add valid potential tags to the list for (String tagToCheck : potentialTags){ if (DataFileTag.isDataFileTag(tagToCheck)){ - this.dataFileTags.add(tagToCheck); + newList.add(tagToCheck); }else{ String errMsg = BundleUtil.getStringFromBundle("file.addreplace.error.invalid_datafile_tag"); throw new DataFileTagException(errMsg + " [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString()); } } - // Shouldn't happen.... - if (dataFileTags.isEmpty()){ - dataFileTags = null; - } + this.dataFileTags = newList; } private void msg(String s){ diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 23bdd460160..0a0e0bfcc81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -905,7 +905,8 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo .add("tabularData", df.isTabularData()) .add("tabularTags", getTabularFileTags(df)) .add("creationDate", df.getCreateDateFormattedYYYYMMDD()) - .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()); + .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD()) + .add("lastUpdateTime", format(fileMetadata.getDatasetVersion().getLastUpdateTime())); Dataset dfOwner = df.getOwner(); if (dfOwner != null) { builder.add("fileAccessRequest", 
dfOwner.isFileAccessRequest()); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 6560b914f56..fbc8c13a691 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -3220,7 +3220,7 @@ datasetFieldValidator.error.emptyRequiredSingleValueForField=Empty required valu updateDatasetFieldsCommand.api.processDatasetUpdate.parseError=Error parsing dataset update: {0} #AbstractApiBean.java -abstractApiBean.error.datasetInternalVersionNumberIsOutdated=Dataset internal version number {0} is outdated +abstractApiBean.error.internalVersionTimestampIsOutdated=Internal version timestamp {0} is outdated #RoleAssigneeServiceBean.java roleAssigneeServiceBean.error.dataverseRequestCannotBeNull=DataverseRequest cannot be null. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 2decd7b19d7..2ffefe37eb6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -786,25 +786,26 @@ public void testAddUpdateDatasetViaNativeAPI() { """; Response updateMetadataRemoveAlternativeTitles = UtilIT.editVersionMetadataFromJsonStr(datasetPersistentId, jsonString, apiToken); + updateMetadataRemoveAlternativeTitles.prettyPrint(); updateMetadataRemoveAlternativeTitles.then().assertThat() .body("data.metadataBlocks.citation.fields[2].typeName", not(equalTo("alternativeTitle"))) .statusCode(OK.getStatusCode()); - // Test sourceInternalVersionNumber optional query parameter - - Integer internalVersionNumber = updateMetadataRemoveAlternativeTitles.then().extract().path("data.internalVersionNumber"); - assertNotNull(internalVersionNumber); + // Test sourceLastUpdateTime optional query parameter + String sourceLastUpdateTime = updateMetadataRemoveAlternativeTitles.then().extract().path("data.lastUpdateTime"); + 
assertNotNull(sourceLastUpdateTime); + String oldTimestamp = "2025-04-25T13:58:28Z"; // Case 1 - Pass outdated internal version number - Response updateMetadataWithOutdatedInternalVersionNumber = UtilIT.editVersionMetadataFromJsonStr(datasetPersistentId, jsonString, apiToken, internalVersionNumber - 1); + Response updateMetadataWithOutdatedInternalVersionNumber = UtilIT.editVersionMetadataFromJsonStr(datasetPersistentId, jsonString, apiToken, oldTimestamp); updateMetadataWithOutdatedInternalVersionNumber.then().assertThat() - .body("message", equalTo(BundleUtil.getStringFromBundle("abstractApiBean.error.datasetInternalVersionNumberIsOutdated", Collections.singletonList(Integer.toString(internalVersionNumber - 1))))) + .body("message", equalTo(BundleUtil.getStringFromBundle("abstractApiBean.error.internalVersionTimestampIsOutdated", Collections.singletonList(oldTimestamp)))) .statusCode(BAD_REQUEST.getStatusCode()); // Case 2 - Pass latest internal version number - Response updateMetadataWithLatestInternalVersionNumber = UtilIT.editVersionMetadataFromJsonStr(datasetPersistentId, jsonString, apiToken, internalVersionNumber); + Response updateMetadataWithLatestInternalVersionNumber = UtilIT.editVersionMetadataFromJsonStr(datasetPersistentId, jsonString, apiToken, sourceLastUpdateTime); updateMetadataWithLatestInternalVersionNumber.then().assertThat() .statusCode(OK.getStatusCode()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 8a423f550ff..bbd8a4a4f56 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2,13 +2,14 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; 
import edu.harvard.iq.dataverse.util.json.JsonUtil; import io.restassured.RestAssured; import io.restassured.response.Response; -import java.util.List; +import java.util.*; import java.util.logging.Logger; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; @@ -33,9 +34,6 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.text.MessageFormat; -import java.util.Arrays; -import java.util.Collections; -import java.util.Map; import jakarta.json.Json; import jakarta.json.JsonObjectBuilder; @@ -3313,4 +3311,118 @@ public void testUploadFilesWithLimits() throws JsonParseException { .body("message", containsString(BundleUtil.getStringFromBundle("file.dataset.error.set.file.count.limit"))) .statusCode(FORBIDDEN.getStatusCode()); } + + @Test + public void testUpdateWithEmptyFieldsAndVersionCheck() throws InterruptedException { + // Create User, Dataverse, and Dataset + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload a tab file + JsonObjectBuilder json = Json.createObjectBuilder() + .add(OptionalFileParams.DESCRIPTION_ATTR_NAME, "my description") + .add(OptionalFileParams.DIRECTORY_LABEL_ATTR_NAME, "data/subdir1") + .add(OptionalFileParams.PROVENANCE_FREEFORM_ATTR_NAME, "prov Free Form") + .add(OptionalFileParams.CATEGORIES_ATTR_NAME, 
Json.createArrayBuilder().add("Data")); + String pathToTestFile = "src/test/resources/tab/test.tab"; + Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, json.build(), apiToken); + uploadFile.prettyPrint(); + uploadFile.then().assertThat().statusCode(OK.getStatusCode()); + Long fileId = JsonPath.from(uploadFile.body().asString()).getLong("data.files[0].dataFile.id"); + assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToTestFile); + + // Can't add tags until after the file is ingested and determined to be a tabular file + JsonObjectBuilder updateFileJson = Json.createObjectBuilder() + .add(OptionalFileParams.FILE_DATA_TAGS_ATTR_NAME, Json.createArrayBuilder().add("Survey")); + Response updateFileResponse = UtilIT.updateFileMetadata(String.valueOf(fileId), updateFileJson.build().toString(), apiToken); + updateFileResponse.prettyPrint(); + + // Get and verify the FileData + Response getFile = UtilIT.getFileData(String.valueOf(fileId), apiToken); + getFile.prettyPrint(); + getFile.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.description", equalTo("my description")) + .body("data.dataFile.description", equalTo("my description")) + .body("data.directoryLabel", equalTo("data/subdir1")) + .body("data.categories", hasItem("Data")) + .body("data.dataFile.tabularTags", hasItem("Survey")); + + // Publish the Dataverse and Dataset + Response publishResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishResponse.then().assertThat().statusCode(OK.getStatusCode()); + publishResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get the base version + getFile = UtilIT.getFileData(String.valueOf(fileId), apiToken); + getFile.prettyPrint(); + String lastUpdateTime = 
String.valueOf(JsonPath.from(getFile.body().asString()).getString("data.dataFile.lastUpdateTime")); + + // first user updates which creates a new DRAFT version + json = Json.createObjectBuilder() + .add(OptionalFileParams.DESCRIPTION_ATTR_NAME, "") + .add(OptionalFileParams.LABEL_ATTR_NAME, "test.tab") + .add(OptionalFileParams.DIRECTORY_LABEL_ATTR_NAME, "") + .add(OptionalFileParams.PROVENANCE_FREEFORM_ATTR_NAME, "") + .add(OptionalFileParams.CATEGORIES_ATTR_NAME, Json.createArrayBuilder()) + .add(OptionalFileParams.FILE_DATA_TAGS_ATTR_NAME, Json.createArrayBuilder()); + Response updateResponse = UtilIT.updateFileMetadata(String.valueOf(fileId), json.build().toString(), apiToken, lastUpdateTime); + updateResponse.prettyPrint(); + updateResponse.then().assertThat().statusCode(OK.getStatusCode()); + Thread.sleep(1500); + + // Get the latest version + getFile = UtilIT.getFileData(String.valueOf(fileId), apiToken); + getFile.prettyPrint(); + getFile.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.description", equalTo("")) + .body("data.dataFile.description", equalTo("")) + .body("data.directoryLabel", nullValue()) + .body("data.provFreeForm", nullValue()) + .body("data.categories", nullValue()) + .body("data.dataFile.tabularTags", nullValue()); + String latestUpdateTime = String.valueOf(JsonPath.from(getFile.body().asString()).getString("data.dataFile.lastUpdateTime")); + assertTrue(!latestUpdateTime.equalsIgnoreCase(lastUpdateTime)); + + // Second user updates the base version which should fail since it's already been updated + json = Json.createObjectBuilder() + .add(OptionalFileParams.DESCRIPTION_ATTR_NAME, "my new description"); + updateResponse = UtilIT.updateFileMetadata(String.valueOf(fileId), json.build().toString(), apiToken, lastUpdateTime); + updateResponse.prettyPrint(); + updateResponse.then().assertThat() + .body("status", equalTo(ApiConstants.STATUS_ERROR)) + .body("message", 
equalTo(BundleUtil.getStringFromBundle("abstractApiBean.error.internalVersionTimestampIsOutdated",Collections.singletonList(lastUpdateTime)))) + .statusCode(BAD_REQUEST.getStatusCode()); + + // Second user refreshes and updates. Should pass now + getFile = UtilIT.getFileData(String.valueOf(fileId), apiToken); + getFile.prettyPrint(); + getFile.then().assertThat() + .statusCode(OK.getStatusCode()); + lastUpdateTime = String.valueOf(JsonPath.from(getFile.body().asString()).getString("data.dataFile.lastUpdateTime")); + updateResponse = UtilIT.updateFileMetadata(String.valueOf(fileId), json.build().toString(), apiToken, lastUpdateTime); + updateResponse.prettyPrint(); + updateResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Test invalid date + updateResponse = UtilIT.updateFileMetadata(String.valueOf(fileId), json.build().toString(), apiToken, "bad-date"); + updateResponse.prettyPrint(); + updateResponse.then().assertThat() + .body("status", equalTo(ApiConstants.STATUS_ERROR)) + .body("message", equalTo(BundleUtil.getStringFromBundle("jsonparser.error.parsing.date",Collections.singletonList("bad-date")))) + .statusCode(BAD_REQUEST.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 9bceb3919f8..2a6c91b89c5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -807,7 +807,7 @@ static Response editVersionMetadataFromJsonStr(String persistentId, String jsonS return editVersionMetadataFromJsonStr(persistentId, jsonString, apiToken, null); } - static Response editVersionMetadataFromJsonStr(String persistentId, String jsonString, String apiToken, Integer sourceInternalVersionNumber) { + static Response editVersionMetadataFromJsonStr(String persistentId, String jsonString, String apiToken, String sourceLastUpdateTime) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) 
.body(jsonString) @@ -815,7 +815,7 @@ static Response editVersionMetadataFromJsonStr(String persistentId, String jsonS .put("/api/datasets/:persistentId/editMetadata/?persistentId=" + persistentId + "&replace=true" - + (sourceInternalVersionNumber != null ? "&sourceInternalVersionNumber=" + sourceInternalVersionNumber : "")); + + (sourceLastUpdateTime != null ? "&sourceLastUpdateTime=" + sourceLastUpdateTime : "")); } static Response updateDatasetPIDMetadata(String persistentId, String apiToken) { @@ -1124,14 +1124,20 @@ static Response deleteFileApi(Integer fileId, String apiToken) { .header(API_TOKEN_HTTP_HEADER, apiToken) .delete("/api/files/" + fileId); } - + static Response updateFileMetadata(String fileIdOrPersistentId, String jsonAsString, String apiToken) { + return updateFileMetadata(fileIdOrPersistentId, jsonAsString,apiToken, null); + } + static Response updateFileMetadata(String fileIdOrPersistentId, String jsonAsString, String apiToken, String sourceLastUpdateTime) { String idInPath = fileIdOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. if (!NumberUtils.isCreatable(fileIdOrPersistentId)) { idInPath = ":persistentId"; optionalQueryParam = "?persistentId=" + fileIdOrPersistentId; } + if (sourceLastUpdateTime != null) { + optionalQueryParam = optionalQueryParam + (optionalQueryParam.isEmpty() ? "?" 
: "&") + "sourceLastUpdateTime=" + sourceLastUpdateTime; + } RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken); if (jsonAsString != null) { diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java index c9f251f7e77..cbca33f409d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java @@ -195,8 +195,9 @@ public void test_09_unusedParamsGood() throws DataFileTagException { assertNull(instance.getDescription()); assertFalse(instance.hasDescription()); - assertNull(instance.getCategories()); - assertFalse(instance.hasCategories()); + assertNotNull(instance.getCategories()); + assertTrue(instance.hasCategories()); + assertTrue(instance.getCategories().isEmpty()); assertNull(instance.getDataFileTags()); assertFalse(instance.hasFileDataTags()); @@ -292,4 +293,4 @@ private void msgt(String s){ print json.dumps(json.dumps(d)) -*/ \ No newline at end of file +*/