Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"python.analysis.typeCheckingMode": "basic"
}
5 changes: 5 additions & 0 deletions display.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Print the help text for each top-level dvcli subcommand.
for subcommand in dataset collection file search; do
    dvcli "$subcommand" --help
done
13 changes: 9 additions & 4 deletions pyDataverse/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,9 @@ def post_request(
**request_params,
)

def put_request(self, url, data=None, auth=DEPRECATION_GUARD, params=None):
def put_request(
self, url, data=None, auth=DEPRECATION_GUARD, params=None, files=None
):
"""Make a PUT request.

Parameters
Expand Down Expand Up @@ -326,7 +328,6 @@ def put_request(self, url, data=None, auth=DEPRECATION_GUARD, params=None):
return self._sync_request(
Copy link

Copilot AI Jun 18, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The new files parameter is accepted by put_request but not forwarded to the underlying request; include files=files in both _sync_request and _async_request calls to support file uploads via PUT.

Copilot uses AI. Check for mistakes.
method=httpx.put,
url=url,
json=data,
headers=headers,
params=params,
**request_params,
Expand All @@ -335,7 +336,6 @@ def put_request(self, url, data=None, auth=DEPRECATION_GUARD, params=None):
return self._async_request(
method=self.client.put,
url=url,
json=data,
headers=headers,
params=params,
**request_params,
Expand Down Expand Up @@ -1545,7 +1545,12 @@ def edit_dataset_metadata(
self.base_url_api_native, identifier
)
params = {"replace": True} if replace else {}
resp = self.put_request(url, metadata, auth, params)
resp = self.put_request(
url=url,
data=metadata,
auth=auth,
params=params,
)

if resp.status_code == 401:
error_msg = resp.json()["message"]
Expand Down
7 changes: 7 additions & 0 deletions test.py
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I guess this file should have been omitted?

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oh no, thanks a lot! Removed it 🥲

Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import os

from pyDataverse.api import NativeApi
from rich import print

# Read the API token from the environment instead of hard-coding it:
# committing a live token to version control is a credential leak, and the
# previously committed token must be considered compromised and revoked.
API_TOKEN = os.getenv("API_TOKEN", "")

api = NativeApi("https://demo.dataverse.org", API_TOKEN)

dataset = api.get_dataset(identifier="doi:10.70122/FK2/BYBCAB")
print(dataset.json())
173 changes: 173 additions & 0 deletions tests/api/test_edit.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
import json
import os
from typing import Any
import httpx
from pyDataverse.api import NativeApi


class TestEditDatasetMetadata:
    """Integration tests for ``NativeApi.edit_dataset_metadata``.

    Requires the ``BASE_URL`` and ``API_TOKEN`` environment variables to
    point at a live Dataverse installation.
    """

    def test_edit_dataset_metadata_replace(self):
        """
        Test replacing existing metadata fields of a dataset.

        This test case performs the following steps:
        1. Creates a dataset using the minimal metadata fixture.
        2. Edits the dataset metadata with ``replace=True`` so that
           existing field values are overwritten.
        3. Asserts that the title and contact fields were updated.

        Raises:
            AssertionError: If the metadata edit fails.

        """
        # Arrange
        # os.environ[...] raises a clear KeyError when the variable is
        # unset, instead of the AttributeError that
        # os.getenv("BASE_URL").rstrip("/") would raise on None.
        BASE_URL = os.environ["BASE_URL"].rstrip("/")
        API_TOKEN = os.environ["API_TOKEN"]

        # Create dataset (context manager ensures the fixture file is closed)
        with open("tests/data/file_upload_ds_minimum.json") as handle:
            metadata = json.load(handle)
        pid = self._create_dataset(BASE_URL, API_TOKEN, metadata)
        api = NativeApi(BASE_URL, API_TOKEN)

        # Prepare the replacement metadata fields
        edit_metadata = {
            "fields": [
                {
                    "typeName": "title",
                    "value": "New Title",
                },
                {
                    "typeName": "datasetContact",
                    "value": [
                        {
                            "datasetContactEmail": {
                                "typeName": "datasetContactEmail",
                                "value": "jane@doe.com",
                            },
                            "datasetContactName": {
                                "typeName": "datasetContactName",
                                "value": "Jane Doe",
                            },
                        }
                    ],
                },
            ]
        }

        # Act
        response = api.edit_dataset_metadata(
            identifier=pid,
            metadata=edit_metadata,
            replace=True,
        )

        response.raise_for_status()

        # Assert
        dataset = api.get_dataset(pid).json()
        new_title = self._get_field_value(dataset, "citation", "title")
        new_contact = self._get_field_value(dataset, "citation", "datasetContact")[0]

        assert new_title == "New Title", "Metadata edit failed."
        assert (
            new_contact["datasetContactEmail"]["value"] == "jane@doe.com"
        ), "Metadata edit failed."
        assert (
            new_contact["datasetContactName"]["value"] == "Jane Doe"
        ), "Metadata edit failed."

    def test_edit_dataset_metadata_add(self):
        """
        Test adding new metadata fields to a dataset.

        This test case performs the following steps:
        1. Creates a dataset using the minimal metadata fixture.
        2. Edits the dataset metadata without ``replace``, so the new
           fields are added alongside the existing metadata.
        3. Asserts that the subject and subtitle fields were added.

        Raises:
            AssertionError: If the metadata edit fails.

        """
        # Arrange
        # os.environ[...] fails fast with a clear KeyError when unset.
        BASE_URL = os.environ["BASE_URL"].rstrip("/")
        API_TOKEN = os.environ["API_TOKEN"]

        # Create dataset (context manager ensures the fixture file is closed)
        with open("tests/data/file_upload_ds_minimum.json") as handle:
            metadata = json.load(handle)
        pid = self._create_dataset(BASE_URL, API_TOKEN, metadata)
        api = NativeApi(BASE_URL, API_TOKEN)

        # Prepare the metadata fields to add
        edit_metadata = {
            "fields": [
                {"typeName": "subject", "value": ["Astronomy and Astrophysics"]},
                {"typeName": "subtitle", "value": "Subtitle"},
            ]
        }

        # Act
        response = api.edit_dataset_metadata(
            identifier=pid,
            metadata=edit_metadata,
        )

        response.raise_for_status()

        # Assert
        dataset = api.get_dataset(pid).json()
        new_subject = self._get_field_value(dataset, "citation", "subject")
        new_subtitle = self._get_field_value(dataset, "citation", "subtitle")

        assert "Astronomy and Astrophysics" in new_subject, "Metadata edit failed."
        assert new_subtitle == "Subtitle", "Metadata edit failed."

    @staticmethod
    def _create_dataset(
        BASE_URL: str,
        API_TOKEN: str,
        metadata: dict,
    ):
        """
        Create a dataset in the Dataverse.

        Args:
            BASE_URL (str): The base URL of the Dataverse instance.
            API_TOKEN (str): The API token for authentication.
            metadata (dict): The metadata for the dataset.

        Returns:
            str: The persistent identifier (PID) of the created dataset.

        Raises:
            httpx.HTTPStatusError: If the dataset creation request fails.
        """
        url = f"{BASE_URL}/api/dataverses/root/datasets"
        response = httpx.post(
            url=url,
            json=metadata,
            headers={
                "X-Dataverse-key": API_TOKEN,
                "Content-Type": "application/json",
            },
        )

        response.raise_for_status()

        return response.json()["data"]["persistentId"]

    @staticmethod
    def _get_field_value(data: dict, block: str, field: str) -> Any:
        """
        Get the value of a field in a metadata block.

        Args:
            data (dict): A dataset JSON response containing
                ``data.latestVersion.metadataBlocks``.
            block (str): The metadata block name (e.g. ``"citation"``).
            field (str): The ``typeName`` of the field to look up.

        Returns:
            Any: The ``value`` entry of the matching field.

        Raises:
            AssertionError: If the block is missing.
            ValueError: If the field is not present in the block.
        """

        blocks = data["data"]["latestVersion"]["metadataBlocks"]

        assert block in blocks, f"Block {block} not found in metadata blocks"

        metadata_block = blocks[block]

        try:
            filtered = next(
                filter(lambda f: f["typeName"] == field, metadata_block["fields"])
            )
            return filtered["value"]
        except StopIteration:
            # Re-raise as a domain error; StopIteration alone would be cryptic.
            raise ValueError(f"Field {field} not found in block {block}")