Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove test aspects requiring published datasets #217

Merged
merged 2 commits into from
Mar 13, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions datalad_dataverse/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,4 @@
dataverse_dataset,
dataverse_demoinstance_url,
dataverse_instance_url,
dataverse_published_collection,
dataverse_publishable_dataset,
)
43 changes: 6 additions & 37 deletions datalad_dataverse/tests/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,28 +74,9 @@ def dataverse_collection(dataverse_admin_api,

# if all other fixtures and tests have properly cleaned-up after
# themselves we can now simply delete the collection
dataverse_admin_api.delete_dataverse(collection_alias)


@pytest.fixture(autouse=False, scope='session')
def dataverse_published_collection(dataverse_admin_api, dataverse_collection):
# This may not work in all test setups due to lack of permissions or /root
# not being published or it being published already. Try though, since it's
# necessary to publish datasets in order to test against dataverse datasets
# with several versions.
from pyDataverse.exceptions import (
ApiAuthorizationError,
OperationFailedError,
)
try:
dataverse_admin_api.publish_dataverse(dataverse_collection)
except ApiAuthorizationError:
# Test setup doesn't allow for it
pass
except OperationFailedError as e:
print(str(e))

yield dataverse_collection
r = dataverse_admin_api.delete_dataverse(collection_alias)
# make sure cleanup failure does not go unnoticed
r.raise_for_status()


@pytest.fixture(autouse=False, scope='function')
Expand All @@ -106,21 +87,9 @@ def dataverse_dataset(dataverse_admin_api, dataverse_collection):
yield dspid

# cleanup
dataverse_admin_api.destroy_dataset(dspid)


@pytest.fixture(autouse=False, scope='function')
def dataverse_publishable_dataset(dataverse_admin_api,
dataverse_published_collection):
"""Same as `dataverse_dataset` but dataset is part of a published
collection. This is required to be able to publish the dataset."""
dspid = create_test_dataverse_dataset(
dataverse_admin_api, dataverse_published_collection, 'testds')

yield dspid

# cleanup
dataverse_admin_api.destroy_dataset(dspid)
r = dataverse_admin_api.destroy_dataset(dspid)
# make sure cleanup failure does not go unnoticed
r.raise_for_status()


@pytest.fixture(autouse=False, scope='function')
Expand Down
27 changes: 2 additions & 25 deletions datalad_dataverse/tests/test_create_sibling_dataverse.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def test_workflow(dataverse_admin_api,
dataverse_admin_credential_setup,
dataverse_demoinstance_url,
dataverse_instance_url,
dataverse_published_collection,
dataverse_collection,
tmp_path,
*, mode):
path = tmp_path / 'ds'
Expand All @@ -41,7 +41,7 @@ def test_workflow(dataverse_admin_api,
try:
results = ds.create_sibling_dataverse(
url=dataverse_instance_url,
collection=dataverse_published_collection,
collection=dataverse_collection,
name='git_remote',
storage_name='special_remote',
mode=mode,
Expand Down Expand Up @@ -85,29 +85,6 @@ def test_workflow(dataverse_admin_api,
ds.save(message="Move a file")
ds.push(to="git_remote", **ckwa)

# Publish this version (if we can):
# May fail due to same reasons as the publication of the collection in
# `_prep_test`.
# Plus: Somehow this doesn't work out on demo.dataverse.org
# Looks like we can't modify a published dataset there?
# (In local docker setup that automatically creates a new draft
# version)
# However, at least when possible (docker setup with published root
# collection), test some aspect of dealing with this.
if dataverse_instance_url != dataverse_demoinstance_url:
try:
response = dataverse_admin_api.publish_dataset(dspid, release_type='major')
except Exception as e:
# nothing to do - we test what we can test, but print the reason
print(str(e))
published = response is not None and response.status_code == 200
if not published and response is not None:
# Publishing didn't succeed, but gave a json response not an
# exception - print in this case, too.
print(f"{response.json()}")
else:
published = False

# Add a file and push again (creating new draft version)
(ds.pathobj / "newfile.txt").write_text("Whatever new content")
ds.save(message="Add a file")
Expand Down
25 changes: 3 additions & 22 deletions datalad_dataverse/tests/test_pydataverse.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def check_upload(api, dsid, fcontent, fpath, src_md5):
def test_file_removal(
tmp_path,
dataverse_admin_api,
dataverse_publishable_dataset,
dataverse_dataset,
):

# the starting point of `dataverse_dataset` is a freshly
Expand All @@ -184,7 +184,7 @@ def test_file_removal(
fpath = tmp_path / 'dummy.txt'
fpath.write_text(fcontent)
response = dataverse_admin_api.upload_datafile(
identifier=dataverse_publishable_dataset,
identifier=dataverse_dataset,
filename=fpath,
)
# worked
Expand All @@ -207,27 +207,8 @@ def test_file_removal(

# Re-upload
response = dataverse_admin_api.upload_datafile(
identifier=dataverse_publishable_dataset,
identifier=dataverse_dataset,
filename=fpath,
)
assert response.status_code == 200, \
f"failed to upload file {response.status_code}: {response.json()}"

fid2 = response.json()['data']['files'][0]['dataFile']['id']

# Publish the dataset
# Note, that "major" release is required. We can't publish a "minor" when
# there's no major yet.
response = dataverse_admin_api.publish_dataset(
dataverse_publishable_dataset, release_type="major")
assert response.status_code == 200, \
f"publishing dataset failed with {response.status_code}: {response.json()}"

# We can't remove a file that is part of a published dataset:
status = delete(
f'{dataverse_admin_api.base_url}/dvn/api/data-deposit/v1.1/swordv2/'
f'edit-media/file/{fid2}',
auth=HTTPBasicAuth(dataverse_admin_api.api_token, ''))

assert status.status_code == 400, \
f"unexpected status on deletion {status.status_code}: {status.json()}"