Include version in the Portal link #111

Merged 3 commits on Feb 6, 2024
7 changes: 6 additions & 1 deletion automate/README.md
@@ -2,4 +2,9 @@
1. Take a look at `create_new_flow.py` and make any necessary adjustments to the title and other settings.
2. Run `python create_new_flow.py` to create a new flow.
3. Note the flow id and scope that is printed to the console.
4. Update the `FLOW_ID` and `FLOW_SCOPE` variables in `lambda_environment_vars.tf` in the infra directory with the values from the previous step.

## Deploying a Flow from the Command Line
1. `cd` into the `automate` directory.
2. `export $(cat ../secrets.env) PYTHONPATH=../aws && python deploy_mdf_flow.py dev 1.0.0-rc.10`
3. The first argument is the environment and the second is the version of the flow to deploy (see the sketch below).
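
A rough sketch of how `deploy_mdf_flow.py` might consume those two positional arguments (hypothetical; the actual script may parse them differently):

```python
# Hypothetical sketch of the argument handling described above;
# the real deploy_mdf_flow.py may differ.
import sys

if len(sys.argv) != 3:
    sys.exit("usage: python deploy_mdf_flow.py <environment> <flow_version>")

environment, flow_version = sys.argv[1], sys.argv[2]  # e.g. "dev", "1.0.0-rc.10"
print(f"Deploying MDF flow version {flow_version} to the {environment} environment")
```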
2 changes: 1 addition & 1 deletion aws/automate_manager.py
@@ -117,7 +117,7 @@ def submit(self, mdf_rec, organization,
assert destination_parsed.scheme == 'globus'

automate_rec = {
"mdf_portal_link": self.portal_url+mdf_rec["mdf"]["source_id"],
"mdf_portal_link": self.portal_url+mdf_rec["mdf"]["versioned_source_id"],
"user_transfer_inputs": self.create_transfer_items(
data_sources=data_sources,
organization=organization,
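The substance of this change: the portal link now embeds the dataset version instead of the bare `source_id`. A minimal sketch of the resulting URL, using values from the test fixture added later in this PR:

```python
# Minimal illustration of the changed line above: the portal link is the
# portal base URL with the versioned source id appended. Values are taken
# from the conftest.py fixture and the new test in this PR.
portal_url = "https://acdc.alcf.anl.gov/mdf/detail/"
mdf_rec = {"mdf": {"versioned_source_id": "123-456-7890-1.0.1"}}

mdf_portal_link = portal_url + mdf_rec["mdf"]["versioned_source_id"]
assert mdf_portal_link == "https://acdc.alcf.anl.gov/mdf/detail/123-456-7890-1.0.1"
```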
18 changes: 15 additions & 3 deletions aws/submit.py
@@ -10,7 +10,7 @@

from automate_manager import AutomateManager
from dynamo_manager import DynamoManager
from organization import Organization
from organization import Organization, OrganizationException
from source_id_manager import SourceIDManager
from utils import get_secret

@@ -115,13 +115,25 @@ def lambda_handler(event, context):
print("+++Metadata+++", metadata)

org_cannonical_name = metadata.get("mdf", {}).get("organization", "MDF Open")

# MDF Connect Client needs to only allow one organization. Til then, we just
# take the first one
if type(org_cannonical_name) == list:
org_cannonical_name = org_cannonical_name[0]

organization = Organization.from_schema_repo(org_cannonical_name)
print("######", organization)
try:
organization = Organization.from_schema_repo(org_cannonical_name)
print("######", organization)
except OrganizationException as e:
return {
'statusCode': 400,
'body': json.dumps(
{
"success": False,
"error": f"Organization: {org_cannonical_name} not found"
})
}


# Validate input JSON
# resourceType is always going to be Dataset, don't require from user
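When `Organization.from_schema_repo` raises an `OrganizationException`, the new branch above returns a 400 instead of letting the lambda fail. A sketch of the response a caller would see (the organization name is borrowed from the BDD scenario added below; the error text follows the handler code above):

```python
import json

# Shape of the error response produced by the new except branch; the
# organization name mirrors the test scenario added in this PR.
response = {
    "statusCode": 400,
    "body": json.dumps({
        "success": False,
        "error": "Organization: Not A Valid Organization not found",
    }),
}

assert json.loads(response["body"])["success"] is False
```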
5 changes: 4 additions & 1 deletion aws/tests/conftest.py
@@ -49,7 +49,10 @@ def mdf_rec():
},
],
},
"mdf": {"source_id": "123-456-7890", "version": "1.0.1"},
"mdf": {"source_id": "123-456-7890",
"version": "1.0.1",
"versioned_source_id": "123-456-7890-1.0.1"
}
}


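The fixture values suggest `versioned_source_id` is simply the `source_id` with the version appended. A minimal sketch of that assumption (the helper name is hypothetical, not code from this repository):

```python
def make_versioned_source_id(source_id: str, version: str) -> str:
    # Assumption based on the fixture above:
    # '123-456-7890' + '1.0.1' -> '123-456-7890-1.0.1'
    return f"{source_id}-{version}"

assert make_versioned_source_id("123-456-7890", "1.0.1") == "123-456-7890-1.0.1"
```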
7 changes: 7 additions & 0 deletions aws/tests/submit_dataset.feature
@@ -96,3 +96,10 @@ Feature: Submit Dataset
And an automate flow started with a true mint DOI flag
And the data destination should be the Petrel MDF directory
And I should receive a success result with the generated uuid and version 1.0


Scenario: Submit Dataset with invalid organization
Given I'm authenticated with MDF
And I have a new MDF dataset to submit for an organization that does not exist
When I submit the dataset
Then I should receive a failure result
25 changes: 24 additions & 1 deletion aws/tests/test_automate_manager.py
@@ -198,7 +198,7 @@ def test_update_metadata_only(self, mock_automate, secrets, organization, mocker


@mock.patch('automate_manager.GlobusAutomateFlow', autospec=True)
def test_mint_doi(self, mock_automate, secrets, organization_mint_doi, mocker, mdf_rec):
def test_mint_doi(self, mock_automate, secrets, organization_mint_doi, mocker, mdf_rec, set_environ):
mock_flow = mocker.Mock()
mock_automate.from_existing_flow = mocker.Mock(return_value=mock_flow)
os.environ['PORTAL_URL'] = "https://acdc.alcf.anl.gov/mdf/detail/"
@@ -228,3 +228,26 @@ def test_mint_doi(self, mock_automate, secrets, organization_mint_doi, mocker, m
mock_flow.run_flow.assert_called()
assert(mock_flow.run_flow.call_args[0][0]['mint_doi'])

@mock.patch('automate_manager.GlobusAutomateFlow', autospec=True)
def test_mdf_portal_link(self, mock_automate, secrets, organization_mint_doi, mocker, mdf_rec, set_environ):
mock_flow = mocker.Mock()
mock_automate.from_existing_flow = mocker.Mock(return_value=mock_flow)
os.environ['PORTAL_URL'] = "https://acdc.alcf.anl.gov/mdf/detail/"
manager = AutomateManager(secrets, is_test=True)

data_sources = [
"https://app.globus.org/file-manager?destination_id=e38ee745-6d04-11e5-ba46-22000b92c6ec&destination_path=%2FMDF%2Fmdf_connect%2Ftest_files%2Fcanonical_datasets%2Fdft%2F"
]
user_token = {'access_token': '1234567890'}
_ = manager.submit(mdf_rec=mdf_rec, organization=organization_mint_doi,
submitting_user_token=user_token,
submitting_user_id="12-33-55", monitor_by_id=["12-33-55",
"5fc63928-3752-11e8-9c6f-0e00fd09bf20"],
submitting_user_email="[email protected]",
search_index_uuid='098-765-4321',
data_sources=data_sources, is_test=False,
update_metadata_only=False)

mock_flow.run_flow.assert_called()
assert(mock_flow.run_flow.call_args[0][0]['mdf_portal_link'] == "https://acdc.alcf.anl.gov/mdf/detail/123-456-7890-1.0.1")

8 changes: 8 additions & 0 deletions aws/tests/test_submit.py
@@ -21,6 +21,10 @@ def test_submit():
def test_update_other_users_record():
pass

@scenario('submit_dataset.feature', 'Submit Dataset with invalid organization')
def test_invalid_organization():
pass


@scenario('submit_dataset.feature', 'Attempt to add a record with an existing source_id')
def test_add_record_with_existing_source_id():
@@ -105,6 +109,10 @@ def mdf_datset(mdf, mdf_environment, mocker):

return mdf.get_submission()

@given("I have a new MDF dataset to submit for an organization that does not exist", target_fixture='mdf_submission')
def invalid_org(mdf, mdf_environment):
mdf.set_organization("Not A Valid Organization")
return mdf.get_submission()

@given('I have a new MDF dataset to submit with a source_id that already exists',
target_fixture='mdf_submission')
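The scenario's `Then I should receive a failure result` step is not part of this hunk; a hypothetical definition might look like the following (the fixture name and status-code check are assumptions, not code from this PR):

```python
from pytest_bdd import then

# Hypothetical step definition for the new scenario's failure check;
# 'mdf_response' and the 400 status are assumptions for illustration.
@then("I should receive a failure result")
def failure_result(mdf_response):
    assert mdf_response["statusCode"] == 400
```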