From f88fb9e763d66e720cee26e35e4a1e6569543a67 Mon Sep 17 00:00:00 2001
From: BryanFauble <17128019+BryanFauble@users.noreply.github.com>
Date: Thu, 26 Oct 2023 15:11:00 -0700
Subject: [PATCH] Run cli in its own test

---
 .github/workflows/build.yml                   | 41 +++++++++++++
 .../synapseclient/core/test_caching.py        | 58 +++++++++++--------
 2 files changed, 75 insertions(+), 24 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 47632d57e..7352ccc36 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -135,6 +135,47 @@ jobs:
           # use loadscope to avoid issues running tests concurrently that share scoped fixtures
           pytest -sv tests/integration -n auto --ignore=tests/integration/synapseclient/test_command_line_client.py --dist loadscope
         fi
+      - name: run-integration-tests-cli
+        shell: bash
+
+        # run the cli integration tests against a single python version from the strategy matrix
+        if: ${{ contains(fromJSON('["3.9"]'), matrix.python) }}
+        run: |
+          if [ -z "${{ secrets.encrypted_d17283647768_key }}" ] || [ -z "${{ secrets.encrypted_d17283647768_iv }}" ]; then
+            echo "No test configuration decryption keys available, skipping integration tests"
+
+          else
+
+            # decrypt the encrypted test synapse configuration
+            openssl aes-256-cbc -K ${{ secrets.encrypted_d17283647768_key }} -iv ${{ secrets.encrypted_d17283647768_iv }} -in test.synapseConfig.enc -out test.synapseConfig -d
+            mv test.synapseConfig ~/.synapseConfig
+
+            if [ "${{ startsWith(matrix.os, 'ubuntu') }}" == "true" ]; then
+              # on linux only we can build and run a docker container to serve as an SFTP host for our SFTP tests.
+              # Docker is not available on GH Action runners on Mac and Windows.
+ + docker build -t sftp_tests - < tests/integration/synapseclient/core/upload/Dockerfile_sftp + docker run -d sftp_tests:latest + + # get the internal IP address of the just launched container + export SFTP_HOST=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(docker ps -q)) + + printf "[sftp://$SFTP_HOST]\nusername: test\npassword: test\n" >> ~/.synapseConfig + + # add to known_hosts so the ssh connections can be made without any prompting/errors + mkdir -p ~/.ssh + ssh-keyscan -H $SFTP_HOST >> ~/.ssh/known_hosts + fi + + # set env vars used in external bucket tests from secrets + export EXTERNAL_S3_BUCKET_NAME="${{secrets.EXTERNAL_S3_BUCKET_NAME}}" + export EXTERNAL_S3_BUCKET_AWS_ACCESS_KEY_ID="${{secrets.EXTERNAL_S3_BUCKET_AWS_ACCESS_KEY_ID}}" + export EXTERNAL_S3_BUCKET_AWS_SECRET_ACCESS_KEY="${{secrets.EXTERNAL_S3_BUCKET_AWS_SECRET_ACCESS_KEY}}" + + # use loadscope to avoid issues running tests concurrently that share scoped fixtures + pytest -sv tests/integration/synapseclient/test_command_line_client.py -n auto --dist loadscope + fi + # enforce the code matches the Black code style lint: runs-on: ubuntu-latest diff --git a/tests/integration/synapseclient/core/test_caching.py b/tests/integration/synapseclient/core/test_caching.py index 9823fe5b4..7a2619c39 100644 --- a/tests/integration/synapseclient/core/test_caching.py +++ b/tests/integration/synapseclient/core/test_caching.py @@ -14,17 +14,10 @@ import synapseclient.core.utils as utils from synapseclient.core.exceptions import SynapseError, SynapseHTTPError -from synapseclient import File, Project +from synapseclient import File, Project, Synapse, Entity from func_timeout import FunctionTimedOut, func_set_timeout -@pytest.fixture(scope="module") -def project(syn, schedule_for_cleanup): - project = syn.store(Project(name=str(uuid.uuid4()))) - schedule_for_cleanup(project) - return project - - @pytest.fixture(scope="module", autouse=True) def syn_state(syn): 
syn.test_keepRunning = True @@ -43,18 +36,19 @@ def syn_state(syn): @pytest.mark.flaky(reruns=6) -def test_threaded_access( - syn: synapseclient.Synapse, project: Project, schedule_for_cleanup -): +def test_threaded_access(syn: Synapse, schedule_for_cleanup): + project = syn.store(Project(name=str(uuid.uuid4()))) + schedule_for_cleanup(project) try: execute_test_threaded_access(syn, project, schedule_for_cleanup) except FunctionTimedOut: + syn.test_keepRunning = False syn.logger.warning("test_threaded_access timed out") pytest.fail("test_threaded_access timed out") @func_set_timeout(120) -def execute_test_threaded_access(syn, project, schedule_for_cleanup): +def execute_test_threaded_access(syn: Synapse, project: Project, schedule_for_cleanup): """Starts multiple threads to perform store and get calls randomly.""" # Doesn't this test look like a DOS attack on Synapse? # Maybe it should be called explicity... @@ -108,7 +102,7 @@ def execute_test_threaded_access(syn, project, schedule_for_cleanup): ############# -def wrap_function_as_child_thread(syn, function, *args, **kwargs): +def wrap_function_as_child_thread(syn: Synapse, function, *args, **kwargs): """Wraps the given function so that it ties into the main thread.""" def child_thread(): @@ -122,8 +116,10 @@ def child_thread(): f"Starting thread uuid: {unique_uuid}, function: {str(function)}" ) function(*args, **kwargs, unique_uuid=unique_uuid) - except Exception: - syn.logger.warning(f"Exception in thread uuid: {unique_uuid}") + except Exception as ex: + syn.logger.warning( + f"Exception in thread uuid: {unique_uuid}, exception: {ex}" + ) syn.test_errors.put(traceback.format_exc()) syn.logger.warning(f"Finished thread uuid: {unique_uuid}") @@ -134,7 +130,7 @@ def child_thread(): return child_thread -def collect_errors_and_fail(syn): +def collect_errors_and_fail(syn: Synapse): """Pulls error traces from the error queue and fails if the queue is not empty.""" failures = [] for i in range(syn.test_errors.qsize()): 
@@ -148,7 +144,9 @@ def collect_errors_and_fail(syn):
 ######################
 
 
-def thread_keep_storing_one_File(syn, project, schedule_for_cleanup, unique_uuid):
+def thread_keep_storing_one_File(
+    syn: Synapse, project: Project, schedule_for_cleanup, unique_uuid: str
+):
     """Makes one file and stores it over and over again."""
 
     # Make a local file to continuously store
@@ -162,7 +160,13 @@ def thread_keep_storing_one_File(syn, project, schedule_for_cleanup, unique_uuid
         syn.logger.warning(
             f"thread_keep_storing_one_File(): [storing {myPrecious.path}, uuid: {unique_uuid}]"
         )
-        stored = store_catch_412_HTTPError(syn, myPrecious)
+        try:
+            stored = store_catch_412_HTTPError(syn, myPrecious)
+        except FunctionTimedOut:
+            syn.logger.warning(
+                f"thread_keep_storing_one_File()::store_catch_412_HTTPError timed out, Path: {myPrecious.path}, uuid: {unique_uuid}"
+            )
+
         if stored is not None:
             myPrecious = stored
         elif "id" in myPrecious:
@@ -179,7 +183,7 @@ def thread_keep_storing_one_File(syn, project, schedule_for_cleanup, unique_uuid
     )
 
 
-def thread_get_files_from_Project(syn, project, unique_uuid):
+def thread_get_files_from_Project(syn: Synapse, project: Project, unique_uuid: str):
     """Continually polls and fetches items from the Project."""
 
     while syn.test_keepRunning:
@@ -202,7 +206,7 @@ def thread_get_files_from_Project(syn, project, unique_uuid):
 
 
 def thread_get_and_update_file_from_Project(
-    syn, project, schedule_for_cleanup, unique_uuid
+    syn: Synapse, project: Project, schedule_for_cleanup, unique_uuid: str
 ):
     """Fetches one item from the Project and updates it with a new file."""
 
@@ -223,8 +227,13 @@ def thread_get_and_update_file_from_Project(
         syn.logger.warning(
             f"thread_get_and_update_file_from_Project(), Updating: [project: {project.id}, entity: {entity.id}, path: {path}, uuid: {unique_uuid}]]"
         )
-        entity.path = path
-        entity = store_catch_412_HTTPError(syn, entity)
+        entity.path = path
+        try:
+            entity = store_catch_412_HTTPError(syn, entity)
+        except 
FunctionTimedOut: + syn.logger.warning( + f"thread_get_and_update_file_from_Project()::store_catch_412_HTTPError timed out, path: {entity.path}, uuid: {unique_uuid}" + ) if entity is not None: assert os.stat(entity.path) == os.stat(path) @@ -249,12 +258,13 @@ def sleep_for_a_bit() -> int: return time_to_sleep -def get_all_ids_from_Project(syn, project): +def get_all_ids_from_Project(syn: Synapse, project: Project): """Fetches all currently available Synapse IDs from the parent Project.""" return [result["id"] for result in syn.getChildren(project.id)] -def store_catch_412_HTTPError(syn, entity): +@func_set_timeout(20) +def store_catch_412_HTTPError(syn: Synapse, entity: Entity): """Returns the stored Entity if the function succeeds or None if the 412 is caught.""" try: return syn.store(entity)