diff --git a/.github/scripts/s3_client.py b/.github/scripts/s3_client.py index 5fb3106..0b37751 100644 --- a/.github/scripts/s3_client.py +++ b/.github/scripts/s3_client.py @@ -1,14 +1,14 @@ -import os import io -from pathlib import Path +import json +import os from dataclasses import dataclass, field from datetime import timedelta +from pathlib import Path from typing import Iterator -import json -from minio import Minio # type: ignore # import requests # type: ignore from loguru import logger # type: ignore +from minio import Minio # type: ignore @dataclass @@ -42,15 +42,12 @@ def bucket_exists(self, bucket): return self._client.bucket_exists(bucket) def put( - self, - path, - file_object, - length=-1, - content_type="application/octet-stream"): + self, path, file_object, length=-1, content_type="application/octet-stream" + ): # For unknown length (ie without reading file into mem) give `part_size` part_size = 0 if length == -1: - part_size = 10*1024*1024 + part_size = 10 * 1024 * 1024 path = f"{self.prefix}/{path}" self._client.put_object( self.bucket, @@ -62,19 +59,16 @@ def put( ) def get_file_urls( - self, - path="", - exclude_files=("status.json"), - lifetime=timedelta(hours=1), - ) -> list[str]: + self, + path="", + exclude_files=("status.json",), + lifetime=timedelta(hours=1), + ) -> list[str]: """Checks an S3 'folder' for its list of files""" logger.debug("Getting file list using {}, at {}", self, path) path = f"{self.prefix}/{path}" - objects = self._client.list_objects( - self.bucket, - prefix=path, - recursive=True) - file_urls : list[str] = [] + objects = self._client.list_objects(self.bucket, prefix=path, recursive=True) + file_urls: list[str] = [] for obj in objects: if obj.is_dir: continue @@ -92,7 +86,6 @@ def get_file_urls( # Option 2: Work with minio.datatypes.Object directly return file_urls - def ls(self, path, only_folders=False, only_files=False) -> Iterator[str]: """ List folder contents, non-recursive, ala `ls` @@ -101,10 +94,7 @@ # path = str(Path(self.prefix, path)) path = f"{self.prefix}/{path}" logger.debug("Running ls at path: {}", path) - objects = self._client.list_objects( - self.bucket, - prefix=path, - recursive=False) + objects = self._client.list_objects(self.bucket, prefix=path, recursive=False) for obj in objects: if only_files and obj.is_dir: continue @@ -112,8 +102,7 @@ continue yield Path(obj.object_name).name - - def load_file(self, path): + def load_file(self, path) -> str: """Load file from S3""" path = f"{self.prefix}/{path}" try: @@ -131,31 +120,31 @@ def load_file(self, path): return content # url = self.client.get_presigned_url( - # "GET", - # self.bucket, - # str(Path(self.prefix, path)), - # expires=timedelta(minutes=10), + # "GET", + # self.bucket, + # str(Path(self.prefix, path)), + # expires=timedelta(minutes=10), # ) # response = requests.get(url) # return response.content - def check_versions(self, model_name: str) -> Iterator[VersionStatus]: + def check_versions(self, resource_path: str) -> Iterator[VersionStatus]: """ Check the model repository for versions of resource_path. Yields a VersionStatus for each version found.
""" - logger.debug("Checking versions for {}", model_name) - version_folders = self.ls(f"{model_name}/", only_folders=True) + logger.debug("Checking versions for {}", resource_path) + version_folders = self.ls(f"{resource_path}/", only_folders=True) # For each folder get the contents of status.json for version in version_folders: - yield self.get_version_status(model_name, version) + yield self.get_version_status(resource_path, version) - def get_unpublished_version(self, model_name:str) -> str: + def get_unpublished_version(self, resource_path: str) -> str: """Get the unpublisted version""" - versions = list(self.check_versions(model_name)) + versions = list(self.check_versions(resource_path)) if len(versions) == 0: return "1" unpublished = [version for version in versions if version.status == "staging"] @@ -166,49 +155,51 @@ def get_unpublished_version(self, model_name:str) -> str: raise ValueError("Opps! We seem to have > 1 staging versions!!") return unpublished[0].version - def get_version_status(self, model_name: str, version: str) -> VersionStatus: - status = self.get_status(model_name, version) - status_str = status.get('status', 'status-field-unset') - version_path = f"{model_name}/{version}" + def get_version_status(self, resource_path: str, version: str) -> VersionStatus: + status = self.get_status(resource_path, version) + status_str = status.get("status", "status-field-unset") + version_path = f"{resource_path}/{version}" return VersionStatus(version, status_str, version_path) - def get_status(self, model_name: str, version: str) -> dict: - version_path = f"{model_name}/{version}" - logger.debug("model_name: {}, version: {}", model_name, version) + def get_status(self, resource_path: str, version: str) -> dict: + version_path = f"{resource_path}/{version}" + logger.debug("resource_path: {}, version: {}", resource_path, version) status_path = f"{version_path}/status.json" logger.debug("Getting status using path {}", status_path) status = self.load_file(status_path) status = json.loads(status) return status - def put_status(self, model_name: str, version: str, status: dict): - logger.debug("Updating status for {}-{}, with {}", model_name, version, status) + def put_status(self, resource_path: str, version: str, status: dict): + logger.debug( + "Updating status for {}-{}, with {}", resource_path, version, status + ) contents = json.dumps(status).encode() file_object = io.BytesIO(contents) self.put( - f"{model_name}/{version}/status.json", + f"{resource_path}/{version}/status.json", file_object, length=len(contents), content_type="application/json", ) - def get_log(self, model_name: str, version: str) -> dict: - version_path = f"{model_name}/{version}" - logger.debug("model_name: {}, version: {}", model_name, version) + def get_log(self, resource_path: str, version: str) -> dict: + version_path = f"{resource_path}/{version}" + logger.debug("resource_path: {}, version: {}", resource_path, version) path = f"{version_path}/log.json" logger.debug("Getting log using path {}", path) log = self.load_file(path) log = json.loads(log) return log - def put_log(self, model_name: str, version: str, log: dict): - logger.debug("Updating log for {}-{}, with {}", model_name, version, log) + def put_log(self, resource_path: str, version: str, log: dict): + logger.debug("Updating log for {}-{}, with {}", resource_path, version, log) contents = json.dumps(log).encode() file_object = io.BytesIO(contents) self.put( - f"{model_name}/{version}/log.json", + f"{resource_path}/{version}/log.json", 
file_object, length=len(contents), content_type="application/json", @@ -239,5 +230,3 @@ def create_client() -> Client: secret_key=secret_access_key, ) return client - - diff --git a/.github/scripts/unzip_model.py b/.github/scripts/unzip_package.py similarity index 66% rename from .github/scripts/unzip_model.py rename to .github/scripts/unzip_package.py index c04a32e..743fcb6 100644 --- a/.github/scripts/unzip_model.py +++ b/.github/scripts/unzip_package.py @@ -1,19 +1,21 @@ import argparse import io import traceback -from typing import Optional import urllib.request import zipfile +from typing import Optional - -from update_status import update_status from s3_client import create_client +from update_status import update_status def create_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser() - parser.add_argument("model_name", help="Model name") - parser.add_argument("model_zip_url", help="Model URL (needs to be publicly accessible or presigned)") + parser.add_argument("resource_path", help="Resource ID") + parser.add_argument( + "package_url", + help="Resource package URL (needs to be publicly accessible or presigned)", + ) return parser @@ -27,22 +29,22 @@ def get_args(argv: Optional[list] = None): def main(): args = get_args() - model_name = args.model_name - model_zip_url = args.model_zip_url + resource_path = args.resource_path + package_url = args.package_url try: - unzip_from_url(model_name, model_zip_url) + unzip_from_url(resource_path, package_url) except Exception: err_message = f"An error occurred in the CI:\n {traceback.format_exc()}" print(err_message) - update_status(model_name, {'status' : err_message}) + update_status(resource_path, err_message) raise -def unzip_from_url(model_name, model_zip_url): +def unzip_from_url(resource_path, package_url): filename = "model.zip" client = create_client() - versions = client.check_versions(model_name) + versions = list(client.check_versions(resource_path)) if len(versions) == 0: version = "1" @@ -52,22 +54,22 @@ raise NotImplementedError("Updating/publishing new version not implemented") # TODO: Need to make sure status is staging - status = client.get_status(model_name, version) + status = client.get_status(resource_path, version) status_str = status.get("status", "missing-status") if status_str != "staging": raise ValueError( - "Model {} at version {} is status: {}", - model_name, version, status_str) + f"Model {resource_path} at version {version} is status: {status_str}" + ) # Download the model zip file - remotezip = urllib.request.urlopen(model_zip_url) + remotezip = urllib.request.urlopen(package_url) # Unzip the zip file zipinmemory = io.BytesIO(remotezip.read()) zipobj = zipfile.ZipFile(zipinmemory) for filename in zipobj.namelist(): # file_object = io.BytesIO(zipobj) file_object = zipobj.open(filename) - path = f"{model_name}/{version}/{filename}" + path = f"{resource_path}/{version}/files/{filename}" client.put( path, diff --git a/.github/scripts/update_log.py b/.github/scripts/update_log.py index ce307a3..12e95c4 100644 --- a/.github/scripts/update_log.py +++ b/.github/scripts/update_log.py @@ -1,13 +1,14 @@ import argparse -from typing import Optional import datetime -from loguru import logger +from typing import Optional +from loguru import logger from s3_client import create_client + def create_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser() - parser.add_argument("model_name", help="Model name") +
parser.add_argument("resource_path", help="Model name") parser.add_argument("category", help="Log category") parser.add_argument("summary", help="Log summary") parser.add_argument("--version", help="Version") @@ -24,31 +25,34 @@ def get_args(argv: Optional[list] = None): def main(): args = get_args() - model_name = args.model_name + resource_path = args.resource_path category = args.category summary = args.summary version = args.version - add_log_entry(model_name, category, summary, version=version) + add_log_entry(resource_path, category, summary, version=version) + -def add_log_entry(model_name, category, summary, version=None): +def add_log_entry(resource_path, category, summary, version=None): timenow = datetime.datetime.now().isoformat() client = create_client() - logger.info("Updating log for {} with category {} and summary", - model_name, - category, - summary) + logger.info( + "Updating log for {} with category {} and summary", + resource_path, + category, + summary, + ) if version is None: - version = client.get_unpublished_version(model_name) + version = client.get_unpublished_version(resource_path) logger.info("Version detected: {}", version) else: logger.info("Version requested: {}", version) - log = client.get_log(model_name, version) + log = client.get_log(resource_path, version) if category not in log: log[category] = [] log[category].append({"timestamp": timenow, "log": summary}) - client.put_log(model_name, version, log) + client.put_log(resource_path, version, log) if __name__ == "__main__": diff --git a/.github/scripts/update_status.py b/.github/scripts/update_status.py index 7180f72..ad1c2d8 100644 --- a/.github/scripts/update_status.py +++ b/.github/scripts/update_status.py @@ -1,13 +1,14 @@ import argparse -from typing import Optional import datetime -from loguru import logger +from typing import Optional +from loguru import logger from s3_client import create_client + def create_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser() - parser.add_argument("model_name", help="Model name") + parser.add_argument("resource_path", help="Model name") parser.add_argument("status", help="Status") parser.add_argument("--version", help="Version") parser.add_argument("--step", help="Step", default=0, type=int) @@ -25,29 +26,32 @@ def get_args(argv: Optional[list] = None): def main(): args = get_args() - model_name = args.model_name + resource_path = args.resource_path version = args.version step = args.step num_steps = args.num_steps status = args.status - update_status(model_name, status, version=version, step=step, num_steps=num_steps) + update_status(resource_path, status, version=version, step=step, num_steps=num_steps) -def update_status(model_name, status_text, version=None, step=None, num_steps=None): +def update_status(resource_path: str, status_text: str, version: Optional[str] = None, step: Optional[int], num_steps: int = 6): + assert step is None or step <= num_steps timenow = datetime.datetime.now().isoformat() client = create_client() - logger.info("Updating status for {} with text {} [steps={}, num_steps={}]", - model_name, - status_text, - step, - num_steps) + logger.info( + "Updating status for {} with text {} [steps={}, num_steps={}]", + resource_path, + status_text, + step, + num_steps, + ) if version is None: - version = client.get_unpublished_version(model_name) + version = client.get_unpublished_version(resource_path) logger.info("Version detected: {}", version) else: logger.info("Version requested: {}", version) - status = 
client.get_status(model_name, version) + status = client.get_status(resource_path, version) if "messages" not in status: status["messages"] = [] @@ -57,8 +61,7 @@ def update_status(model_name, status_text, version=None, step=None, num_steps=No status["num_steps"] = num_steps status["last_message"] = status_text status["messages"].append({"timestamp": timenow, "text": status_text}) - client.put_status(model_name, version, status) - + client.put_status(resource_path, version, status) if __name__ == "__main__": diff --git a/.github/scripts/update_status.sh b/.github/scripts/update_status.sh index f6f9355..dcc8505 100644 --- a/.github/scripts/update_status.sh +++ b/.github/scripts/update_status.sh @@ -1,46 +1,46 @@ -#!/bin/sh +#!/bin/sh # Updated to use S3 creds: # S3_HOST # S3_BUCKET # S3_FOLDER # S3_ACCESS_KEY_ID # S3_SECRET_ACCESS_KEY -# First arg is now model_nickname +# First arg is now resource_path FILENAME=status.json -MODEL_NAME=$1 +resource_path=$1 STATUS=$2 -if [ -z "$MODEL_NAME" ]; then - printf '%s\n' "MODEL_NAME is unset or empty" >&2; +if [ -z "$resource_path" ]; then + printf '%s\n' "resource_path is unset or empty" >&2; exit 1 fi -if [ -z "$S3_HOST" ]; then +if [ -z "$S3_HOST" ]; then printf '%s\n' "S3_HOST is unset or empty" >&2; exit 1 fi -if [ -z "$S3_BUCKET" ]; then +if [ -z "$S3_BUCKET" ]; then printf '%s\n' "S3_BUCKET is unset or empty" >&2; exit 1 fi -if [ -z "$S3_FOLDER" ]; then +if [ -z "$S3_FOLDER" ]; then printf '%s\n' "S3_FOLDER is unset or empty" >&2; exit 1 fi -if [ -z "$S3_ACCESS_KEY_ID" ]; then +if [ -z "$S3_ACCESS_KEY_ID" ]; then printf '%s\n' "S3_ACCESS_KEY_ID is unset or empty" >&2; exit 1 fi -if [ -z "$S3_SECRET_ACCESS_KEY" ]; then +if [ -z "$S3_SECRET_ACCESS_KEY" ]; then printf '%s\n' "S3_SECRET_ACCESS_KEY is unset or empty" >&2; exit 1 fi -#curl -X PUT -H 'Content-Type: application/json' -d '{"status": "'"$2"'"}' "$1" +#curl -X PUT -H 'Content-Type: application/json' -d '{"status": "'"$2"'"}' "$1" -RESOURCE="/${S3_BUCKET}/${S3_FOLDER}/${MODEL_NAME}/${FILENAME}" +RESOURCE="/${S3_BUCKET}/${S3_FOLDER}/${resource_path}/${FILENAME}" CONTENT_TYPE="application/json" DATE=`date -R` _SIGNATURE="PUT\n\n${CONTENT_TYPE}\n${DATE}\n${RESOURCE}" diff --git a/.github/scripts/upload_model_to_zenodo.py b/.github/scripts/upload_model_to_zenodo.py index eedede8..6c4ff06 100644 --- a/.github/scripts/upload_model_to_zenodo.py +++ b/.github/scripts/upload_model_to_zenodo.py @@ -1,22 +1,23 @@ import argparse -from io import BytesIO import logging import os +import pprint +from datetime import datetime +from io import BytesIO from pathlib import Path -from urllib.parse import urlparse, urljoin, quote_plus from typing import Optional -from datetime import datetime -import pprint - +from urllib.parse import quote_plus, urljoin, urlparse -from packaging.version import parse as parse_version import requests # type: ignore -from loguru import logger # type: ignore import spdx_license_list # type: ignore -import yaml # type: ignore - -from update_status import update_status +from loguru import logger # type: ignore +from packaging.version import parse as parse_version +from ruamel.yaml import YAML from s3_client import create_client +from update_status import update_status + +yaml = YAML(typ="safe") + spdx_licenses = [item.id for item in spdx_license_list.LICENSES.values()] @@ -24,16 +25,16 @@ 200, # OK Request succeeded. Response included. Usually sent for GET/PUT/PATCH requests. 201, # Created Request succeeded. Response included. 
Usually sent for POST requests 202, # Accepted Request succeeded. Response included. Usually sent for POST requests, - # where background processing is needed to fulfill the request. + # where background processing is needed to fulfill the request. 204, # No Content Request succeeded. No response included. Usually sent for DELETE requests. ) -ACCESS_TOKEN = os.getenv('ZENODO_API_ACCESS_TOKEN') -S3_HOST = os.getenv('S3_HOST') -S3_ACCESS_KEY = os.getenv('S3_ACCESS_KEY_ID') -S3_SECRET_KEY = os.getenv('S3_SECRET_ACCESS_KEY') -S3_BUCKET = os.getenv('S3_BUCKET') -S3_FOLDER = os.getenv('S3_FOLDER') -ZENODO_URL = os.getenv('ZENODO_URL') +ACCESS_TOKEN = os.getenv("ZENODO_API_ACCESS_TOKEN") +S3_HOST = os.getenv("S3_HOST") +S3_ACCESS_KEY = os.getenv("S3_ACCESS_KEY_ID") +S3_SECRET_KEY = os.getenv("S3_SECRET_ACCESS_KEY") +S3_BUCKET = os.getenv("S3_BUCKET") +S3_FOLDER = os.getenv("S3_FOLDER") +ZENODO_URL = os.getenv("ZENODO_URL") MAX_RDF_VERSION = parse_version("0.5.0") @@ -56,7 +57,7 @@ def assert_good_response(response, message, info=None): def create_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser() - parser.add_argument("--model_name", help="Model name", required=True) + parser.add_argument("--resource_path", help="Resource path", required=True) parser.add_argument("--version", help="Version", nargs="?", default=None) return parser @@ -72,40 +73,34 @@ def get_args(argv: Optional[list] = None): def main(): args = get_args() headers = {"Content-Type": "application/json"} - params = {'access_token': ACCESS_TOKEN} + params = {"access_token": ACCESS_TOKEN} client = create_client() - # TODO: GET THE CURRENT VERSION if args.version is None: - version = client.get_unpublished_version(args.model_name) - - - s3_path = Path(args.model_name, version) + version = client.get_unpublished_version(args.resource_path) + else: + version = args.version + s3_path = f"{args.resource_path}/{version}/files" # List the files at the model URL file_urls = client.get_file_urls(path=s3_path) - logger.info("Using file URLs:\n{}", '\n'.join((str(obj) for obj in file_urls))) + logger.info("Using file URLs:\n{}", "\n".join((str(obj) for obj in file_urls))) # Create empty deposition response = requests.post( - f'{ZENODO_URL}/api/deposit/depositions', - params=params, - json={}, - headers=headers) + f"{ZENODO_URL}/api/deposit/depositions", params=params, json={}, headers=headers + ) assert_good_response(response, "Failed to create deposition") - # Use the bucket link deposition_info = response.json() bucket_url = deposition_info["links"]["bucket"] rdf_text = client.load_file(Path(s3_path, "rdf.yaml")) - rdf = yaml.safe_load(rdf_text) + rdf = yaml.load(rdf_text) if not isinstance(rdf, dict): - raise Exception('Failed to load rdf.yaml from S3') + raise Exception("Failed to load rdf.yaml from S3") # PUT files to the deposition for file_url in file_urls: @@ -116,9 +111,8 @@ def main(): deposition_id = deposition_info["id"] deposition_doi = deposition_info["metadata"]["prereserve_doi"]["doi"] - docstring = rdf.get("documentation", "") - if (not docstring.startswith("http") and docstring.endswith(".md")): + if not docstring.startswith("http") and docstring.endswith(".md"): # docstring should point to one of the files present...
# Get the file URL @@ -128,53 +122,48 @@ def main(): docstring = text # const file = this.zipPackage.files[ - # this.rdf.documentation.replace("./", "") + # this.rdf.documentation.replace("./", "") # ]; # if (file) { - # docstring = await file.async("string"); // get markdown - # docstring = DOMPurify.sanitize(marked(docstring)); + # docstring = await file.async("string"); // get markdown + # docstring = DOMPurify.sanitize(marked(docstring)); # } base_url = f"{ZENODO_URL}/record/{deposition_id}/files/" - metadata = rdf_to_metadata( - rdf, - base_url, - deposition_info, - docstring) - + metadata = rdf_to_metadata(rdf, base_url, deposition_info, docstring) response = requests.put( - f'{ZENODO_URL}/api/deposit/depositions/%s' % deposition_id, - params={'access_token': ACCESS_TOKEN}, - json={'metadata':metadata}, - headers=headers) + f"{ZENODO_URL}/api/deposit/depositions/%s" % deposition_id, + params={"access_token": ACCESS_TOKEN}, + json={"metadata": metadata}, + headers=headers, + ) assert_good_response( - response, - "Failed to put metadata", - info={'metadata':metadata} + response, "Failed to put metadata", info={"metadata": metadata} ) - update_status( - args.model_name, - "Would be publishing now...(but leaving as draft)", - step=None, num_steps=None) + args.resource_path, + "Would be publishing now...(but leaving as draft)", + step=None, + num_steps=None, + ) return - response = requests.post(f'{ZENODO_URL}/api/deposit/depositions/%s/actions/publish' % deposition_id, - params=params) + response = requests.post( + f"{ZENODO_URL}/api/deposit/depositions/%s/actions/publish" % deposition_id, + params=params, + ) assert_good_response(response, "Failed to publish deposition") update_status( - args.model_name, - f"The deposition DOI is {deposition_doi}", - step=None, num_steps=None) - - - - + args.resource_path, + f"The deposition DOI is {deposition_doi}", + step=None, + num_steps=None, + ) def put_file_from_url(file_url: str, destination_url: str, params: dict) -> dict: @@ -188,8 +177,7 @@ def put_file_from_url(file_url: str, destination_url: str, params: dict) -> dict # return put_file(response.raw, filename, destination_url, params) - -def put_file_path(path: str|Path, url: str, params: dict) -> dict: +def put_file_path(path: str | Path, url: str, params: dict) -> dict: """PUT file to url with params, given a file-path""" path = Path(path) filename = path.name @@ -208,34 +196,35 @@ def put_file(file_object, name, url, params): def rdf_authors_to_metadata_creators(rdf): - if 'authors' not in rdf: + if "authors" not in rdf: return [] authors = rdf["authors"] creators = [] for author in authors: - if (isinstance(author, str)): - creator = { 'name': author.split(";")[0], 'affiliation': "" } + if isinstance(author, str): + creator = {"name": author.split(";")[0], "affiliation": ""} else: creator = { - 'name': author['name'].split(";")[0], - 'affiliation': author['affiliation'], + "name": author["name"].split(";")[0], + "affiliation": author["affiliation"], } - if 'orcid' in author: - creator['orcid'] = author['orcid'] + if "orcid" in author: + creator["orcid"] = author["orcid"] creators.append(creator) return creators + def rdf_to_metadata( - rdf:dict, - base_url: str, - deposition_info: dict, - docstring: str, - additional_note="(Uploaded via https://bioimage.io)") -> dict: + rdf: dict, + base_url: str, + deposition_info: dict, + docstring: str, + additional_note="(Uploaded via https://bioimage.io)", +) -> dict: - validate_rdf(rdf) creators = rdf_authors_to_metadata_creators(rdf) - 
rdf['config']['_deposit'] = deposition_info + rdf["config"]["_deposit"] = deposition_info url = quote_plus(f"{rdf['config']['_deposit']['id']}") docstring_html = "" if docstring: @@ -244,17 +233,17 @@ def rdf_to_metadata( keywords = ["bioimage.io", "bioimage.io:" + rdf["type"]] related_identifiers = generate_related_identifiers_from_rdf(rdf, base_url) metadata = { - 'title': rdf['name'], - 'description': description, - 'access_right': "open", - 'license': rdf['license'], - 'upload_type': "other", - 'creators': creators, - 'publication_date': datetime.now().date().isoformat(), - 'keywords': keywords + rdf['tags'], - 'notes': rdf['description']+ additional_note, - 'related_identifiers': related_identifiers, - 'communities': [], + "title": rdf["name"], + "description": description, + "access_right": "open", + "license": rdf["license"], + "upload_type": "other", + "creators": creators, + "publication_date": datetime.now().date().isoformat(), + "keywords": keywords + rdf["tags"], + "notes": rdf["description"] + additional_note, + "related_identifiers": related_identifiers, + "communities": [], } return metadata @@ -267,24 +256,28 @@ def generate_related_identifiers_from_rdf(rdf, base_url): cover = urljoin(base_url, cover) covers.append(cover) - related_identifiers.append({ - 'relation': "hasPart", # is part of this upload - 'identifier': cover, - 'resource_type': "image-figure", - 'scheme': "url" - }) + related_identifiers.append( + { + "relation": "hasPart", # is part of this upload + "identifier": cover, + "resource_type": "image-figure", + "scheme": "url", + } + ) for link in rdf.get("links", ()): - related_identifiers.append({ - 'identifier': f"https://bioimage.io/#/r/{quote_plus(link)}", - 'relation': "references", # // is referenced by this upload - 'resource_type': "other", - 'scheme': "url" - }) + related_identifiers.append( + { + "identifier": f"https://bioimage.io/#/r/{quote_plus(link)}", + "relation": "references", # // is referenced by this upload + "resource_type": "other", + "scheme": "url", + } + ) # rdf.yaml or model.yaml if rdf["rdf_source"].startswith("http"): - rdf_file= rdf["rdf_source"] + rdf_file = rdf["rdf_source"] else: rdf_file = urljoin(base_url, rdf["rdf_source"]) # When we update an existing deposit, make sure we save the relative link @@ -293,54 +286,30 @@ def generate_related_identifiers_from_rdf(rdf, base_url): rdf_file = rdf_file[-1] rdf_file = urljoin(base_url, rdf_file) - related_identifiers.append({ - 'identifier': rdf_file, - 'relation': "isCompiledBy", # // compiled/created this upload - 'resource_type': "other", - 'scheme': "url", - }) + related_identifiers.append( + { + "identifier": rdf_file, + "relation": "isCompiledBy", # // compiled/created this upload + "resource_type": "other", + "scheme": "url", + } + ) documentation = rdf.get("documentation") if documentation: if not documentation.startswith("http"): documentation = urljoin(base_url, documentation) - related_identifiers.append({ - 'identifier': documentation, - 'relation': "isDocumentedBy", # is referenced by this upload - 'resource_type': "publication-technicalnote", - 'scheme': "url" - }) + related_identifiers.append( + { + "identifier": documentation, + "relation": "isDocumentedBy", # is referenced by this upload + "resource_type": "publication-technicalnote", + "scheme": "url", + } + ) return related_identifiers -def validate_rdf(rdf: dict): - """Unfortunately, probably some duplicate effort here re the spec lib, but for now 🤷""" - - if (rdf['type'] == "model") and 
(parse_version(rdf['format_version']) > MAX_RDF_VERSION): - raise Exception(f"Unsupported format version {rdf['format_version']} (it must <= {MAX_RDF_VERSION})") - - if rdf['license'] not in spdx_licenses: - raise Exception("Invalid license, the license identifier must be one from the SPDX license list (https://spdx.org/licenses/)") - if 'type' not in rdf: - raise Exception("`type` key is not defined in the RDF.") - - for cover in rdf.get("covers", []): - if "access_token=" in cover: - raise Exception("Cover URL should not contain access token") - - for link in rdf.get("links", ()): - if "access_token=" in link: - raise Exception(f"Link should not contain access token: {link}") - - if "rdf_source" not in rdf: - raise Exception("`rdf_source` key is not found in the RDF") - - if "access_token=" in rdf.get("documentation", ""): - raise Exception("Documentation URL should not contain access token") - - if __name__ == "__main__": main() - - diff --git a/.github/workflows/ci_runner.yaml b/.github/workflows/ci_runner.yaml index 72c8578..9f9b56a 100644 --- a/.github/workflows/ci_runner.yaml +++ b/.github/workflows/ci_runner.yaml @@ -3,12 +3,12 @@ name: CI Runner on: workflow_dispatch: inputs: - model_nickname: - description: 'Nickname of the model - to be used to access the model data on S3' + resource_path: + description: 'Bioimageio wide, version specific resource identifier "resource_path/version"' required: true type: string - model_zip_url: - description: 'Presigned url for the model zip-file' + package_url: + description: 'Presigned url for the package zip-file' required: true type: string @@ -38,19 +38,19 @@ jobs: python -m pip install "minio==7.2.3" "ruamel.yaml==0.18.5" "bioimageio.spec==0.4.9.post5" "typer" - name: Unzip model file run: | - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Unzipping package" "1" "6" - python .github/scripts/unzip_model.py "${{inputs.model_nickname}}" "${{inputs.model_zip_url}}" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Unzip and upload to ${{env.S3_HOST}}/${{env.S3_BUCKET}}/${{env.S3_FOLDER}}/${{inputs.resource_path}}/files" "1" + python .github/scripts/unzip_package.py "${{inputs.resource_path}}" "${{inputs.package_url}}" - name: Validate format id: validate run: | - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Starting validation" "2" "6" - python .github/scripts/validate_format.py "${{ inputs.model_nickname }}" "${{inputs.model_zip_url}}" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Starting validation" "2" + python .github/scripts/validate_format.py "${{ inputs.resource_path }}" "${{env.S3_HOST}}/${{env.S3_BUCKET}}/${{env.S3_FOLDER}}/${{inputs.resource_path}}/files/rdf.yaml" - run: | - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Starting additional tests" "3" "6" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Starting additional tests" "3" if: steps.validate.outputs.has_dynamic_test_cases == 'yes' - run: | - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Validation done" "3" "6" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Validation done" "3" if: steps.validate.outputs.has_dynamic_test_cases == 'no' @@ -80,7 +80,7 @@ jobs: run: pip install typer bioimageio.spec - name: dynamic validation shell: bash -l {0} - run: python scripts/test_dynamically.py "${{inputs.model_zip_url}}" ${{ matrix.weight_format }} --create-env-outcome ${{ 
steps.create_env.outcome }} --${{ contains(inputs.deploy_to, 'gh-pages') && 'no-ignore' || 'ignore' }}-rdf-source-field-in-validation + run: python scripts/test_dynamically.py "${{env.S3_HOST}}/${{env.S3_BUCKET}}/${{env.S3_FOLDER}}/${{inputs.resource_path}}/files/rdf.yaml" ${{ matrix.weight_format }} --create-env-outcome ${{ steps.create_env.outcome }} --${{ contains(inputs.deploy_to, 'gh-pages') && 'no-ignore' || 'ignore' }}-rdf-source-field-in-validation timeout-minutes: 60 conclude: @@ -90,4 +90,4 @@ jobs: steps: - uses: actions/checkout@v4 - run: | - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Awaiting review" "4" "6" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Awaiting review" "4" diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 2530135..2a7cb9d 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -3,8 +3,8 @@ name: publish on: workflow_dispatch: inputs: - model_nickname: - description: 'Nickname of the model - to be used to access the model data on S3' + resource_path: + description: 'Bioimageio ID of the resource - to be used to access the resource on S3' required: true type: string @@ -33,6 +33,6 @@ jobs: python -m pip install "minio==7.2.3" "loguru==0.7.2" "packaging==23.2" "spdx-license-list==3.22" "ruamel.yaml==0.18.5" "typer" - name: Publish to Zenodo run: | - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Publishing to Zenodo" "5" "6" - python .github/scripts/upload_model_to_zenodo.py --model_name "${{inputs.model_nickname}}" - python .github/scripts/update_status.py "${{ inputs.model_nickname }}" "Publishing complete" "6" "6" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing to Zenodo" "5" + python .github/scripts/upload_model_to_zenodo.py --resource_path "${{inputs.resource_path}}" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing complete" "6" diff --git a/functions/create_status.js b/functions/create_status.js index 96bfc1b..bd8bfc2 100644 --- a/functions/create_status.js +++ b/functions/create_status.js @@ -1,27 +1,27 @@ -import { S3Client, ListBucketsCommand, PutObjectCommand } from "@aws-sdk/client-s3"; -const { S3_ENDPOINT, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY } = process.env; +import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3"; +const { S3_ENDPOINT, S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY } = process.env; export default async (event, context) => { - + console.log(S3_ENDPOINT); console.log(S3_ACCESS_KEY_ID); console.log(S3_SECRET_ACCESS_KEY); console.log(`Connecting to S3: ${S3_ENDPOINT}`); - + const data = await event.json(); console.log(data); - //const options = { - //method: "POST", - //headers: headers, - //body: JSON.stringify({ - //'ref': 'main', - //'inputs':{ - //'status_url': data.status_url, - //'model_nickname': data.model_nickname, - //} - //}) + //const options = { + //method: "POST", + //headers: headers, + //body: JSON.stringify({ + //'ref': 'main', + //'inputs':{ + //'status_url': data.status_url, + //'resource_path': data.resource_path, + //} + //}) //}; //const data = await JSON.parse(event.body) @@ -46,54 +46,54 @@ export default async (event, context) => { const command = new PutObjectCommand({ Bucket: "public-datasets", Key: `${root_folder}/${folder}/${filename}`, - Body: JSON.stringify({"status": "This works too"}), + Body: JSON.stringify({ "status": "This works too" }), }); const resp = await client.send(command); 
console.log(resp); //const data = await event.json(); - //const options = { - //method: "POST", - //headers: headers, - //body: JSON.stringify({ - //'ref': 'main', - //'inputs':{'status_url': data.status_url} - //}) + //const options = { + //method: "POST", + //headers: headers, + //body: JSON.stringify({ + //'ref': 'main', + //'inputs':{'status_url': data.status_url} + //}) //}; //if(!data.status_url){ - //const res = Response.json({'message': "Failed: status_url not found in request json"}); - //res.status = 500; - //res.headers.set("Access-Control-Allow-Origin", "*"); - //res.headers.append("Access-Control-Allow-Headers", "*"); - //res.headers.append("Access-Control-Allow-Methods", "*"); - //return res; - + //const res = Response.json({'message': "Failed: status_url not found in request json"}); + //res.status = 500; + //res.headers.set("Access-Control-Allow-Origin", "*"); + //res.headers.append("Access-Control-Allow-Headers", "*"); + //res.headers.append("Access-Control-Allow-Methods", "*"); + //return res; + //}else{ - //try{ - //let resp = await fetch(GITHUB_URL, options); - //try{ - //console.log(await resp.json()); - //}catch(err){ - //console.log("No JSON in response"); - //} + //try{ + //let resp = await fetch(GITHUB_URL, options); + //try{ + //console.log(await resp.json()); + //}catch(err){ + //console.log("No JSON in response"); + //} - //}catch(err){ - //console.error("Failed to fetch:"); - //console.error(err); - //const res = Response.json( - //{'message': `Failed: ${err.message}`}, - //{status: 500}); - //res.headers.set("Access-Control-Allow-Origin", "*"); - //res.headers.append("Access-Control-Allow-Headers", "*"); - //res.headers.append("Access-Control-Allow-Methods", "*"); - //return res; - //} - // const res = new Response("Success"); - const res = Response.json({"message":"Success"}); - res.headers.set("Access-Control-Allow-Origin", "*"); - res.headers.append("Access-Control-Allow-Headers", "*"); - res.headers.append("Access-Control-Allow-Methods", "*"); - return res; + //}catch(err){ + //console.error("Failed to fetch:"); + //console.error(err); + //const res = Response.json( + //{'message': `Failed: ${err.message}`}, + //{status: 500}); + //res.headers.set("Access-Control-Allow-Origin", "*"); + //res.headers.append("Access-Control-Allow-Headers", "*"); + //res.headers.append("Access-Control-Allow-Methods", "*"); + //return res; + //} + // const res = new Response("Success"); + const res = Response.json({ "message": "Success" }); + res.headers.set("Access-Control-Allow-Origin", "*"); + res.headers.append("Access-Control-Allow-Headers", "*"); + res.headers.append("Access-Control-Allow-Methods", "*"); + return res; //} } diff --git a/functions/notify_ci.js b/functions/notify_ci.js index 6e1dd41..7f08514 100644 --- a/functions/notify_ci.js +++ b/functions/notify_ci.js @@ -5,26 +5,26 @@ const return_cors_headers = { 'Access-Control-Allow-Methods': 'GET, POST' }; const headers = { - 'Accept': 'application/vnd.github.v3+json', - 'Authorization': `token ${GITHUB_TOKEN}` + 'Accept': 'application/vnd.github.v3+json', + 'Authorization': `token ${GITHUB_TOKEN}` }; - // 'user-agent': 'bioimage-bot' +// 'user-agent': 'bioimage-bot' export default async (event, context) => { const data = await event.json(); - if(!data.model_nickname){ - const error_message = "Failed: model_nickname not found in request json"; + if (!data.resource_path) { + const error_message = "Failed: resource_path not found in request json"; console.error() - const res = Response.json({'message': 
error_message, 'status': 500}); + const res = Response.json({ 'message': error_message, 'status': 500 }); res.headers.set("Access-Control-Allow-Origin", "*"); res.headers.append("Access-Control-Allow-Headers", "*"); res.headers.append("Access-Control-Allow-Methods", "*"); return res; } - if(!data.model_zip_url){ - const error_message = "Failed: model_zip_url not found in request json"; + if (!data.package_url) { + const error_message = "Failed: package_url not found in request json"; console.error() - const res = Response.json({'message': error_message, 'status': 500}); + const res = Response.json({ 'message': error_message, 'status': 500 }); res.headers.set("Access-Control-Allow-Origin", "*"); res.headers.append("Access-Control-Allow-Headers", "*"); res.headers.append("Access-Control-Allow-Methods", "*"); @@ -32,55 +32,55 @@ export default async (event, context) => { } - const options = { + const options = { method: "POST", - headers: headers, + headers: headers, body: JSON.stringify({ 'ref': GITHUB_BRANCH, - 'inputs':{ - 'model_nickname': data.model_nickname, - 'model_zip_url': data.model_zip_url, + 'inputs': { + 'resource_path': data.resource_path, + 'package_url': data.package_url, } }) }; let resp_obj = {}; - try{ + try { const resp = await fetch(GITHUB_URL, options); - if( resp.status === 204){ + if (resp.status === 204) { // According to API docs, just expect a 204 - resp_obj = {'status': resp.status}; - }else{ + resp_obj = { 'status': resp.status }; + } else { console.error("Bad response from CI: ${resp.status}"); let text = ""; - try{ + try { text = await resp.text() - }catch{ + } catch { text = "(no-text)"; } const res = Response.json( - {'message': `Failed to decode json from CI [status: ${resp.status}, content: ${text}]`}, - {'status': 500}); + { 'message': `Failed to decode json from CI [status: ${resp.status}, content: ${text}]` }, + { 'status': 500 }); res.headers.set("Access-Control-Allow-Origin", "*"); res.headers.append("Access-Control-Allow-Headers", "*"); res.headers.append("Access-Control-Allow-Methods", "*"); return res; } - }catch(err){ + } catch (err) { console.error("Failed to fetch from CI endpoint:"); console.error(GITHUB_URL); console.error(err); const res = Response.json( - {'message': `Failed: ${err.message}`}, - {status: 500}); + { 'message': `Failed: ${err.message}` }, + { status: 500 }); res.headers.set("Access-Control-Allow-Origin", "*"); res.headers.append("Access-Control-Allow-Headers", "*"); res.headers.append("Access-Control-Allow-Methods", "*"); return res; } // const res = new Response("Success"); - const reply_obj = {"message":`Contacted CI: ${resp_obj.message}`, 'status': 200}; + const reply_obj = { "message": `Contacted CI: ${resp_obj.message}`, 'status': 200 }; console.log("Response from CI:"); console.log(resp_obj); const res = Response.json(reply_obj); diff --git a/src/components/Uploader/Review.svelte b/src/components/Uploader/Review.svelte index 12352ec..44e2b5b 100644 --- a/src/components/Uploader/Review.svelte +++ b/src/components/Uploader/Review.svelte @@ -7,12 +7,12 @@ import JSONTree from 'svelte-json-tree'; let model_name_message = ""; - let model_nickname = uploader.model_nickname; + let resource_path = uploader.resource_path; let rdf = uploader.rdf; let ready_to_publish = uploader.ready_to_publish(); const dispatch = createEventDispatcher(); - + function is_done() { dispatch('done'); } @@ -24,29 +24,29 @@ async function regenerate_nickname(){ await uploader.regenerate_nickname(); - model_nickname = uploader.model_nickname; + 
resource_path = uploader.resource_path; ready_to_publish = uploader.ready_to_publish(); console.log("Ready to publish?", ready_to_publish); rdf = uploader.rdf; //rerender = !rerender; } - if(!model_nickname) regenerate_nickname(); - + if(!resource_path) regenerate_nickname(); + {#if !uploader.logged_in()} - Please login to the BioEngine to complete upload + Please login to the BioEngine to complete upload {/if}

{#if model_name_message }({model_name_message}){/if} - {#if model_nickname} - Your model nickname is: - {model_nickname.name} {model_nickname.icon}  + {#if resource_path} + Your model nickname is: + {resource_path.id} {resource_path.emoji}  {/if}

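Note on the storage layout the scripts above now share: every resource lives under <S3_FOLDER>/<resource_path>/<version>/ in the bucket, with the unzipped package under files/ and the CI state tracked alongside it in status.json and log.json. A minimal stand-alone sketch of reading that state with the same minio client the workflow pins (the environment variable names mirror the ones update_status.sh checks; the helper name and everything else here is illustrative, not part of the diff):

import json
import os

from minio import Minio

# Assumes the same S3_* environment variables the shell script validates.
client = Minio(
    os.environ["S3_HOST"],
    access_key=os.environ["S3_ACCESS_KEY_ID"],
    secret_key=os.environ["S3_SECRET_ACCESS_KEY"],
)

def read_status(resource_path: str, version: str) -> dict:
    # Mirrors Client.get_status: fetch <S3_FOLDER>/<resource_path>/<version>/status.json
    key = f"{os.environ['S3_FOLDER']}/{resource_path}/{version}/status.json"
    response = client.get_object(os.environ["S3_BUCKET"], key)
    try:
        return json.loads(response.read())
    finally:
        response.close()
        response.release_conn()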
diff --git a/src/components/Uploader/UploadStatus.svelte b/src/components/Uploader/UploadStatus.svelte index a1c06c8..83a0459 100644 --- a/src/components/Uploader/UploadStatus.svelte +++ b/src/components/Uploader/UploadStatus.svelte @@ -11,23 +11,23 @@ let error_element: Object; let last_error_object: Error; let step: UploaderStep = uploader.status.step; - let model_name = uploader.model_nickname.name; + let model_name = uploader.resource_path.id; const dispatch = createEventDispatcher(); - function copy_error_to_clipboard(text: string){ // Copy the text inside the text field if(navigator){ if(navigator.clipboard){ const error_log = { text: text, - message: last_error_object.message, - stack: last_error_object.stack, + message: last_error_object.message, + stack: last_error_object.stack, name: last_error_object.name, - cause: last_error_object.cause + cause: last_error_object.cause }; navigator.clipboard.writeText(JSON.stringify(error_log)); toast.success("Copied error to clipboard"); - return + return } } console.error("Clipboard unavailable"); @@ -73,23 +73,23 @@ {#if step === UploaderStep.FINISHED }

Almost there,
There's nothing you need to do right now. Your model is uploaded and the CI-bots have started their work!
You can check the status of the CI at any point from here
{:else}
{status_message} {step}
{#if step === UploaderStep.UPLOADING }
Uploading
{#if upload_value} {upload_value} {:else} Progress {/if} {/if} -{/if} +{/if} diff --git a/src/components/Uploader/index.svelte b/src/components/Uploader/index.svelte index 7a951ab..edf6023 100644 --- a/src/components/Uploader/index.svelte +++ b/src/components/Uploader/index.svelte @@ -23,9 +23,9 @@ } let step = "add"; let rerender = false; - + uploader.add_render_callback(() => { - rerender = !rerender; + rerender = !rerender; }); function reset(){ @@ -70,7 +70,7 @@ {:else if step == "uploading"} {step="add"}} /> - + {:else} Opps! something went wrong 😬 diff --git a/src/lib/uploader.ts b/src/lib/uploader.ts index b1cf68a..d65e0fd 100644 --- a/src/lib/uploader.ts +++ b/src/lib/uploader.ts @@ -1,17 +1,17 @@ -import * as imjoyRPC from 'imjoy-rpc'; import * as imjoyCore from 'imjoy-core'; +import * as imjoyRPC from 'imjoy-rpc'; // import axios from 'axios'; ///dist/browser/axios.cjs'; import { default as axios } from 'axios'; import { FileFromJSZipZipOject, clean_rdf } from "./utils.ts"; //import { fetch_with_progress } from "./utils.ts"; -import {default as JSZip} from "jszip"; import yaml from "js-yaml"; +import { default as JSZip } from "jszip"; -const regex_zip = /\.zip$/gi ; -const regex_rdf = /(rdf\.yml|rdf\.yaml|bioimage\.yml|bioimage\.yaml)$/gi ; +const regex_zip = /\.zip$/gi; +const regex_rdf = /(rdf\.yml|rdf\.yaml|bioimage\.yml|bioimage\.yaml)$/gi; const hostname = `${window.location.protocol}//${window.location.host}`; @@ -20,132 +20,132 @@ const notify_ci_url = `${hostname}/.netlify/functions/notify_ci`; export enum UploaderStep { - NOT_STARTED = "not-started", - ZIPPING = "zipping", - UPLOADING = "uploading", - NOTIFYING_CI = "notifying-ci", - FINISHED = "finished", - FAILED = "failed", + NOT_STARTED = "not-started", + ZIPPING = "zipping", + UPLOADING = "uploading", + NOTIFYING_CI = "notifying-ci", + FINISHED = "finished", + FAILED = "failed", } -class UploaderStatus{ +class UploaderStatus { message = ""; - step=UploaderStep.NOT_STARTED; + step = UploaderStep.NOT_STARTED; upload_progress_value = ''; upload_progress_max = ''; - constructor(){ + constructor() { } - reset(){ + reset() { this.message = ""; - this.step=UploaderStep.NOT_STARTED; + this.step = UploaderStep.NOT_STARTED; this.upload_progress_value = ""; this.upload_progress_max = ""; } } -class ModelNickname{ - name = ""; +class ResourceId { + id = ""; emoji = ""; } -export class Uploader{ +export class Uploader { static MAX_CONNECTION_RETRIES = 3; static server_url = "https://ai.imjoy.io"; api: any; connection_retry = 0; - error_object: Error | null = null; + error_object: Error | null = null; files: File[] = []; login_url: string | null = null; - model_nickname: ModelNickname | null = null; - model_zip_url: string | null = null; - rdf : any = null; + resource_path: ResourceId | null = null; + package_url: string | null = null; + rdf: any = null; render_callbacks: (() => void)[] = []; - server: any = null; + server: any = null; server_url: string | null = null; - show_login_window: (url:string) => void; - status: UploaderStatus; + show_login_window: (url: string) => void; + status: UploaderStatus; storage: any = null; storage_info: any = null; - token: string | null= ''; + token: string | null = ''; validator: any = null - zip_urls: {get: string, put: string} | null = null; + zip_urls: { get: string, put: string } | null = null; //this.status = {message:"", is_finished: false, is_uploading: false, ci_failed: false}; //server_url = "https://hypha.bioimage.io"; //server_url = 
"https://hypha.bioimage.io/public/apps/hypha-login/"; - constructor(){ + constructor() { console.log("Creating uploader..."); this.token = window.sessionStorage.getItem('token'); //this.status = {message:"", is_finished: false, is_uploading: false, ci_failed: false}; - this.status = new UploaderStatus(); - this.show_login_window = (url) => {window.open(url, '_blank')}; + this.status = new UploaderStatus(); + this.show_login_window = (url) => { window.open(url, '_blank') }; } - async init(){ + async init() { await this.initHypha(); } - reset(){ - this.model_nickname = null; + reset() { + this.resource_path = null; this.rdf = null; - this.status.reset(); + this.status.reset(); } - set_login_url(ctx: any){ + set_login_url(ctx: any) { this.show_login_window(ctx.login_url); this.login_url = ctx.login_url } - async initHypha(){ + async initHypha() { // Init Imjoy-Core const imjoy = new imjoyCore.ImJoy({ imjoy_api: {}, //imjoy config }); - - await imjoy.start({workspace: 'default'}); + + await imjoy.start({ workspace: 'default' }); console.log('ImJoy started'); this.api = imjoy.api; // Init Imjoy-Hypha - if(this.connection_retry > Uploader.MAX_CONNECTION_RETRIES){ - console.error("Max retries reached. Please try again later or contact support"); + if (this.connection_retry > Uploader.MAX_CONNECTION_RETRIES) { + console.error("Max retries reached. Please try again later or contact support"); return this; } console.log("Initializing Hypha..."); - console.log(` connecting to ${Uploader.server_url}`); - if(!this.token){ + console.log(` connecting to ${Uploader.server_url}`); + if (!this.token) { console.log(" Getting token..."); console.log(" from:"); console.log(imjoyRPC); console.log(` using url: ${Uploader.server_url}`); this.token = await imjoyRPC.hyphaWebsocketClient.login({ - server_url: Uploader.server_url, + server_url: Uploader.server_url, login_callback: this.set_login_url.bind(this), }); window.sessionStorage.setItem('token', this.token!); console.log(' token saved'); console.log(' 🥳🥳🥳🥳'); } - console.log(`Token: ${this.token!.slice(0,5)}...`); - - try{ + console.log(`Token: ${this.token!.slice(0, 5)}...`); + + try { this.server = await imjoyRPC.hyphaWebsocketClient.connectToServer({ - name: 'BioImageIO.this', - server_url: Uploader.server_url, - token: this.token, + name: 'BioImageIO.this', + server_url: Uploader.server_url, + token: this.token, }); this.render(); - }catch(error){ + } catch (error) { console.error("Connection to Hypha failed:"); console.error(error); this.connection_retry = this.connection_retry + 1; @@ -159,37 +159,37 @@ export class Uploader{ console.log(this); } - show_login_message(context: any){ + show_login_message(context: any) { this.login_url = context.login_url; } - async load_from_file(input_file: File){ - if(input_file.name.search(regex_zip) !== -1){ + async load_from_file(input_file: File) { + if (input_file.name.search(regex_zip) !== -1) { await this.load_zip_file(input_file); - }else if (input_file.name.search(regex_rdf) !== -1){ + } else if (input_file.name.search(regex_rdf) !== -1) { await this.load_rdf_file(input_file); - }else{ + } else { throw Error("Invalid file given"); } } - - async load_from_files(files: File[]){ + + async load_from_files(files: File[]) { console.debug("Loading model from files"); const candidates = files.filter((file) => file.name.search(regex_rdf) !== -1) // Obtain the RDF file - if( candidates.length > 1){ + if (candidates.length > 1) { console.error("Given too many RDF files. 
Please make sure at most one RDF file is present"); console.debug("Found files:"); - for(const item of files){ + for (const item of files) { console.debug(item.name); } throw Error(`Invalid files given: ${candidates.length} model-definition files found!`); } - if (candidates.length === 1){ + if (candidates.length === 1) { const rdf_file = candidates[0]; await this.load_rdf_file(rdf_file); - }else{ + } else { this.rdf = {}; } console.debug('RDF:'); @@ -198,38 +198,38 @@ export class Uploader{ this.files = files; } - async load_zip_file(zip_file: File){ + async load_zip_file(zip_file: File) { console.log("Loading zip file..."); const zip_package = await JSZip.loadAsync(zip_file); console.log(zip_package); const files = []; - for(const item of Object.values(zip_package.files)){ - files.push( await FileFromJSZipZipOject(item)); + for (const item of Object.values(zip_package.files)) { + files.push(await FileFromJSZipZipOject(item)); }; await this.load_from_files(files); } - async load_rdf_file(rdf_file: File){ + async load_rdf_file(rdf_file: File) { const rdf_text = await rdf_file.text(); this.read_model_text(rdf_text); } - read_model_text(rdf_text: string){ + read_model_text(rdf_text: string) { this.rdf = yaml.load(rdf_text); } - load_validator(){ - if(this.validator) return this.validator; + load_validator() { + if (this.validator) return this.validator; this.validator = this.api.getPlugin( "https://raw.githubusercontent.com/jmetz/spec-bioimage-io/dev/scripts/bio-rdf-validator.imjoy.html" ); return this.validator; } - async validate(){ - - /* + async validate() { + + /* * Lazy loading of validator */ const validator = await this.load_validator(); @@ -237,75 +237,75 @@ rdf = clean_rdf(rdf); console.log("RDF after cleaning: ", rdf); const results = await validator.validate(rdf); - if (results.error){ + if (results.error) { throw new Error(results.error); } this.rdf = rdf; } - ready_for_review(){ - if(!this.rdf) return false; - if(!this.files) return false; + ready_for_review() { + if (!this.rdf) return false; + if (!this.files) return false; return true; } - - ready_to_publish(){ - if(!this.ready_for_review()) return false; - if(!this.model_nickname) return false; + + ready_to_publish() { + if (!this.ready_for_review()) return false; + if (!this.resource_path) return false; return true; } - logged_in(){ - - if(!this.server) return false; + logged_in() { + + if (!this.server) return false; return true; } - async regenerate_nickname(){ - try{ - const model_name = Object.assign(new ModelNickname, await (await fetch(generate_name_url)).json()); + async regenerate_nickname() { + try { + const model_name = Object.assign(new ResourceId, await (await fetch(generate_name_url)).json()); console.log("Generated name:", model_name); const error = ""; - this.model_nickname = model_name; - this.rdf.nickname = model_name.name; + this.resource_path = model_name; + this.rdf.nickname = model_name.id; return { model_name, error }; - }catch(err){ + } catch (err) { console.error("Failed to generate name:") console.error(err); - console.error(`URL used: ${generate_name_url}`); + console.error(`URL used: ${generate_name_url}`); throw Error(err); - } + } } - async upload_file(file: File, progress_callback: null | ((val: string, tot:string)=>null)){ - if(!this.model_nickname){ - throw new Error("Unable to upload, model_nickname not set"); + async upload_file(file: File, progress_callback: null | ((val: string, tot: string) => null)) { + if (!this.resource_path) { + throw new Error("Unable to upload,
resource_path not set"); }; - this.status.message = "Uploading"; + this.status.message = "Uploading"; this.status.step = UploaderStep.UPLOADING; this.render(); - const filename = `${this.model_nickname.name}/${file.name}`; + const filename = `${this.resource_path.id}/${file.name}`; const url_put = await this.storage.generate_presigned_url( - this.storage_info.bucket, + this.storage_info.bucket, this.storage_info.prefix + filename, - {client_method: "put_object", _rkwargs: true} + { client_method: "put_object", _rkwargs: true } + ) + const url_get = await this.storage.generate_presigned_url( + this.storage_info.bucket, + this.storage_info.prefix + filename ) - const url_get = await this.storage.generate_presigned_url( - this.storage_info.bucket, - this.storage_info.prefix + filename - ) console.log( - "Used bucket and prefix:", - this.storage_info.bucket, + "Used bucket and prefix:", + this.storage_info.bucket, this.storage_info.prefix); console.log("url_get:"); console.log(url_get); console.log("url_put"); console.log(url_put); - try{ - const config : any = {}; - if(typeof progress_callback === "function"){ + try { + const config: any = {}; + if (typeof progress_callback === "function") { config.onUploadProgress = (progressEvent: any) => { this.status.upload_progress_value = progressEvent.loaded; this.status.upload_progress_max = progressEvent.total; @@ -314,7 +314,7 @@ export class Uploader{ progress_callback(progressEvent.loaded, progressEvent.total); this.render(); }; - }else{ + } else { config.onUploadProgress = (progressEvent: any) => { this.status.upload_progress_value = progressEvent.loaded; this.status.upload_progress_max = progressEvent.total; @@ -326,20 +326,20 @@ export class Uploader{ const response = await axios.put(url_put, file, config); //const response = await fetch(url_put, {method:"PUT", body:file}); //const response = await fetch_with_progress( - //url_put, - //{ - //method:"PUT", - //body:file, - //upload_listener: (evt) => { - //if (evt.lengthComputable) { - //console.log("upload progress:", evt.loaded / evt.total); - //} - //} - //} + //url_put, + //{ + //method:"PUT", + //body:file, + //upload_listener: (evt) => { + //if (evt.lengthComputable) { + //console.log("upload progress:", evt.loaded / evt.total); + //} + //} + //} //); console.log("Upload result:", response.data); - return {'get': url_get, 'put': url_put}; - }catch(error){ + return { 'get': url_get, 'put': url_put }; + } catch (error) { console.error("Upload failed!"); console.error(`Unable to PUT ${filename} to ${url_put}`); console.error(error); @@ -347,32 +347,32 @@ export class Uploader{ } } - async create_zip(){ + async create_zip() { this.status.message = "Zipping model"; this.status.step = UploaderStep.ZIPPING; this.render(); console.debug("Finding yaml file..."); const rdf_file = this.files.filter(item => item.name === "rdf.yaml") - if(rdf_file.length !== 1){ - this.status.message = "Publishing failed - unable to find rdf.yaml"; + if (rdf_file.length !== 1) { + this.status.message = "Publishing failed - unable to find rdf.yaml"; this.status.step = UploaderStep.FAILED; this.render(); throw new Error("Could not find RDF file in file list"); } const zip = new JSZip(); - for(const file of this.files){ + for (const file of this.files) { zip.file(file.name, file); } - - const blob = await zip.generateAsync({type: "blob"}); + + const blob = await zip.generateAsync({ type: "blob" }); const zipfile = new File([blob], "model.zip"); this.status.message = "Created zip file"; this.render(); return zipfile; } - 
async publish(){ + async publish() { console.log("Running upload steps (zip, upload, notify CI)"); this.render(); const zipfile = await this.create_zip(); @@ -384,50 +384,51 @@ this.storage = await this.server.get_service("s3-storage"); this.storage_info = await this.storage.generate_credential(); this.zip_urls = await this.upload_file(zipfile, null); - - try{ + + try { await this.notify_ci_bot(); - }catch(err){ + } catch (err) { console.error("Notifying the ci-bot failed:"); console.error(err); this.error_object = err; - this.status.message = err.message, - this.status.step = UploaderStep.FAILED; + this.status.message = err.message; + this.status.step = UploaderStep.FAILED; this.render(); - return + return } this.status.step = UploaderStep.FINISHED; this.render(); } - render(){ + render() { this.render_callbacks.forEach(callback => callback()); } - add_render_callback(callback: ()=>void){ + add_render_callback(callback: () => void) { this.render_callbacks.push(callback); } - + async notify_ci_bot() { - if(!notify_ci_url){ + if (!notify_ci_url) { console.error("notify_ci_url not set") throw new Error("notify_ci_url not set"); - } + } this.status.message = "⌛ Trying to notify bioimage-bot for the new item..."; this.status.step = UploaderStep.NOTIFYING_CI; this.render(); // trigger CI with the bioimageio bot endpoint - try{ + try { const resp = await fetch(notify_ci_url, { - method: 'POST', - headers: {"Content-Type": "application/json"}, - body: JSON.stringify({'model_nickname': this.model_nickname!.name, 'model_zip_url': this.zip_urls!.get})}); + method: 'POST', + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ 'resource_path': this.resource_path!.id, 'package_url': this.zip_urls!.get }) + }); if (resp.status === 200) { const ci_resp = (await resp.json()); - if(ci_resp.status == 200){ + if (ci_resp.status == 200) { this.status.message = `🎉 bioimage-bot has successfully detected the item: ${ci_resp.message}`; - }else{ + } else { throw new Error(`😬 bioimage-bot notification ran into an issue [${ci_resp.status}]: ${ci_resp.message}`); } @@ -435,7 +436,7 @@ } else { const ci_resp = await resp.text(); throw new Error(`😬 bioimage-bot failed to detect the new item, please report the issue to the admin team of bioimage.io: ${ci_resp}`); } - }catch(err){ + } catch (err) { throw new Error(`😬 Failed to reach the bioimageio-bot, please report the issue to the admin team of bioimage.io: ${err}`); } this.render();
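For reference, the hand-off contract after this rename: the uploader POSTs JSON with resource_path and package_url to the notify_ci Netlify function, which forwards them unchanged as workflow_dispatch inputs to ci_runner.yaml. A hedged sketch of exercising that endpoint by hand with only the Python standard library (the hostname is illustrative, since the app derives it from window.location, and both payload values are hypothetical stand-ins for what the upload flow produces):

import json
import urllib.request

payload = {
    "resource_path": "some-resource-id",  # hypothetical resource ID
    "package_url": "https://example.com/presigned/package.zip",  # hypothetical presigned URL
}

request = urllib.request.Request(
    "https://bioimage.io/.netlify/functions/notify_ci",  # illustrative host
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)

# On success the function relays GitHub's 204 and replies with its own JSON body.
with urllib.request.urlopen(request) as response:
    print(response.status, response.read().decode())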