From ff586b4fb1108aae4c52b74ae7a0bcedb2fc2efa Mon Sep 17 00:00:00 2001
From: fynnbe
Date: Wed, 7 Feb 2024 15:56:23 +0100
Subject: [PATCH] resource_id -> resource_path

---
 .github/scripts/s3_client.py                | 44 +++++++++++----------
 .github/scripts/unzip_model.py              | 18 ++++-----
 .github/scripts/update_log.py               | 16 ++++----
 .github/scripts/update_status.py            | 16 ++++----
 .github/scripts/update_status.sh            | 10 ++---
 .github/scripts/upload_model_to_zenodo.py   | 10 ++---
 .github/workflows/ci_runner.yaml            | 18 ++++-----
 .github/workflows/publish.yaml              |  8 ++--
 functions/create_status.js                  |  2 +-
 functions/notify_ci.js                      |  6 +--
 src/components/Uploader/Review.svelte       | 10 ++---
 src/components/Uploader/UploadStatus.svelte |  4 +-
 src/components/Uploader/index.svelte        |  2 +-
 src/lib/uploader.ts                         | 16 ++++----
 14 files changed, 91 insertions(+), 89 deletions(-)

diff --git a/.github/scripts/s3_client.py b/.github/scripts/s3_client.py
index 6e1297b..0b37751 100644
--- a/.github/scripts/s3_client.py
+++ b/.github/scripts/s3_client.py
@@ -128,23 +128,23 @@ def load_file(self, path) -> str:
         # response = requests.get(url)
         # return response.content

-    def check_versions(self, resource_id: str) -> Iterator[VersionStatus]:
+    def check_versions(self, resource_path: str) -> Iterator[VersionStatus]:
         """
         Check model repository for version of model-name.
         Returns dictionary of version-status pairs.
         """
-        logger.debug("Checking versions for {}", resource_id)
-        version_folders = self.ls(f"{resource_id}/", only_folders=True)
+        logger.debug("Checking versions for {}", resource_path)
+        version_folders = self.ls(f"{resource_path}/", only_folders=True)

         # For each folder get the contents of status.json
         for version in version_folders:
-            yield self.get_version_status(resource_id, version)
+            yield self.get_version_status(resource_path, version)

-    def get_unpublished_version(self, resource_id: str) -> str:
+    def get_unpublished_version(self, resource_path: str) -> str:
         """Get the unpublisted version"""
-        versions = list(self.check_versions(resource_id))
+        versions = list(self.check_versions(resource_path))
         if len(versions) == 0:
             return "1"
         unpublished = [version for version in versions if version.status == "staging"]
@@ -155,49 +155,51 @@ def get_unpublished_version(self, resource_id: str) -> str:
             raise ValueError("Opps! We seem to have > 1 staging versions!!")
         return unpublished[0].version

-    def get_version_status(self, resource_id: str, version: str) -> VersionStatus:
-        status = self.get_status(resource_id, version)
+    def get_version_status(self, resource_path: str, version: str) -> VersionStatus:
+        status = self.get_status(resource_path, version)
         status_str = status.get("status", "status-field-unset")
-        version_path = f"{resource_id}/{version}"
+        version_path = f"{resource_path}/{version}"
         return VersionStatus(version, status_str, version_path)

-    def get_status(self, resource_id: str, version: str) -> dict:
-        version_path = f"{resource_id}/{version}"
-        logger.debug("resource_id: {}, version: {}", resource_id, version)
+    def get_status(self, resource_path: str, version: str) -> dict:
+        version_path = f"{resource_path}/{version}"
+        logger.debug("resource_path: {}, version: {}", resource_path, version)
         status_path = f"{version_path}/status.json"
         logger.debug("Getting status using path {}", status_path)
         status = self.load_file(status_path)
         status = json.loads(status)
         return status

-    def put_status(self, resource_id: str, version: str, status: dict):
-        logger.debug("Updating status for {}-{}, with {}", resource_id, version, status)
+    def put_status(self, resource_path: str, version: str, status: dict):
+        logger.debug(
+            "Updating status for {}-{}, with {}", resource_path, version, status
+        )
         contents = json.dumps(status).encode()
         file_object = io.BytesIO(contents)
         self.put(
-            f"{resource_id}/{version}/status.json",
+            f"{resource_path}/{version}/status.json",
             file_object,
             length=len(contents),
             content_type="application/json",
         )

-    def get_log(self, resource_id: str, version: str) -> dict:
-        version_path = f"{resource_id}/{version}"
-        logger.debug("resource_id: {}, version: {}", resource_id, version)
+    def get_log(self, resource_path: str, version: str) -> dict:
+        version_path = f"{resource_path}/{version}"
+        logger.debug("resource_path: {}, version: {}", resource_path, version)
         path = f"{version_path}/log.json"
         logger.debug("Getting log using path {}", path)
         log = self.load_file(path)
         log = json.loads(log)
         return log

-    def put_log(self, resource_id: str, version: str, log: dict):
-        logger.debug("Updating log for {}-{}, with {}", resource_id, version, log)
+    def put_log(self, resource_path: str, version: str, log: dict):
+        logger.debug("Updating log for {}-{}, with {}", resource_path, version, log)
         contents = json.dumps(log).encode()
         file_object = io.BytesIO(contents)
         self.put(
-            f"{resource_id}/{version}/log.json",
+            f"{resource_path}/{version}/log.json",
             file_object,
             length=len(contents),
             content_type="application/json",
diff --git a/.github/scripts/unzip_model.py b/.github/scripts/unzip_model.py
index a5ce27d..1eaf8b7 100644
--- a/.github/scripts/unzip_model.py
+++ b/.github/scripts/unzip_model.py
@@ -11,7 +11,7 @@ def create_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser()
-    parser.add_argument("resource_id", help="Resource ID")
+    parser.add_argument("resource_path", help="Resource ID")
     parser.add_argument(
         "package_url",
         help="Resource package URL (needs to be publicly accessible or presigned)",
@@ -29,22 +29,22 @@ def get_args(argv: Optional[list] = None):
 def main():
     args = get_args()
-    resource_id = args.resource_id
+    resource_path = args.resource_path
     package_url = args.package_url
     try:
-        unzip_from_url(resource_id, package_url)
+        unzip_from_url(resource_path, package_url)
     except Exception:
         err_message = f"An error occurred in the CI:\n {traceback.format_exc()}"
         print(err_message)
-        update_status(resource_id, {"status": err_message})
+        update_status(resource_path, {"status": err_message})
         raise

-def unzip_from_url(resource_id, package_url):
+def unzip_from_url(resource_path, package_url):
     filename = "model.zip"
     client = create_client()
-    versions = client.check_versions(resource_id)
+    versions = client.check_versions(resource_path)
     if len(versions) == 0:
         version = "1"
@@ -54,11 +54,11 @@ def unzip_from_url(resource_id, package_url):
         raise NotImplementedError("Updating/publishing new version not implemented")

     # TODO: Need to make sure status is staging
-    status = client.get_status(resource_id, version)
+    status = client.get_status(resource_path, version)
     status_str = status.get("status", "missing-status")
     if status_str != "staging":
         raise ValueError(
-            "Model {} at version {} is status: {}", resource_id, version, status_str
+            "Model {} at version {} is status: {}", resource_path, version, status_str
         )

     # Download the model zip file
@@ -69,7 +69,7 @@ def unzip_from_url(resource_id, package_url):
     for filename in zipobj.namelist():
         # file_object = io.BytesIO(zipobj)
         file_object = zipobj.open(filename)
-        path = f"{resource_id}/{version}/{filename}"
+        path = f"{resource_path}/{version}/{filename}"

         client.put(
             path,
diff --git a/.github/scripts/update_log.py b/.github/scripts/update_log.py
index c14c923..12e95c4 100644
--- a/.github/scripts/update_log.py
+++ b/.github/scripts/update_log.py
@@ -8,7 +8,7 @@ def create_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser()
-    parser.add_argument("resource_id", help="Model name")
+    parser.add_argument("resource_path", help="Model name")
     parser.add_argument("category", help="Log category")
     parser.add_argument("summary", help="Log summary")
     parser.add_argument("--version", help="Version")
@@ -25,34 +25,34 @@ def get_args(argv: Optional[list] = None):
 def main():
     args = get_args()
-    resource_id = args.resource_id
+    resource_path = args.resource_path
     category = args.category
     summary = args.summary
     version = args.version
-    add_log_entry(resource_id, category, summary, version=version)
+    add_log_entry(resource_path, category, summary, version=version)

-def add_log_entry(resource_id, category, summary, version=None):
+def add_log_entry(resource_path, category, summary, version=None):
     timenow = datetime.datetime.now().isoformat()
     client = create_client()
     logger.info(
         "Updating log for {} with category {} and summary",
-        resource_id,
+        resource_path,
         category,
         summary,
     )
     if version is None:
-        version = client.get_unpublished_version(resource_id)
+        version = client.get_unpublished_version(resource_path)
         logger.info("Version detected: {}", version)
     else:
         logger.info("Version requested: {}", version)
-    log = client.get_log(resource_id, version)
+    log = client.get_log(resource_path, version)
     if category not in log:
         log[category] = []

     log[category].append({"timestamp": timenow, "log": summary})
-    client.put_log(resource_id, version, log)
+    client.put_log(resource_path, version, log)

 if __name__ == "__main__":
diff --git a/.github/scripts/update_status.py b/.github/scripts/update_status.py
index cb3aaf1..ad1c2d8 100644
--- a/.github/scripts/update_status.py
+++ b/.github/scripts/update_status.py
@@ -8,7 +8,7 @@ def create_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser()
-    parser.add_argument("resource_id", help="Model name")
+    parser.add_argument("resource_path", help="Model name")
     parser.add_argument("status", help="Status")
     parser.add_argument("--version", help="Version")
parser.add_argument("--step", help="Step", default=0, type=int) @@ -26,32 +26,32 @@ def get_args(argv: Optional[list] = None): def main(): args = get_args() - resource_id = args.resource_id + resource_path = args.resource_path version = args.version step = args.step num_steps = args.num_steps status = args.status - update_status(resource_id, status, version=version, step=step, num_steps=num_steps) + update_status(resource_path, status, version=version, step=step, num_steps=num_steps) -def update_status(resource_id: str, status_text: str, version: Optional[str] = None, step: Optional[int], num_steps: int = 6): +def update_status(resource_path: str, status_text: str, version: Optional[str] = None, step: Optional[int], num_steps: int = 6): assert step is None or step <= num_steps timenow = datetime.datetime.now().isoformat() client = create_client() logger.info( "Updating status for {} with text {} [steps={}, num_steps={}]", - resource_id, + resource_path, status_text, step, num_steps, ) if version is None: - version = client.get_unpublished_version(resource_id) + version = client.get_unpublished_version(resource_path) logger.info("Version detected: {}", version) else: logger.info("Version requested: {}", version) - status = client.get_status(resource_id, version) + status = client.get_status(resource_path, version) if "messages" not in status: status["messages"] = [] @@ -61,7 +61,7 @@ def update_status(resource_id: str, status_text: str, version: Optional[str] = N status["num_steps"] = num_steps status["last_message"] = status_text status["messages"].append({"timestamp": timenow, "text": status_text}) - client.put_status(resource_id, version, status) + client.put_status(resource_path, version, status) if __name__ == "__main__": diff --git a/.github/scripts/update_status.sh b/.github/scripts/update_status.sh index 0aaa9cb..dcc8505 100644 --- a/.github/scripts/update_status.sh +++ b/.github/scripts/update_status.sh @@ -5,15 +5,15 @@ # S3_FOLDER # S3_ACCESS_KEY_ID # S3_SECRET_ACCESS_KEY -# First arg is now resource_id +# First arg is now resource_path FILENAME=status.json -RESOURCE_ID=$1 +resource_path=$1 STATUS=$2 -if [ -z "$RESOURCE_ID" ]; then - printf '%s\n' "RESOURCE_ID is unset or empty" >&2; +if [ -z "$resource_path" ]; then + printf '%s\n' "resource_path is unset or empty" >&2; exit 1 fi if [ -z "$S3_HOST" ]; then @@ -40,7 +40,7 @@ fi #curl -X PUT -H 'Content-Type: application/json' -d '{"status": "'"$2"'"}' "$1" -RESOURCE="/${S3_BUCKET}/${S3_FOLDER}/${RESOURCE_ID}/${FILENAME}" +RESOURCE="/${S3_BUCKET}/${S3_FOLDER}/${resource_path}/${FILENAME}" CONTENT_TYPE="application/json" DATE=`date -R` _SIGNATURE="PUT\n\n${CONTENT_TYPE}\n${DATE}\n${RESOURCE}" diff --git a/.github/scripts/upload_model_to_zenodo.py b/.github/scripts/upload_model_to_zenodo.py index 70652b4..f7b3ad6 100644 --- a/.github/scripts/upload_model_to_zenodo.py +++ b/.github/scripts/upload_model_to_zenodo.py @@ -57,7 +57,7 @@ def assert_good_response(response, message, info=None): def create_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser() - parser.add_argument("--resource_id", help="Model name", required=True) + parser.add_argument("--resource_path", help="Model name", required=True) parser.add_argument("--version", help="Version", nargs="?", default=None) return parser @@ -79,9 +79,9 @@ def main(): # TODO: GET THE CURRENT VERSION if args.version is None: - version = client.get_unpublished_version(args.resource_id) + version = client.get_unpublished_version(args.resource_path) - s3_path = 
Path(args.resource_id, version) + s3_path = Path(args.resource_path, version) # List the files at the model URL file_urls = client.get_file_urls(path=s3_path) @@ -144,7 +144,7 @@ def main(): ) update_status( - args.resource_id, + args.resource_path, "Would be publishing now...(but leaving as draft)", step=None, num_steps=None, @@ -159,7 +159,7 @@ def main(): assert_good_response(response, "Failed to publish deposition") update_status( - args.resource_id, + args.resource_path, f"The deposition DOI is {deposition_doi}", step=None, num_steps=None, diff --git a/.github/workflows/ci_runner.yaml b/.github/workflows/ci_runner.yaml index c0bf456..14b428b 100644 --- a/.github/workflows/ci_runner.yaml +++ b/.github/workflows/ci_runner.yaml @@ -3,8 +3,8 @@ name: CI Runner on: workflow_dispatch: inputs: - resource_id: - description: 'Bioimageio ID - to be used to access the resource on S3' + resource_path: + description: 'Bioimageio wide, version specific resource identifier "resource_path/version"' required: true type: string package_url: @@ -38,19 +38,19 @@ jobs: python -m pip install "minio==7.2.3" "ruamel.yaml==0.18.5" "bioimageio.spec==0.4.9.post5" "typer" - name: Unzip model file run: | - python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Unzipping package" "1" - python .github/scripts/unzip_model.py "${{inputs.resource_id}}" "${{inputs.package_url}}" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Unzipping package" "1" + python .github/scripts/unzip_model.py "${{inputs.resource_path}}" "${{inputs.package_url}}" - name: Validate format id: validate run: | - python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Starting validation" "2" - python .github/scripts/validate_format.py "${{ inputs.resource_id }}" "${{inputs.package_url}}" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Starting validation" "2" + python .github/scripts/validate_format.py "${{ inputs.resource_path }}" "${{inputs.package_url}}" - run: | - python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Starting additional tests" "3" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Starting additional tests" "3" if: steps.validate.outputs.has_dynamic_test_cases == 'yes' - run: | - python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Validation done" "3" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Validation done" "3" if: steps.validate.outputs.has_dynamic_test_cases == 'no' @@ -90,4 +90,4 @@ jobs: steps: - uses: actions/checkout@v4 - run: | - python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Awaiting review" "4" + python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Awaiting review" "4" diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index aaced97..2a7cb9d 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -3,7 +3,7 @@ name: publish on: workflow_dispatch: inputs: - resource_id: + resource_path: description: 'Bioimageio ID of the resource - to be used to access the resource on S3' required: true type: string @@ -33,6 +33,6 @@ jobs: python -m pip install "minio==7.2.3" "loguru==0.7.2" "packaging==23.2" "spdx-license-list==3.22" "ruamel.yaml==0.18.5" "typer" - name: Publish to Zenodo run: | - python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Publishing to Zenodo" "5" - python .github/scripts/upload_model_to_zenodo.py --resource_id "${{inputs.resource_id}}" 
-          python .github/scripts/update_status.py "${{ inputs.resource_id }}" "Publishing complete" "6"
+          python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing to Zenodo" "5"
+          python .github/scripts/upload_model_to_zenodo.py --resource_path "${{inputs.resource_path}}"
+          python .github/scripts/update_status.py "${{ inputs.resource_path }}" "Publishing complete" "6"
diff --git a/functions/create_status.js b/functions/create_status.js
index 1bac0ec..bd8bfc2 100644
--- a/functions/create_status.js
+++ b/functions/create_status.js
@@ -19,7 +19,7 @@ export default async (event, context) => {
             //'ref': 'main',
             //'inputs':{
                 //'status_url': data.status_url,
-                //'resource_id': data.resource_id,
+                //'resource_path': data.resource_path,
             //}
         //})
     //};
diff --git a/functions/notify_ci.js b/functions/notify_ci.js
index f9228e2..7f08514 100644
--- a/functions/notify_ci.js
+++ b/functions/notify_ci.js
@@ -12,8 +12,8 @@ const headers = {
 export default async (event, context) => {
     const data = await event.json();

-    if (!data.resource_id) {
-        const error_message = "Failed: resource_id not found in request json";
+    if (!data.resource_path) {
+        const error_message = "Failed: resource_path not found in request json";
         console.error()
         const res = Response.json({ 'message': error_message, 'status': 500 });
         res.headers.set("Access-Control-Allow-Origin", "*");
@@ -38,7 +38,7 @@ export default async (event, context) => {
         body: JSON.stringify({
             'ref': GITHUB_BRANCH,
             'inputs': {
-                'resource_id': data.resource_id,
+                'resource_path': data.resource_path,
                 'package_url': data.package_url,
             }
         })
diff --git a/src/components/Uploader/Review.svelte b/src/components/Uploader/Review.svelte
index 33f00f8..44e2b5b 100644
--- a/src/components/Uploader/Review.svelte
+++ b/src/components/Uploader/Review.svelte
@@ -7,7 +7,7 @@
     import JSONTree from 'svelte-json-tree';

     let model_name_message = "";
-    let resource_id = uploader.resource_id;
+    let resource_path = uploader.resource_path;
     let rdf = uploader.rdf;
     let ready_to_publish = uploader.ready_to_publish();
@@ -24,14 +24,14 @@
     async function regenerate_nickname(){
         await uploader.regenerate_nickname();
-        resource_id = uploader.resource_id;
+        resource_path = uploader.resource_path;
         ready_to_publish = uploader.ready_to_publish();
         console.log("Ready to publish?", ready_to_publish);
         rdf = uploader.rdf;
         //rerender = !rerender;
     }

-    if(!resource_id) regenerate_nickname();
+    if(!resource_path) regenerate_nickname();
@@ -44,9 +44,9 @@
     {#if model_name_message }({model_name_message}){/if}
-    {#if resource_id}
-        Your model nickname is:
-        {resource_id.name} {resource_id.icon}
+    {#if resource_path}
+        Your model nickname is:
+        {resource_path.name} {resource_path.icon}
     {/if}
diff --git a/src/components/Uploader/UploadStatus.svelte b/src/components/Uploader/UploadStatus.svelte
index 532e358..83a0459 100644
--- a/src/components/Uploader/UploadStatus.svelte
+++ b/src/components/Uploader/UploadStatus.svelte
@@ -11,7 +11,7 @@
     let error_element: Object;
     let last_error_object: Error;
     let step: UploaderStep = uploader.status.step;
-    let model_name = uploader.resource_id.name;
+    let model_name = uploader.resource_path.name;
     const dispatch = createEventDispatcher();

     function copy_error_to_clipboard(text: string){
@@ -76,7 +76,7 @@
     Almost there,
     There's nothing you need to do right now. Your model is uploaded and the CI-bots have started their work!
-    You can check the status of the CI at any point from here
+    You can check the status of the CI at any point from here
diff --git a/src/components/Uploader/index.svelte b/src/components/Uploader/index.svelte
index c789cf8..edf6023 100644
--- a/src/components/Uploader/index.svelte
+++ b/src/components/Uploader/index.svelte
@@ -70,7 +70,7 @@
     {:else if step == "uploading"}
         {step="add"}} />
-
+
     {:else}
         Opps! something went wrong 😬
diff --git a/src/lib/uploader.ts b/src/lib/uploader.ts
index b503d35..d65e0fd 100644
--- a/src/lib/uploader.ts
+++ b/src/lib/uploader.ts
@@ -62,7 +62,7 @@ export class Uploader {
     error_object: Error | null = null;
     files: File[] = [];
     login_url: string | null = null;
-    resource_id: ResourceId | null = null;
+    resource_path: ResourceId | null = null;
     package_url: string | null = null;
     rdf: any = null;
     render_callbacks: (() => void)[] = [];
@@ -92,7 +92,7 @@ export class Uploader {
     }

     reset() {
-        this.resource_id = null;
+        this.resource_path = null;
         this.rdf = null;
         this.status.reset();
     }
@@ -251,7 +251,7 @@ export class Uploader {
     ready_to_publish() {
         if (!this.ready_for_review()) return false;
-        if (!this.resource_id) return false;
+        if (!this.resource_path) return false;
         return true;
     }
@@ -266,7 +266,7 @@ export class Uploader {
             const model_name = Object.assign(new ResourceId, await (await fetch(generate_name_url)).json());
             console.log("Generated name:", model_name);
             const error = "";
-            this.resource_id = model_name;
+            this.resource_path = model_name;
             this.rdf.nickname = model_name.name;
             return { model_name, error };
         } catch (err) {
@@ -278,13 +278,13 @@ export class Uploader {
     }

     async upload_file(file: File, progress_callback: null | ((val: string, tot: string) => null)) {
-        if (!this.resource_id) {
-            throw new Error("Unable to upload, resource_id not set");
+        if (!this.resource_path) {
+            throw new Error("Unable to upload, resource_path not set");
         };
         this.status.message = "Uploading";
         this.status.step = UploaderStep.UPLOADING;
         this.render();
-        const filename = `${this.resource_id.id}/${file.name}`;
+        const filename = `${this.resource_path.id}/${file.name}`;
         const url_put = await this.storage.generate_presigned_url(
             this.storage_info.bucket,
             this.storage_info.prefix + filename,
@@ -422,7 +422,7 @@ export class Uploader {
         const resp = await fetch(notify_ci_url, {
             method: 'POST',
             headers: { "Content-Type": "application/json" },
-            body: JSON.stringify({ 'resource_id': this.resource_id!.id, 'package_url': this.zip_urls!.get })
+            body: JSON.stringify({ 'resource_path': this.resource_path!.id, 'package_url': this.zip_urls!.get })
         });
         if (resp.status === 200) {
             const ci_resp = (await resp.json());