diff --git a/.github/schemas/meta.schema.json b/.github/schemas/meta.schema.json new file mode 100644 index 00000000..245ab7ea --- /dev/null +++ b/.github/schemas/meta.schema.json @@ -0,0 +1,416 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$defs": { + "inputProperty": { + "type": "object", + "properties": { + "label": { + "type": "string", + "description": "The label for the input." + }, + "description": { + "type": "string", + "description": "A description of the input.", + "maxLength": 200 + }, + "format": { + "type": "string", + "description": "The format of the input." + }, + "modality": { + "type": "string", + "description": "The modality of the input." + }, + "bodypartexamined": { + "type": "string", + "description": "The examined body part for the input." + }, + "slicethickness": { + "type": "string", + "description": "The slice thickness of the input." + }, + "non-contrast": { + "type": "boolean", + "description": "Whether non-contrast is used." + }, + "contrast": { + "type": "boolean", + "description": "Whether contrast is used." + } + }, + "required": [ + "label", + "description", + "format", + "modality", + "bodypartexamined", + "slicethickness", + "non-contrast", + "contrast" + ] + }, + "outputSegmentation": { + "type": "object", + "properties": { + "type": { + "const": "Segmentation", + "description": "Output type is Segmentation." + }, + "classes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The classes for segmentation." + } + }, + "required": [ + "type", + "classes" + ] + }, + "outputPrediction": { + "type": "object", + "properties": { + "type": { + "const": "Prediction", + "description": "Output type is Prediction." + }, + "valueType": { + "type": "string", + "description": "The value type for predictions." + }, + "label": { + "type": "string", + "description": "The label of the prediction output." 
+ }, + "description": { + "type": "string", + "description": "A description of the prediction output." + } + }, + "required": [ + "type", + "valueType", + "label", + "description" + ] + }, + "outputClassification": { + "type": "object", + "properties": { + "type": { + "const": "Classification", + "description": "Output type is Classification." + }, + "classes": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The classes for classification." + } + }, + "required": [ + "type", + "classes" + ] + }, + "inputArray": { + "type": "array", + "items": { + "$ref": "#/$defs/inputProperty" + } + }, + "outputArray": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/$defs/outputSegmentation" + }, + { + "$ref": "#/$defs/outputPrediction" + }, + { + "$ref": "#/$defs/outputClassification" + } + ] + } + }, + "commonInfo": { + "type": "object", + "properties": { + "title": { + "type": "string" + }, + "text": { + "type": "string" + }, + "references": { + "type": "array", + "items": { + "type": "object", + "properties": { + "label": { + "type": "string" + }, + "uri": { + "type": "string", + "format": "uri" + } + }, + "required": [ + "label", + "uri" + ] + } + }, + "tables": { + "type": "array", + "items": { + "type": "object", + "properties": { + "label": { + "type": "string" + }, + "entries": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "required": [ + "label", + "entries" + ] + } + } + }, + "required": [ + "title", + "text" + ] + } + }, + "type": "object", + "required": ["id", "name", "title", "summary", "details", "info"], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The unique identifier of the model (UUID Version 4)." + }, + "name": { + "type": "string", + "pattern": "^[a-z0-9_]+$", + "description": "The name of the model, using lowercase letters, numbers, and underscores only." + }, + "title": { + "type": "string", + "description": "The title of the model." 
+ }, + "summary": { + "type": "object", + "required": ["description", "inputs", "outputs", "model", "data"], + "properties": { + "description": { + "type": "string", + "description": "A description of the model." + }, + "inputs": { + "$ref": "#/$defs/inputArray" + }, + "outputs": { + "$ref": "#/$defs/outputArray" + }, + "model": { + "type": "object", + "properties": { + "architecture": { + "type": "string", + "description": "The architecture of the model." + }, + "training": { + "type": "string", + "description": "The training method of the model.", + "enum": ["supervised", "semi-supervised", "unsupervised", "other"] + }, + "cmpapproach": { + "type": "string", + "description": "The approach for comparison (2D, 3D, ensemble).", + "enum": ["2D", "3D", "ensemble"] + } + }, + "required": [ + "architecture", + "training", + "cmpapproach" + ] + }, + "data": { + "type": "object", + "properties": { + "training": { + "type": "object", + "properties": { + "vol_samples": { + "type": "integer", + "description": "The number of volume samples used for training." + } + } + }, + "evaluation": { + "type": "object", + "properties": { + "vol_samples": { + "type": "integer", + "description": "The number of volume samples used for evaluation." + } + } + }, + "public": { + "type": "boolean", + "description": "Whether the model was evaluated on public data." + }, + "external": { + "type": "boolean", + "description": "Whether the model was cross-validated." + } + }, + "required": [ + "training", + "evaluation", + "public", + "external" + ] + } + } + }, + "details": { + "type": "object", + "required": ["name", "version", "devteam", "type", "date", "cite", "license", "publications", "github"], + "properties": { + "name": { + "type": "string", + "description": "The name of the model." + }, + "version": { + "type": "string", + "description": "The version of the model." + }, + "devteam": { + "type": "string", + "description": "The development team of the model." 
+ }, + "type": { + "type": "string", + "description": "The type of the model." + }, + "date": { + "type": "object", + "properties": { + "weights": { + "type": "string", + "description": "The date of model weights (yyyy-mm-dd).", + "format": "date" + }, + "code": { + "type": "string", + "description": "The date of model code (yyyy-mm-dd).", + "format": "date" + }, + "pub": { + "type": "string", + "description": "The publication date (yyyy-mm-dd).", + "format": "date" + } + } + }, + "cite": { + "type": "string", + "description": "Citation information for the model." + }, + "license": { + "type": "object", + "properties": { + "code": { + "type": "string", + "description": "The code license of the model." + }, + "weights": { + "type": "string", + "description": "The weights license of the model." + } + } + }, + "publications": { + "type": "array", + "items": { + "type": "object", + "properties": { + "title": { + "type": "string", + "description": "The title of the publication." + }, + "uri": { + "type": "string", + "format": "uri", + "description": "The URI of the publication." + } + }, + "required": [ + "title", + "uri" + ] + } + }, + "github": { + "type": "string", + "format": "uri", + "description": "The GitHub repository of the model." + }, + "zenodo": { + "type": "string", + "format": "uri", + "description": "The Zenodo record of the model." + }, + "colab": { + "type": "string", + "format": "uri", + "description": "The Colab notebook for the model." 
+ }, + "slicer": { + "type": "boolean", + "description": "Whether Slicer is supported.", + "default": false + } + } + }, + "info": { + "type": "object", + "required": ["use", "analyses", "evaluation", "training"], + "properties": { + "use": { + "$ref": "#/$defs/commonInfo" + }, + "analyses": { + "$ref": "#/$defs/commonInfo" + }, + "evaluation": { + "$ref": "#/$defs/commonInfo" + }, + "training": { + "$ref": "#/$defs/commonInfo" + }, + "ethics": { + "$ref": "#/$defs/commonInfo" + }, + "limitations": { + "$ref": "#/$defs/commonInfo" + } + } + } + } +} \ No newline at end of file diff --git a/.github/scripts/mhub_check.py b/.github/scripts/mhub_check.py new file mode 100644 index 00000000..6a1c0c2e --- /dev/null +++ b/.github/scripts/mhub_check.py @@ -0,0 +1,63 @@ +import utils +import os, sys, json + +# introduction +print() +print("------------------------------------------------") +print("MHub Compliance Checks started.") +print("We will check for a correct folder setup, Dockerfile and meta.json compliance.") +print() + +# print event path variable +PR = os.environ['PR_NUMBER'] +print("Pull request: ", PR) + +# get the list of modified files from the environment variable set by an earlier workflow step +modified_files = json.loads(os.environ['MODIFIED_FILES']) +print("Modified files: ", "\n ".join(modified_files)) + +# modified models list +modified_models = list(set(fp.split("/")[1] for fp in modified_files)) +print("Modified models: ", ", ".join(modified_models)) + +# we allow modifications only to a single file for now +# TODO: iterate model list (we can outsource model checks and then call a check_model script with the model name as argument) +if len(modified_models) != 1: + print("CHECK FAILED: ", "Exactly one model must be modified in a pull request.") + sys.exit(1) + +# model name +model_name = modified_models[0] + +# run compliance checks +try: + # check folder structure + utils.validateModelFolder(base='models', model_name=model_name) + + # check 
meta.json + utils.validateModelMetaJson(model_meta_json_file=os.path.join('models', model_name, 'meta.json')) + + # validate dockerfile + utils.validateDockerfile(base='models', model_name=model_name) + +except utils.MHubComplianceError as e: + print() + print("---------------- CHECK FAILED ----------------") + print("This PR violates one or more MHub compliance rules:") + print(str(e)) + print() + sys.exit(1) + +except Exception as e: + print() + print("---------------- CHECK FAILED ----------------") + print("An unexpected error occured during compliance checks.") + print() + sys.exit(1) + +# all checks passed +print() +print("---------------- CHECK PASSED ----------------") +print("All compliance checks passed.") +print("Note: compliance checks are a beta feature. Passing all automated compliance checks does not guarantee that your model is compliant with the MHub standard. We will now perform a manual review of your model. Testing your model on a public dataset is obligatory.") +print() \ No newline at end of file diff --git a/.github/scripts/utils.py b/.github/scripts/utils.py new file mode 100644 index 00000000..eb0eddb6 --- /dev/null +++ b/.github/scripts/utils.py @@ -0,0 +1,187 @@ +from typing import List, Union +from enum import Enum +import requests, os +import json +import jsonschema + +# NOTE: all file path operations are relative to the repository root. 
+ +# references for documentation +class DocuRef(Enum): + MODEL_FOLDER_STRUCTURE = "https://github.com/MHubAI/documentation/blob/main/documentation/mhub_models/model_folder_structure.md" + DOCKERFILE = "https://github.com/MHubAI/documentation/blob/main/documentation/mhub_models/the_mhub_dockerfile.md" + CONFIG = "https://github.com/MHubAI/documentation/blob/main/documentation/mhubio/the_mhubio_config_file.md" + MHUBIO_MODULES = "https://github.com/MHubAI/documentation/blob/main/documentation/mhubio/mhubio_modules.md" + MODEL_META_JSON = "https://github.com/MHubAI/documentation/blob/main/documentation/mhub_models/model_json.md" + +class MHubComplianceError(Exception): + """Raised when a model is not compliant with MHub standards""" + + def __init__(self, message: str, docu_ref: Union[DocuRef, List[DocuRef]]): + if isinstance(docu_ref, list): + msg = f"{message} (see {', '.join([d.value for d in docu_ref])})" + else: + msg = f"{message} ( see {docu_ref.value} for more information)" + + super().__init__(msg) + +def get_modified_files_from_PR(prid, repo = 'models') -> List[str]: + + # GitHub API URL to list files modified in the PR + api_url = f"https://api.github.com/repos/MHubAI/{repo}/pulls/{prid}/files" + + # Send a GET request to the GitHub API + response = requests.get(api_url) + + if response.status_code != 200: + raise Exception(f"Failed to fetch modified files: {response.status_code}") + + # Parse the JSON response and extract the file paths + modified_files = [file["filename"] for file in response.json()] + + # return list of modified files + return modified_files + +def get_modified_models_from_modified_files(modified_files: List[str]) -> List[str]: + modified_models = [] + + # Parse the JSON response and extract the file paths + for file in modified_files: + + # get the model name (/models//...) 
+ model_name = file.split("/")[1] + modified_models.append(model_name) + + # remove duplicates + modified_models = list(set(modified_models)) + + return modified_models + +def validateModelFolder(base: str, model_name: str): + + model_path = os.path.join(base, model_name) + + # check if the model folder exists + if not os.path.isdir(model_path): + raise MHubComplianceError(f"Model folder {model_path} does not exist", DocuRef.MODEL_FOLDER_STRUCTURE) + + # check if the model folder contains the following and no additional ressources + # - /dockerfiles/Dockerfile + # - /config/default.yml + # - /utils + # - /meta.json + + # check if the model folder contains a Dockerfile + dockerfile_path = os.path.join(model_path, "dockerfiles", "Dockerfile") + if not os.path.isfile(dockerfile_path): + raise MHubComplianceError(f"Model folder {model_path} does not contain a Dockerfile", [DocuRef.MODEL_FOLDER_STRUCTURE, DocuRef.DOCKERFILE]) + + # check if the model folder contains a default config + config_path = os.path.join(model_path, "config", "default.yml") + if not os.path.isfile(config_path): + raise MHubComplianceError(f"Model folder {model_path} does not contain a default workflow configuration", [DocuRef.MODEL_FOLDER_STRUCTURE, DocuRef.CONFIG]) + + # check if the model folder contains a utils folder + # NOTE: utils is not mandatory, however, all MHub-IO modules must be inside the utils folder if they exist. + # we can check modified files for any *.py and demand they're inside the utils folder. 
+ #utils_path = os.path.join(model_path, "utils") + #if not os.path.isdir(utils_path): + # raise MHubComplianceError(f"Model folder {model_path} does not contain a utils folder") + + # check if the model folder contains a model.json + model_json_path = os.path.join(model_path, "meta.json") + if not os.path.isfile(model_json_path): + raise MHubComplianceError(f"Model folder {model_path} does not contain a meta.json", [DocuRef.MODEL_FOLDER_STRUCTURE, DocuRef.MODEL_META_JSON]) + + +def validateModelMetaJson(model_meta_json_file: str): + + # load schema + with open(os.path.join('.github', 'schemas', 'meta.schema.json'), "r") as f: + schema = json.load(f) + + # load model meta json + with open(model_meta_json_file, "r") as f: + model_meta_json = json.load(f) + + # validate + try: + jsonschema.validate(instance=model_meta_json, schema=schema) + except jsonschema.ValidationError as e: + raise MHubComplianceError(f"Model meta json is not compliant with the schema: {e.message}", DocuRef.MODEL_META_JSON) + +def validateDockerfile(base: str, model_name: str): + + # get dockerfile path + model_dockerfile = os.path.join(base, model_name, "dockerfiles", "Dockerfile") + + # read dockerfile + with open(model_dockerfile, "r") as f: + dockerfile = f.read() + + # split dockerfile into lines + lines = dockerfile.split("\n") + + # remove empty lines + lines = [line for line in lines if line.strip() != ""] + + # check that the dockerfile contains only a single FROM command which + # is the first line of the file and is `FROM mhubai/base:latest` + if not lines[0].strip() == "FROM mhubai/base:latest": + raise MHubComplianceError(f"Dockerfile does not contain the correct FROM command: {lines[0]}", DocuRef.DOCKERFILE) + + # some status variables from parsing the dockerfile + dockerfile_defines_arg_mhub_models_repo = False + dockerfile_contains_mhubio_import = False + + # check that dockerfile contains no ADD or COPY commands + # We also don't allow changing the WORKDIR which is set to /app 
in the base and must be consistent across all models + # so no new line is allowed to start with ADD, COPY, WORKDIR, .. + for i, line in enumerate(lines): + + # forbidden keywords + + if line.startswith("WORKDIR"): + raise MHubComplianceError(f"WORKDIR must not be set to any other than `/app` as defined in our base image. {line}", DocuRef.DOCKERFILE) + + if line.startswith("ADD") or line.startswith("COPY"): + raise MHubComplianceError(f"Dockerfile contains ADD or COPY command: {line}", DocuRef.DOCKERFILE) + + if line.startswith("FROM") and i > 0: + raise MHubComplianceError(f"Dockerfile contains FROM command not at the beginning of the file: {line}", DocuRef.DOCKERFILE) + + # required keywords & status variables + + if line == "ARG MHUB_MODELS_REPO": + dockerfile_defines_arg_mhub_models_repo = True + + if line == f"RUN buildutils/import_mhub_model.sh {model_name} ${{MHUB_MODELS_REPO}}": + dockerfile_contains_mhubio_import = True + + # check if the dockerfile contains the required ARG MHUB_MODELS_REPO and model import + if not dockerfile_defines_arg_mhub_models_repo: + raise MHubComplianceError(f"Dockerfile does not define 'ARG MHUB_MODELS_REPO'", DocuRef.DOCKERFILE) + + if not dockerfile_contains_mhubio_import: + raise MHubComplianceError(f"Dockerfile does not contain the required mhubio import command: 'RUN buildutils/import_mhub_model.sh {model_name} ${{MHUB_MODELS_REPO}}'.", DocuRef.DOCKERFILE) + + # check that the entrypoint of the dockerfile matches + # ENTRYPOINT ["mhub.run"] | ENTRYPOINT ["python", "-m", "mhubio.run"] + if not lines[-2].strip() in ['ENTRYPOINT ["mhub.run"]', 'ENTRYPOINT ["python3", "-m", "mhubio.run"]']: + raise MHubComplianceError(f"Dockerfile does not contain the correct entrypoint: {lines[-2]}", DocuRef.DOCKERFILE) + + # CMD ["--workflow", "default"] | CMD ["--config", "/app/models/$model_name/config/default.yml"] + if not lines[-1].strip() in ['CMD ["--workflow", "default"]', f'CMD ["--config", 
"/app/models/{model_name}/config/default.yml"]']: + raise MHubComplianceError(f"Dockerfile does not contain the correct entrypoint: {lines[-1]}", DocuRef.DOCKERFILE) + + +def get_model_configuration_files(base: str, model_name: str) -> List[str]: + + # get config path + model_config_dir = os.path.join(base, model_name, "config") + + # get workflow files + model_workflows = [cf[:-4] for cf in os.listdir(model_config_dir) if cf.endswith(".yml")] + + # return list of configuration files + return model_workflows \ No newline at end of file diff --git a/.github/workflows/model_compliance.yml b/.github/workflows/model_compliance.yml new file mode 100644 index 00000000..07fc448b --- /dev/null +++ b/.github/workflows/model_compliance.yml @@ -0,0 +1,49 @@ +name: MHub Contribution Check + +on: + pull_request: + branches: + - "main" + +env: + PR_NUMBER: ${{ github.event.number }} + +jobs: + test: + name: Setup Compliance + runs-on: ubuntu-latest + + steps: + + # Checkout the latest code from the repo + - name: Checkout repo + uses: actions/checkout@v4 + + # Setup which version of Python to use + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: 3.8 + + # Display the Python version being used + - name: Display Python version + run: python -c "import sys; print(sys.version)" + + # Install Python dependencies + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install requests jsonschema + + # Get the list of files modified in the PR + - name: Get files modified in PR + id: modified_files + uses: Ana06/get-changed-files@v2.2.0 + with: + format: json + + # Run check script + - name: Run MHub compliance test + run: python .github/scripts/mhub_check.py + env: + MODIFIED_FILES: ${{ steps.modified_files.outputs.all }} diff --git a/base/utils/download_weights.sh b/base/buildutils/download_weights.sh similarity index 100% rename from base/utils/download_weights.sh rename to base/buildutils/download_weights.sh diff 
--git a/base/buildutils/import_mhub_model.sh b/base/buildutils/import_mhub_model.sh new file mode 100755 index 00000000..7968b452 --- /dev/null +++ b/base/buildutils/import_mhub_model.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Script to import the MHub model definition from GitHub. +# provide the name of the model as a parameter. +# Usage: buildutils/import_mhub_model.sh <(repo_url=https://github.com/MHubAI/models.git::main)> <(branch=main)> + +# parameters extraction +MODEL_NAME=$1 +REPO_AND_BRANCH=${2:-https://github.com/MHubAI/models.git::main} +REPO_URL=$(echo $REPO_AND_BRANCH | awk -F :: '{print $1}') +REPO_BRANCH=$(echo $REPO_AND_BRANCH | awk -F :: '{print $2}') +REPO_BRANCH=${REPO_BRANCH:-$3} +REPO_BRANCH=${REPO_BRANCH:-main} + +# printout parameters (this happens during the docker build...) +echo "Importing model definition from MHub models repository." +echo "├── MODEL NAME ..... ${MODEL_NAME}" +echo "├── REPOSITORY ..... ${REPO_URL}" +echo "└── BRANCH ......... ${REPO_BRANCH}" +echo + +# fail if model name is empty +if [ -z "$MODEL_NAME" ]; then + echo "Error: no model name provided." + exit 1 +fi + +# print a warning that the model definition is not from +# the official MHub Models repository and therefore only +# suitable for development
if [ "$REPO_URL@$REPO_BRANCH" != "https://github.com/MHubAI/models.git@main" ]; then + echo + echo "Warning: the model definition is not from the official MHub Models repository and therefore only suitable for development." 
+ echo +fi + +# perform a sparse checkout of the model definition folder +# (models/) from the referenced repository and branch +git init +git fetch ${REPO_URL} ${REPO_BRANCH} +git merge FETCH_HEAD +git sparse-checkout set "models/${MODEL_NAME}" +rm -r .git \ No newline at end of file diff --git a/base/dockerfiles/Dockerfile b/base/dockerfiles/Dockerfile index b594fd85..ba55e8f2 100644 --- a/base/dockerfiles/Dockerfile +++ b/base/dockerfiles/Dockerfile @@ -58,13 +58,14 @@ RUN pip3 install --upgrade pip && pip3 install --no-cache-dir \ RUN pip3 install git+https://github.com/MHubAI/mhubio.git \ && pip3 install git+https://github.com/MHubAI/segdb.git \ && git init \ - && git sparse-checkout set "base/utils" "base/bin" \ + && git sparse-checkout set "base/buildutils" "base/bin" \ && git fetch https://github.com/MHubAI/models.git main \ && git merge FETCH_HEAD \ - && mv base/utils . \ + && mv base/buildutils . \ && chmod +x base/bin/* \ && cp base/bin/* /usr/bin/ \ - && rm -r base + && rm -r base \ + && rm -r .git # Install DCMQI by pulling the latest release from GitHub (via GitHub API) # Run everything in a single RUN command to avoid creating intermediate layers (and allowing environment variables to be used) diff --git a/models/casust/dockerfiles/Dockerfile b/models/casust/dockerfiles/Dockerfile index 0e320041..59605844 100644 --- a/models/casust/dockerfiles/Dockerfile +++ b/models/casust/dockerfiles/Dockerfile @@ -14,16 +14,10 @@ RUN pip3 install --no-cache-dir \ torchvision \ torchio -# Clone the main branch of MHubAI/models -RUN git stash \ - && git sparse-checkout set "models/casust" \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD - # Clone the casust model RUN git clone https://github.com/LennyN95/CaSuSt /app/models/casust/src -# pull weights for casust so that the user doesn't need to every time a container is run +# Pull weights for casust so that the user doesn't need to every time a container is run ENV 
WEIGHTS_DIR="/app/models/casust/src/weights/" ENV WEIGHTS_URL="https://zenodo.org/record/7836696/files/casust_weights_7roi.zip" ENV WEIGHTS_FN="casust_weights_7roi.zip" @@ -32,7 +26,7 @@ RUN wget --directory-prefix ${WEIGHTS_DIR} ${WEIGHTS_URL} RUN unzip ${WEIGHTS_DIR}${WEIGHTS_FN} -d ${WEIGHTS_DIR} RUN rm ${WEIGHTS_DIR}${WEIGHTS_FN} -# pull weights for platipy's nnU-Net so that the user doesn't need to every time a container is run +# Pull weights for platipy's nnU-Net so that the user doesn't need to every time a container is run ENV WEIGHTS_DIR="/root/.platipy/nnUNet_models/nnUNet/" ENV WEIGHTS_URL="https://zenodo.org/record/6585664/files/Task400_OPEN_HEART_3d_lowres.zip" ENV WEIGHTS_FN="Task400_OPEN_HEART_3d_lowres.zip" @@ -41,9 +35,13 @@ RUN wget --directory-prefix ${WEIGHTS_DIR} ${WEIGHTS_URL} RUN unzip ${WEIGHTS_DIR}${WEIGHTS_FN} -d ${WEIGHTS_DIR} RUN rm ${WEIGHTS_DIR}${WEIGHTS_FN} -# specify nnunet specific environment variables +# Specify nnunet specific environment variables ENV WEIGHTS_FOLDER=$WEIGHTS_DIR +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh casust ${MHUB_MODELS_REPO} + # Default run script ENTRYPOINT ["python3", "-m", "mhubio.run"] CMD ["--config", "/app/models/casust/config/default.yml"] \ No newline at end of file diff --git a/models/gc_lunglobes/dockerfiles/Dockerfile b/models/gc_lunglobes/dockerfiles/Dockerfile index a9c4f23e..66f5a9ca 100644 --- a/models/gc_lunglobes/dockerfiles/Dockerfile +++ b/models/gc_lunglobes/dockerfiles/Dockerfile @@ -15,13 +15,9 @@ RUN pip3 install --no-cache-dir \ # SimpleITK downgrade required for legacy Resample::Execute operation RUN pip3 install --no-cache-dir --force-reinstall SimpleITK==1.2.4 -# FIXME: temporary fix waiting for the latest base image update -# Clone the main branch of MHubAI/models -RUN git stash \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD \ - && git sparse-checkout set "models/gc_lunglobes" \ - && git 
fetch https://github.com/MHubAI/models.git main +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh gc_lunglobes ${MHUB_MODELS_REPO} # Install Xie's pulmonary lobe segmentation algorithm and model weights RUN git clone https://github.com/DIAGNijmegen/bodyct-pulmonary-lobe-segmentation.git src && \ diff --git a/models/lungmask/dockerfiles/Dockerfile b/models/lungmask/dockerfiles/Dockerfile index 3e4de71e..70006ea7 100644 --- a/models/lungmask/dockerfiles/Dockerfile +++ b/models/lungmask/dockerfiles/Dockerfile @@ -4,14 +4,9 @@ FROM mhubai/base:latest RUN pip3 install --no-cache-dir \ lungmask==0.2.16 -# FIXME: temporary fix waiting for the latest base image update -# Clone the main branch of MHubAI/models -RUN git stash \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD \ - && git sparse-checkout set "models/lungmask" \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh lungmask ${MHUB_MODELS_REPO} # pull the weights for the lung segmentation 2D U-Net model ENV WEIGHTS_DIR="/root/.cache/torch/hub/checkpoints/" diff --git a/models/nnunet_liver/dockerfiles/Dockerfile b/models/nnunet_liver/dockerfiles/Dockerfile index 2ef3fbe9..3c798621 100644 --- a/models/nnunet_liver/dockerfiles/Dockerfile +++ b/models/nnunet_liver/dockerfiles/Dockerfile @@ -11,11 +11,9 @@ ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True RUN pip3 install --no-cache-dir \ nnunet==1.7.1 -# Clone the main branch of MHubAI/models -RUN git stash \ - && git sparse-checkout set "models/nnunet_liver" \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh nnunet_liver ${MHUB_MODELS_REPO} # pull weights for platipy's nnU-Net so that the user doesn't need to every time a 
container is run ENV WEIGHTS_DIR="/root/.nnunet/nnUNet_models/nnUNet/" diff --git a/models/nnunet_pancreas/dockerfiles/Dockerfile b/models/nnunet_pancreas/dockerfiles/Dockerfile index fef79d98..0e96b52c 100644 --- a/models/nnunet_pancreas/dockerfiles/Dockerfile +++ b/models/nnunet_pancreas/dockerfiles/Dockerfile @@ -11,11 +11,9 @@ ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True RUN pip3 install --no-cache-dir \ nnunet==1.7.1 -# Clone the main branch of MHubAI/models -RUN git stash \ - && git sparse-checkout set "models/nnunet_pancreas" \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh nnunet_pancreas ${MHUB_MODELS_REPO} # pull weights for platipy's nnU-Net so that the user doesn't need to every time a container is run ENV WEIGHTS_DIR="/root/.nnunet/nnUNet_models/nnUNet/" diff --git a/models/platipy/dockerfiles/Dockerfile b/models/platipy/dockerfiles/Dockerfile index 527c01bf..0c30f0e0 100644 --- a/models/platipy/dockerfiles/Dockerfile +++ b/models/platipy/dockerfiles/Dockerfile @@ -16,11 +16,9 @@ RUN pip3 install --no-cache-dir \ nnunet \ platipy[cardiac]==0.7.0 -# Clone the main branch of MHubAI/models -RUN git stash \ - && git sparse-checkout set "models/platipy" \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh platipy ${MHUB_MODELS_REPO} # pull weights for platipy's nnU-Net so that the user doesn't need to every time a container is run ENV WEIGHTS_DIR="/root/.platipy/nnUNet_models/nnUNet/" diff --git a/models/totalsegmentator/dockerfiles/Dockerfile b/models/totalsegmentator/dockerfiles/Dockerfile index 8c906979..049be54f 100644 --- a/models/totalsegmentator/dockerfiles/Dockerfile +++ b/models/totalsegmentator/dockerfiles/Dockerfile @@ -10,15 +10,13 @@ ENV 
SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True # Install TotalSegmentator RUN pip3 install --no-cache-dir totalsegmentator==1.5.6 -# Clone the main branch of MHubAI/models -RUN git stash \ - && git sparse-checkout set "models/totalsegmentator" \ - && git fetch https://github.com/MHubAI/models.git main \ - && git merge FETCH_HEAD +# Import the MHub model definiton +ARG MHUB_MODELS_REPO +RUN buildutils/import_mhub_model.sh totalsegmentator ${MHUB_MODELS_REPO} # Download weights using the script in utils # Usage: utils/download_weights.sh ... -RUN utils/download_weights.sh \ +RUN buildutils/download_weights.sh \ /root/.totalsegmentator/nnunet/results/nnUNet/3d_fullres/ \ https://zenodo.org/record/6802052/files/Task256_TotalSegmentator_3mm_1139subj.zip \ https://zenodo.org/record/6802342/files/Task251_TotalSegmentator_part1_organs_1139subj.zip \