From 4e3b2130d32c107fea58d79ab6c113ce96e1aaac Mon Sep 17 00:00:00 2001 From: Phillip Chlap Date: Wed, 28 Sep 2022 13:22:57 +1000 Subject: [PATCH] Logging for backend service --- Dockerfile | 6 +- dev.Dockerfile | 2 +- platipy/backend/__init__.py | 39 +++++- platipy/backend/api.py | 67 ++++----- platipy/{backend => }/client.py | 0 platipy/imaging/projects/cardiac/run.py | 62 ++++++--- poetry.lock | 176 ++++++++++++------------ pyproject.toml | 2 +- service.Dockerfile | 2 +- services/sample/service.py | 9 +- 10 files changed, 204 insertions(+), 161 deletions(-) rename platipy/{backend => }/client.py (100%) diff --git a/Dockerfile b/Dockerfile index 306852ba..c3898d01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM ubuntu:20.04 RUN apt-get update; DEBIAN_FRONTEND="noninteractive" apt-get install -y python3-pip libgl1-mesa-glx libsm6 libxext6 libxrender-dev libglib2.0-0 curl RUN ln -s /usr/bin/python3 /usr/bin/python -# RUN pip install --upgrade pip +RUN pip install --upgrade pip COPY poetry.lock /platipy/poetry.lock COPY pyproject.toml /platipy/pyproject.toml @@ -12,4 +12,6 @@ RUN curl -sSL https://install.python-poetry.org | python - --version 1.2.1 RUN echo 'export PATH="/root/.local/bin:$PATH"' >> ~/.bashrc RUN echo "/usr/lib/python3.8/site-packages" >> /usr/local/lib/python3.8/dist-packages/site-packages.pth -RUN /root/.local/bin/poetry config virtualenvs.create false +ENV PATH="/root/.local/bin:${PATH}" + +RUN poetry config virtualenvs.create false diff --git a/dev.Dockerfile b/dev.Dockerfile index 2a4abd0c..2bb5bb8b 100644 --- a/dev.Dockerfile +++ b/dev.Dockerfile @@ -2,4 +2,4 @@ FROM platipy/platipy RUN apt-get update; DEBIAN_FRONTEND="noninteractive" apt-get install -y redis-server git libgl1-mesa-glx libsm6 libxext6 libxrender-dev libglib2.0-0 pandoc curl -RUN env -C /platipy /root/.local/bin/poetry install --with dev,docs --all-extras +RUN cd /platipy && poetry install --with dev,docs --all-extras diff --git a/platipy/backend/__init__.py b/platipy/backend/__init__.py index dc536f4c..ae836e57 100644 --- a/platipy/backend/__init__.py +++ b/platipy/backend/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging +import sys import os import uuid @@ -19,18 +21,45 @@ from flask_sqlalchemy import SQLAlchemy from flask_restful import Api from celery import Celery -from loguru import logger +import celery.signals from platipy.backend.application import FlaskApp - env_work = os.getcwd() if "WORK" in os.environ: env_work = os.environ["WORK"] - -# Configure Log file location log_file_path = os.path.join(env_work, "service.log") -logger.add(log_file_path, rotation="1 day") + + +def configure_logging(): + logger = logging.getLogger() + + logger.handlers.clear() + logger.setLevel(logging.DEBUG) + + file_handler = logging.handlers.RotatingFileHandler( + log_file_path, + maxBytes=100 * 1024 * 1024, # Max 100 MB per log file before rotating + backupCount=100, # Keep up to 100 log files in history + ) + file_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + file_handler.setFormatter(file_formatter) + file_handler.setLevel(logging.DEBUG) + logger.addHandler(file_handler) + + console_handler = logging.StreamHandler(sys.stdout) + console_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + console_handler.setFormatter(console_formatter) + console_handler.setLevel(logging.DEBUG) + logger.addHandler(console_handler) + + +@celery.signals.setup_logging.connect +def on_celery_setup_logging(**kwargs): + configure_logging() + + +configure_logging() # Create Flask app app = FlaskApp(__name__) diff --git a/platipy/backend/api.py b/platipy/backend/api.py index 5406acb8..7c5f7fa5 100644 --- a/platipy/backend/api.py +++ b/platipy/backend/api.py @@ -21,7 +21,6 @@ import werkzeug import logging -logger = logging.getLogger(__name__) import flask_restful from flask_restful import Api, reqparse @@ -35,6 +34,8 @@ from .models import db, AlchemyEncoder, APIKey, Dataset, DataObject, DicomLocation from .tasks import run_task, retrieve_task +logger = logging.getLogger(__name__) + class CustomConfig(object): RESTFUL_JSON = {"separators": (", ", ": "), "indent": 2, "cls": AlchemyEncoder} @@ -104,23 +105,23 @@ class DicomLocationEndpoint(Resource): "name", required=True, help="Name to identify this Dicom location", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "host", required=True, help="Dicom location host name or IP address", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "port", type=int, required=True, help="The port of the Dicom location", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( - "ae_title", help="AE Title of the Dicom location", location=["args", "headers", 'values'] + "ae_title", help="AE Title of the Dicom location", location=["args", "headers", "values"] ) def get(self): @@ -167,31 +168,29 @@ class DataObjectEndpoint(Resource): "dataset", required=True, help="Dataset ID to add Data Object to", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "type", choices=("DICOM", "FILE"), required=True, help="DICOM for Dicom objects to be fetched from the Dataset Dicom Location. FILE for file sent with request.", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "dicom_retrieve", choices=("MOVE", "GET", "SEND"), help="Used for DICOM type. 
The Dicom objects will be retrieved using this method.", - location=["args", "headers", 'values'], - ) - parser.add_argument("seriesUID", location=["args", "headers", 'values']) - parser.add_argument("meta_data", location=["args", "headers", 'values']) - parser.add_argument("file_name", location=["args", "headers", 'values']) - parser.add_argument( - "file_data", type=werkzeug.datastructures.FileStorage, location="files" + location=["args", "headers", "values"], ) + parser.add_argument("seriesUID", location=["args", "headers", "values"]) + parser.add_argument("meta_data", location=["args", "headers", "values"]) + parser.add_argument("file_name", location=["args", "headers", "values"]) + parser.add_argument("file_data", type=werkzeug.datastructures.FileStorage, location="files") parser.add_argument( "parent", help="Data Object ID to which this data object should be linked", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) def get(self, dataobject_id): @@ -222,9 +221,7 @@ def post(self): # Get the parent dataset if one was given parent = None if args["parent"]: - parent = DataObject.query.filter_by( - dataset_id=ds.id, id=args["parent"] - ).first() + parent = DataObject.query.filter_by(dataset_id=ds.id, id=args["parent"]).first() if not parent: return {"Error": "Parent Data Object not found"}, 404 @@ -414,9 +411,7 @@ def get(self, dataobject_id): return {"Error": "File could not be found, perhaps it has expired"}, 404 logger.info("Downloading file: %s", f) - return send_from_directory( - os.path.dirname(f), os.path.basename(f), as_attachment=True - ) + return send_from_directory(os.path.dirname(f), os.path.basename(f), as_attachment=True) return {"Error": "Data Object not found"}, 404 @@ -437,14 +432,14 @@ class DatasetEndpoint(Resource): parser.add_argument( "from_dicom_location", help="ID of DicomLocation from which to retrieve DICOM data", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "to_dicom_location", help="ID of DicomLocation the send output data to", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) - parser.add_argument("timeout", type=int, default=24, location=["args", "headers", 'values']) + parser.add_argument("timeout", type=int, default=24, location=["args", "headers", "values"]) def get(self, dataset_id): @@ -515,9 +510,7 @@ def get(self): result = [] for a in app.algorithms: - result.append( - {"name": a, "default_settings": app.algorithms[a].default_settings} - ) + result.append({"name": a, "default_settings": app.algorithms[a].default_settings}) return result @@ -528,18 +521,18 @@ class TriggerEndpoint(Resource): "algorithm", required=True, help="The name of the algorithm to trigger", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "dataset", required=True, help="The ID of the dataset to pass to the algorithm", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) parser.add_argument( "config", help="JSON configuration for algorithm. 
Default configuration will be used if not set.", - location=["args", "headers", 'values'], + location=["args", "headers", "values"], ) def post(self): @@ -548,11 +541,7 @@ def post(self): if not args["algorithm"] in app.algorithms: return ( - { - "Error": "No algorithm found with name: {0}".format( - args["algorithm"] - ) - }, + {"Error": "No algorithm found with name: {0}".format(args["algorithm"])}, 404, ) @@ -598,12 +587,8 @@ def post(self): api.add_resource(DatasetReadyEndpoint, "/api/dataset/ready/") api.add_resource(DataObjectsEndpoint, "/api/dataobjects") -api.add_resource( - DataObjectEndpoint, "/api/dataobject", "/api/dataobject/" -) -api.add_resource( - DataObjectDownloadEndpoint, "/api/dataobject/download/" -) +api.add_resource(DataObjectEndpoint, "/api/dataobject", "/api/dataobject/") +api.add_resource(DataObjectDownloadEndpoint, "/api/dataobject/download/") api.add_resource(AlgorithmEndpoint, "/api/algorithm") diff --git a/platipy/backend/client.py b/platipy/client.py similarity index 100% rename from platipy/backend/client.py rename to platipy/client.py diff --git a/platipy/imaging/projects/cardiac/run.py b/platipy/imaging/projects/cardiac/run.py index 40064281..7d38b498 100644 --- a/platipy/imaging/projects/cardiac/run.py +++ b/platipy/imaging/projects/cardiac/run.py @@ -22,8 +22,6 @@ import SimpleITK as sitk import numpy as np -from loguru import logger - from platipy.imaging.registration.utils import apply_transform, convert_mask_to_reg_structure from platipy.imaging.registration.linear import ( @@ -55,7 +53,12 @@ from platipy.imaging.utils.crop import label_to_roi, crop_to_roi from platipy.imaging.generation.mask import extend_mask from platipy.imaging.label.utils import binary_encode_structure_list, correct_volume_overlap -from platipy.imaging.projects.nnunet.run import run_segmentation, available_nnunet_models, setup_nnunet_environment, NNUNET_SETTINGS_DEFAULTS +from platipy.imaging.projects.nnunet.run import ( + run_segmentation, + available_nnunet_models, + setup_nnunet_environment, + NNUNET_SETTINGS_DEFAULTS, +) from platipy.utils import download_and_extract_zip_file logger = logging.getLogger(__name__) @@ -375,11 +378,24 @@ } OPEN_ATLAS_SETTINGS["postprocessing_settings"]["structures_for_binaryfillhole"] = [ - "Atrium_L","Ventricle_L","Atrium_R","Ventricle_R","A_Aorta","A_Pulmonary","V_Venacava_S","Heart", + "Atrium_L", + "Ventricle_L", + "Atrium_R", + "Ventricle_R", + "A_Aorta", + "A_Pulmonary", + "V_Venacava_S", + "Heart", ] OPEN_ATLAS_SETTINGS["postprocessing_settings"]["structures_for_overlap_correction"] = [ - "Atrium_L","Ventricle_L","Atrium_R","Ventricle_R","A_Aorta","A_Pulmonary","V_Venacava_S", + "Atrium_L", + "Ventricle_L", + "Atrium_R", + "Ventricle_R", + "A_Aorta", + "A_Pulmonary", + "V_Venacava_S", ] OPEN_ATLAS_SETTINGS["return_proba_as_contours"] = True @@ -391,6 +407,7 @@ } HYBRID_SETTINGS_DEFAULTS["nnunet_settings"]["folds"] = "all" + def install_open_atlas(atlas_path): """Fetch atlas from Zenodo and place into atlas_path @@ -419,31 +436,36 @@ def install_atlas_from_zipfile(zip_file_path, atlas_path): if not atlas_path.parent.exists(): atlas_path.parent.mkdir(parents=True) - + shutil.copytree(temp_atlas_path, atlas_path) + def display_open_cardiac_zip_url_locations(): open_nnunet_heart_model = "Task400_OPEN_HEART_1FOLD" nnunet_models = available_nnunet_models() nnunet_zip_url = nnunet_models[open_nnunet_heart_model]["url"] - + print("Please download the following two zip files:") print(nnunet_zip_url) print(OPEN_ATLAS_URL) print() - print("Once 
downloaded, pass where these are located on your filesystem to the " - "install_hybrid_cardiac_from_zip function.") + print( + "Once downloaded, pass where these are located on your filesystem to the " + "install_hybrid_cardiac_from_zip function." + ) def install_hybrid_cardiac_from_zip(path_to_nnunet_zip, path_to_atlas_zip): - from nnunet.inference.pretrained_models.download_pretrained_model import install_model_from_zip_file - + from nnunet.inference.pretrained_models.download_pretrained_model import ( + install_model_from_zip_file, + ) + # Install nnUNet model setup_nnunet_environment() install_model_from_zip_file(path_to_nnunet_zip) - + # Install atlas model install_atlas_from_zipfile(path_to_atlas_zip, Path(ATLAS_PATH).parent) @@ -555,7 +577,7 @@ def run_cardiac_segmentation(img, guide_structure=None, settings=CARDIAC_SETTING final_volume = np.product(image.GetSize()) - logger.info(" > Volume reduced by factor %.2f", original_volume/final_volume) + logger.info(" > Volume reduced by factor %.2f", original_volume / final_volume) for struct in atlas_structure_list: structures[struct] = crop_to_roi( @@ -632,7 +654,7 @@ def run_cardiac_segmentation(img, guide_structure=None, settings=CARDIAC_SETTING logger.info("Calculated crop box:") logger.info(" > %s", crop_box_index) logger.info(" > %s", crop_box_size) - logger.info(" > Vol reduction = %.2f", np.product(img.GetSize())/np.product(crop_box_size)) + logger.info(" > Vol reduction = %.2f", np.product(img.GetSize()) / np.product(crop_box_size)) """ Step 2 - Rigid registration of target images @@ -642,7 +664,7 @@ def run_cardiac_segmentation(img, guide_structure=None, settings=CARDIAC_SETTING linear_registration_settings = settings["linear_registration_settings"] logger.info( - "Running %s tranform to align atlas images", linear_registration_settings['reg_method'] + "Running %s tranform to align atlas images", linear_registration_settings["reg_method"] ) for atlas_id in atlas_id_list: @@ -918,9 +940,10 @@ def run_cardiac_segmentation(img, guide_structure=None, settings=CARDIAC_SETTING if settings["return_proba_as_contours"]: atlas_contours = [ - process_probability_image(atlas_set[atlas_id]["DIR"][structure_name], 0.5) for atlas_id in atlas_id_list + process_probability_image(atlas_set[atlas_id]["DIR"][structure_name], 0.5) + for atlas_id in atlas_id_list ] - + results_prob[structure_name] = binary_encode_structure_list(atlas_contours) else: @@ -934,10 +957,11 @@ def run_cardiac_segmentation(img, guide_structure=None, settings=CARDIAC_SETTING results_prob[guide_structure_name] = guide_structure else: - + if settings["return_proba_as_contours"]: atlas_contours = [ - process_probability_image(atlas_set[atlas_id]["DIR"][structure_name], 0.5) for atlas_id in atlas_id_list + process_probability_image(atlas_set[atlas_id]["DIR"][structure_name], 0.5) + for atlas_id in atlas_id_list ] probability_img = binary_encode_structure_list(atlas_contours) template_img_prob = sitk.Cast((img * 0), sitk.sitkUInt32) diff --git a/poetry.lock b/poetry.lock index f5938508..d6472616 100644 --- a/poetry.lock +++ b/poetry.lock @@ -18,7 +18,7 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} yarl = ">=1.0,<2.0" [package.extras] -speedups = ["aiodns", "brotli", "cchardet"] +speedups = ["Brotli", "aiodns", "cchardet"] [[package]] name = "aiosignal" @@ -83,7 +83,7 @@ trio = ["trio (>=0.16)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" +category = "main" optional = false 
python-versions = "*" @@ -186,13 +186,13 @@ optional = false python-versions = ">=3.5" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope-interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope-interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope-interface"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] -name = "babel" +name = "Babel" version = "2.10.3" description = "Internationalization utilities" category = "dev" @@ -206,7 +206,7 @@ pytz = ">=2015.7" name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -344,7 +344,7 @@ s3 = ["boto3 (>=1.9.125)"] slmq = ["softlayer-messaging (>=1.0.3)"] solar = ["ephem"] sqlalchemy = ["sqlalchemy"] -sqs = ["kombu"] +sqs = ["kombu[sqs]"] tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=1.3.1)"] @@ -466,7 +466,7 @@ python-versions = ">=3.7" name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -532,7 +532,7 @@ python-versions = "*" devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] -name = "flask" +name = "Flask" version = "2.2.2" description = "A simple framework for building complex web applications." category = "main" @@ -551,7 +551,7 @@ async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] [[package]] -name = "flask-restful" +name = "Flask-RESTful" version = "0.3.9" description = "Simple framework for creating REST APIs" category = "main" @@ -568,7 +568,7 @@ six = ">=1.3.0" docs = ["sphinx"] [[package]] -name = "flask-sqlalchemy" +name = "Flask-SQLAlchemy" version = "2.5.1" description = "Adds SQLAlchemy support to your Flask application." 
category = "main" @@ -640,7 +640,7 @@ optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.extras] -docs = ["sphinx"] +docs = ["Sphinx"] [[package]] name = "gunicorn" @@ -683,17 +683,17 @@ pillow = ">=8.3.2" all-plugins = ["astropy", "av", "imageio-ffmpeg", "opencv-python", "psutil", "tifffile"] all-plugins-pypy = ["av", "imageio-ffmpeg", "psutil", "tifffile"] build = ["wheel"] -dev = ["black", "flake8", "fsspec", "invoke", "pytest", "pytest-cov"] +dev = ["black", "flake8", "fsspec[github]", "invoke", "pytest", "pytest-cov"] docs = ["numpydoc", "pydata-sphinx-theme", "sphinx"] ffmpeg = ["imageio-ffmpeg", "psutil"] fits = ["astropy"] -full = ["astropy", "av", "black", "flake8", "fsspec", "gdal", "imageio-ffmpeg", "invoke", "itk", "numpydoc", "opencv-python", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx", "tifffile", "wheel"] +full = ["astropy", "av", "black", "flake8", "fsspec[github]", "gdal", "imageio-ffmpeg", "invoke", "itk", "numpydoc", "opencv-python", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx", "tifffile", "wheel"] gdal = ["gdal"] itk = ["itk"] linting = ["black", "flake8"] opencv = ["opencv-python"] pyav = ["av"] -test = ["fsspec", "invoke", "pytest", "pytest-cov"] +test = ["fsspec[github]", "invoke", "pytest", "pytest-cov"] tifffile = ["tifffile"] [[package]] @@ -719,7 +719,7 @@ zipp = ">=0.5" [package.extras] docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["flufl-flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -772,7 +772,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=6.0)", "pytest-cov", "p name = "ipython" version = "7.34.0" description = "IPython: Productive Interactive Computing" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -802,7 +802,7 @@ qtconsole = ["qtconsole"] test = ["ipykernel", "nbformat", "nose (>=0.10.1)", "numpy (>=1.17)", "pygments", "requests", "testpath"] [[package]] -name = "ipython-genutils" +name = "ipython_genutils" version = "0.2.0" description = "Vestigial utilities from IPython" category = "dev" @@ -835,7 +835,7 @@ python-versions = ">=3.7" name = "jedi" version = "0.18.1" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -847,7 +847,7 @@ qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] -name = "jinja2" +name = "Jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "main" @@ -937,7 +937,7 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-server" -version = "1.19.0" +version = "1.19.1" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false @@ -985,7 +985,7 @@ tomli = "*" tornado = ">=6.1.0" [package.extras] -test = ["check-manifest", "coverage", "jupyterlab-server", "pre-commit", "pytest (>=6.0)", "pytest-check-links (>=0.5)", "pytest-console-scripts", "pytest-cov", "requests", "requests-cache", "virtualenv"] +test = ["check-manifest", "coverage", "jupyterlab-server[test]", "pre-commit", "pytest (>=6.0)", "pytest-check-links (>=0.5)", "pytest-console-scripts", "pytest-cov", "requests", "requests-cache", "virtualenv"] ui-tests = ["build"] [[package]] @@ -1016,7 +1016,7 @@ requests = "*" [package.extras] openapi = ["openapi-core (>=0.14.2)", "ruamel-yaml"] -test = ["codecov", "ipykernel", "jupyter-server", "openapi-core (>=0.14.2,<0.15.0)", "openapi-spec-validator (<0.5)", "pytest (>=5.3.2)", "pytest-console-scripts", "pytest-cov", "ruamel-yaml", "strict-rfc3339"] +test = ["codecov", "ipykernel", "jupyter-server[test]", "openapi-core (>=0.14.2,<0.15.0)", "openapi-spec-validator (<0.5)", "pytest (>=5.3.2)", "pytest-console-scripts", "pytest-cov", "ruamel-yaml", "strict-rfc3339"] [[package]] name = "kiwisolver" @@ -1101,7 +1101,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] -htmlsoup = ["beautifulsoup4"] +htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.7)"] [[package]] @@ -1117,7 +1117,7 @@ docutils = ">=0.19" mistune = "0.8.4" [[package]] -name = "markupsafe" +name = "MarkupSafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "main" @@ -1147,7 +1147,7 @@ setuptools_scm = ">=4,<7" name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -1163,7 +1163,7 @@ optional = false python-versions = ">=3.6" [[package]] -name = "medpy" +name = "MedPy" version = "0.4.0" description = "Medical image processing in Python" category = "main" @@ -1552,7 +1552,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -1572,7 +1572,7 @@ python-versions = ">=3.7" name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" +category = "main" optional = false python-versions = "*" @@ -1583,12 +1583,12 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" +category = "main" optional = false python-versions = "*" [[package]] -name = "pillow" +name = "Pillow" version = "9.2.0" description = "Python Imaging Library (Fork)" category = "main" @@ -1600,7 +1600,7 @@ docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] -name = "pkgutil-resolve-name" +name = "pkgutil_resolve_name" version = "1.3.10" description = "Resolve a name to an object." 
category = "dev" @@ -1682,7 +1682,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -1714,10 +1714,10 @@ python-versions = ">=3.6.1" docs = ["matplotlib", "numpy", "numpydoc", "pillow", "sphinx", "sphinx-copybutton", "sphinx-gallery", "sphinx-rtd-theme", "sphinxcontrib-napoleon"] [[package]] -name = "pygments" +name = "Pygments" version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -1762,26 +1762,28 @@ dev = ["black", "bump2version", "gitchangelog", "mypy", "pystache"] [[package]] name = "pymedphys" -version = "0.37.1" +version = "0.38.0" description = "Medical Physics library" category = "main" optional = true python-versions = ">=3.7,<4.0" [package.dependencies] +ipython = ">=7.31.1" typing-extensions = "*" [package.extras] +cli = ["toml"] comparables = ["flashgamma"] -dev = ["attrs", "black (>=20.8b1,<21.0)", "dbfread", "dicompyler-core", "doc8", "fsspec", "hypothesis (<6)", "imageio", "jupyter-book (>=0.8.3)", "keyring", "matplotlib", "mypy", "natsort", "networkx", "numpy (>=1.20.2)", "packaging", "pandas (>=1.0.0)", "pillow", "plotly", "pre-commit", "psutil", "pydicom (>=2.0.0)", "pylibjpeg-libjpeg", "pylinac (==2.3.2)", "pylint", "pymssql", "pynetdicom", "pytest", "pytest-rerunfailures", "pytest-sugar", "python-dateutil", "pyyaml", "readme-renderer", "reportlab (<=3.5.54)", "requests", "rope", "scikit-image (>=0.18.1)", "scikit-learn", "scipy", "shapely (>=1.7.0)", "sphinx-argparse", "sphinx-book-theme (==0.0.42)", "sphinxcontrib-napoleon", "sqlalchemy", "streamlit (==0.82.0)", "streamlit-ace (==0.0.4)", "tabulate", "timeago", "tomlkit", "tqdm", "watchdog", "xarray", "xlsxwriter", "xmltodict"] +dev = ["Pillow (>=9.0.0)", "PyYAML", "astroid", "attrs", "black (>=21.10b0,<22.0)", "dbfread", "dicompyler-core", "doc8", "fsspec", "hypothesis (<6)", "imageio (<2.11)", "jupyter-book (>=0.12.0)", "keyring", "matplotlib", "mypy", "natsort", "networkx", "numpy (>=1.20.2)", "packaging", "pandas (>=1.0.0)", "plotly", "pre-commit", "psutil", "pydicom (>=2.0.0)", "pylibjpeg-libjpeg (<1.3.0)", "pylinac (==2.3.2)", "pylint", "pymssql", "pynetdicom", "pyoxidizer", "pytest", "pytest-rerunfailures", "pytest-sugar", "python-dateutil", "pywin32 (>=301)", "readme-renderer", "reportlab (>=3.6)", "requests", "rope", "scikit-image (>=0.18.1)", "scikit-learn", "scipy", "shapely (>=1.7.0)", "sphinx-argparse", "sphinx-book-theme", "sphinxcontrib-napoleon", "sqlalchemy (<1.4)", "streamlit (==1.2.0)", "streamlit-ace (==0.1.0)", "tabulate", "timeago", "toml", "tomlkit", "tqdm", "watchdog", "xarray", "xlsxwriter", "xmltodict"] dicom = ["pydicom (>=2.0.0)", "pynetdicom"] -docs = ["jupyter-book (>=0.8.3)", "networkx", "sphinx-argparse", "sphinx-book-theme (==0.0.42)", "sphinxcontrib-napoleon"] -doctests = ["black (>=20.8b1,<21.0)", "pylinac (==2.3.2)", "sphinx-book-theme (==0.0.42)", "tabulate"] +docs = ["jupyter-book (>=0.12.0)", "networkx", "sphinx-argparse", "sphinx-book-theme", "sphinxcontrib-napoleon"] +doctests = ["black (>=21.10b0,<22.0)", "pylinac (==2.3.2)", "sphinx-book-theme", "tabulate"] icom = ["numpy (>=1.20.2)"] -mosaiq = ["pandas (>=1.0.0)", "pymssql", "scikit-learn", "sqlalchemy"] -propagate = ["black (>=20.8b1,<21.0)", "tomlkit"] -tests = ["hypothesis (<6)", "psutil", "pylint", "pytest", 
"pytest-rerunfailures", "pytest-sugar", "python-dateutil", "tqdm"] -user = ["attrs", "dbfread", "dicompyler-core", "fsspec", "imageio", "keyring", "matplotlib", "natsort", "numpy (>=1.20.2)", "packaging", "pandas (>=1.0.0)", "pillow", "plotly", "pydicom (>=2.0.0)", "pylibjpeg-libjpeg", "pylinac (==2.3.2)", "pymssql", "pynetdicom", "python-dateutil", "pyyaml", "reportlab (<=3.5.54)", "requests", "scikit-image (>=0.18.1)", "scikit-learn", "scipy", "shapely (>=1.7.0)", "sqlalchemy", "streamlit (==0.82.0)", "streamlit-ace (==0.0.4)", "timeago", "tomlkit", "tqdm", "watchdog", "xarray", "xlsxwriter", "xmltodict"] +mosaiq = ["pandas (>=1.0.0)", "pymssql", "scikit-learn", "sqlalchemy (<1.4)"] +propagate = ["black (>=21.10b0,<22.0)", "tomlkit"] +tests = ["astroid", "hypothesis (<6)", "psutil", "pylint", "pytest", "pytest-rerunfailures", "pytest-sugar", "python-dateutil", "tqdm"] +user = ["Pillow (>=9.0.0)", "PyYAML", "attrs", "dbfread", "dicompyler-core", "fsspec", "imageio (<2.11)", "keyring", "matplotlib", "natsort", "numpy (>=1.20.2)", "packaging", "pandas (>=1.0.0)", "plotly", "pydicom (>=2.0.0)", "pylibjpeg-libjpeg (<1.3.0)", "pylinac (==2.3.2)", "pymssql", "pynetdicom", "python-dateutil", "pywin32 (>=301)", "reportlab (>=3.6)", "requests", "scikit-image (>=0.18.1)", "scikit-learn", "scipy", "shapely (>=1.7.0)", "sqlalchemy (<1.4)", "streamlit (==1.2.0)", "streamlit-ace (==0.1.0)", "timeago", "toml", "tomlkit", "tqdm", "watchdog", "xarray", "xlsxwriter", "xmltodict"] [[package]] name = "pynetdicom" @@ -1868,7 +1870,7 @@ optional = false python-versions = "*" [[package]] -name = "pywavelets" +name = "PyWavelets" version = "1.3.0" description = "PyWavelets, wavelet transform module" category = "main" @@ -1970,7 +1972,7 @@ tifffile = ">=2019.7.26" [package.extras] data = ["pooch (>=1.3.0)"] docs = ["cloudpickle (>=0.2.1)", "dask[array] (>=0.15.0,!=2.17.0)", "ipywidgets", "matplotlib (>=3.0.1)", "myst-parser", "numpydoc (>=1.0)", "pandas (>=0.23.0)", "plotly (>=4.10.0)", "pooch (>=1.3.0)", "pytest-runner", "scikit-learn", "seaborn (>=0.7.1)", "sphinx (>=1.8,<=2.4.4)", "sphinx-copybutton", "sphinx-gallery (>=0.7.0,!=0.8.0)", "tifffile (>=2020.5.30)"] -optional = ["astropy (>=3.1.2)", "cloudpickle (>=0.2.1)", "dask[array] (>=1.0.0,!=2.17.0)", "pooch (>=1.3.0)", "pyamg", "qtpy", "simpleitk"] +optional = ["SimpleITK", "astropy (>=3.1.2)", "cloudpickle (>=0.2.1)", "dask[array] (>=1.0.0,!=2.17.0)", "pooch (>=1.3.0)", "pyamg", "qtpy"] test = ["codecov", "flake8", "pooch (>=1.3.0)", "pytest (>=5.2.0)", "pytest-cov (>=2.7.0)", "pytest-faulthandler", "pytest-localserver"] [[package]] @@ -2005,7 +2007,7 @@ python-versions = ">=3.7" numpy = ">=1.16.5" [[package]] -name = "send2trash" +name = "Send2Trash" version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." 
category = "dev" @@ -2013,8 +2015,8 @@ optional = false python-versions = "*" [package.extras] -nativelib = ["pyobjc-framework-cocoa", "pywin32"] -objc = ["pyobjc-framework-cocoa"] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] win32 = ["pywin32"] [[package]] @@ -2027,8 +2029,8 @@ python-versions = ">=3.7" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "setuptools-scm" @@ -2048,7 +2050,7 @@ test = ["pytest (>=6.2)", "virtualenv (>20)"] toml = ["setuptools (>=42)"] [[package]] -name = "simpleitk" +name = "SimpleITK" version = "2.2.0" description = "SimpleITK is a simplified interface to the Insight Toolkit (ITK) for image registration and segmentation" category = "main" @@ -2099,8 +2101,8 @@ optional = false python-versions = ">=3.6" [[package]] -name = "sphinx" -version = "5.2.1" +name = "Sphinx" +version = "5.2.2" description = "Python documentation generator" category = "dev" optional = false @@ -2127,8 +2129,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.971)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx-basic-ng" @@ -2249,7 +2251,7 @@ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] -name = "sqlalchemy" +name = "SQLAlchemy" version = "1.4.41" description = "Database Abstraction Library" category = "main" @@ -2279,7 +2281,7 @@ postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql 
(<1)"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "terminado" @@ -2331,8 +2333,8 @@ python-versions = ">=3.6" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -test = ["coverage", "pytest", "pytest-cov", "pytest-flake8", "pytest-isort"] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["coverage[toml]", "pytest", "pytest-cov", "pytest-flake8", "pytest-isort"] [[package]] name = "toml" @@ -2401,7 +2403,7 @@ linecache2 = "*" name = "traitlets" version = "5.4.0" description = "" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" @@ -2514,7 +2516,7 @@ optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] -name = "werkzeug" +name = "Werkzeug" version = "2.2.2" description = "The comprehensive WSGI web application library." category = "main" @@ -2583,7 +2585,7 @@ python-versions = ">=3.7" [package.extras] docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] backend = ["Flask", "Flask-RESTful", "Flask-SQLAlchemy", "celery", "redis", "psutil", "gunicorn", "Jinja2", "pymedphys", "loguru"] @@ -2593,7 +2595,7 @@ nnunet = ["nnunet"] [metadata] lock-version = "1.1" python-versions = "^3.7.1" -content-hash = "b219027efc63eefe21af0e86c90a6b68a69fc52cc99205e6ba5a05bc31c284cf" +content-hash = "a9fcb69fe2043a2e83ccfdd2107f5f4ee713288b7c547e277112deb7025e2f51" [metadata.files] aiohttp = [ @@ -2763,7 +2765,7 @@ attrs = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] -babel = [ +Babel = [ {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, ] @@ -2969,15 +2971,15 @@ fastjsonschema = [ {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"}, {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"}, ] -flask = [ +Flask = [ {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, ] -flask-restful = [ +Flask-RESTful = [ {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, ] -flask-sqlalchemy = [ +Flask-SQLAlchemy = [ {file = "Flask-SQLAlchemy-2.5.1.tar.gz", hash = "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912"}, {file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = 
"sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"}, ] @@ -3145,7 +3147,7 @@ ipython = [ {file = "ipython-7.34.0-py3-none-any.whl", hash = "sha256:c175d2440a1caff76116eb719d40538fbb316e214eda85c5515c303aacbfb23e"}, {file = "ipython-7.34.0.tar.gz", hash = "sha256:af3bdb46aa292bce5615b1b2ebc76c2080c5f77f54bda2ec72461317273e7cd6"}, ] -ipython-genutils = [ +ipython_genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] @@ -3161,7 +3163,7 @@ jedi = [ {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, ] -jinja2 = [ +Jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] @@ -3186,8 +3188,8 @@ jupyter-core = [ {file = "jupyter_core-4.11.1.tar.gz", hash = "sha256:2e5f244d44894c4154d06aeae3419dd7f1b0ef4494dc5584929b398c61cfd314"}, ] jupyter-server = [ - {file = "jupyter_server-1.19.0-py3-none-any.whl", hash = "sha256:310531a4ed1a5088daec9b930cfda08952d1e3809d0605e4b137af0a6da11669"}, - {file = "jupyter_server-1.19.0.tar.gz", hash = "sha256:5d9b58c92011db1854a8ddeeac46e16bb37281e8f45156d4d0b2fb745f9561dd"}, + {file = "jupyter_server-1.19.1-py3-none-any.whl", hash = "sha256:ea3587840f2a906883c9eecb6bc85ef87ba1b7ba4cb6eafbacfac4a568862106"}, + {file = "jupyter_server-1.19.1.tar.gz", hash = "sha256:d1cc3596945849742bc3eedf0699feeb50ad6c6045ebef02a9298b7f13c27e9f"}, ] jupyterlab = [ {file = "jupyterlab-3.4.7-py3-none-any.whl", hash = "sha256:30c64bc0aa0ba09959ab6fd5c74f08a6ae64656b46a29e2522142a5fda0dc486"}, @@ -3398,7 +3400,7 @@ m2r2 = [ {file = "m2r2-0.3.3-py3-none-any.whl", hash = "sha256:2ee32a5928c3598b67c70e6d22981ec936c03d5bfd2f64229e77678731952f16"}, {file = "m2r2-0.3.3.tar.gz", hash = "sha256:f9b6e9efbc2b6987dbd43d2fd15a6d115ba837d8158ae73295542635b4086e75"}, ] -markupsafe = [ +MarkupSafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -3485,7 +3487,7 @@ mccabe = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] -medpy = [ +MedPy = [ {file = "MedPy-0.4.0.tar.gz", hash = "sha256:f8a94937dbb947ab069e767862dc6b86896b153c41ce8ed9369c7d79c0033a88"}, ] mistune = [ @@ -3732,7 +3734,7 @@ pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] -pillow = [ +Pillow = 
[ {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, @@ -3792,7 +3794,7 @@ pillow = [ {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, ] -pkgutil-resolve-name = [ +pkgutil_resolve_name = [ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] @@ -3866,7 +3868,7 @@ pydicom = [ {file = "pydicom-2.3.0-py3-none-any.whl", hash = "sha256:8ff31e077cc51d19ac3b8ca988ac486099cdebfaf885989079fdc7c75068cdd8"}, {file = "pydicom-2.3.0.tar.gz", hash = "sha256:dbfa081c9ad9ac8ff8a8efbd71784104db9eecf02fd775f7d7773f2183f89386"}, ] -pygments = [ +Pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] @@ -3879,8 +3881,8 @@ pylint-flask-sqlalchemy = [ {file = "pylint_flask_sqlalchemy-0.2.0.tar.gz", hash = "sha256:8ede5baba1a465d8ba39d8383ffcf0889d7a6afeff44bd24177fcf529ba8aa81"}, ] pymedphys = [ - {file = "pymedphys-0.37.1-py3-none-any.whl", hash = "sha256:d17eed1e3e12220dcd1cc9ff80dd4724a25bfec6c1b6903ec6fe624d98ed1349"}, - {file = "pymedphys-0.37.1.tar.gz", hash = "sha256:d0ebc6ff2ddf8b0c9569cfe60627a0a4e02a749d8ad2ca5ef74b6a683078f27b"}, + {file = "pymedphys-0.38.0-py3-none-any.whl", hash = "sha256:cfc6ac8429744b1da99e8f84dc66f8d5480ebb3170e314198577d8fc241862e7"}, + {file = "pymedphys-0.38.0.tar.gz", hash = "sha256:1320aed5ada5de1bb783ba301a62b443f3b633c4e85caefcbfa99810eb755aba"}, ] pynetdicom = [ {file = "pynetdicom-2.0.2-py3-none-any.whl", hash = "sha256:6726173d25a51f66f2a4557d816c0f93b3b2a8435ce3d319e6cdd8e48bf657d5"}, @@ -3962,7 +3964,7 @@ pytz = [ {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, ] -pywavelets = [ +PyWavelets = [ {file = "PyWavelets-1.3.0-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:eebaa9c28600da336743fefd650332460c132792660e70eb09abf343b0664b87"}, {file = "PyWavelets-1.3.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:3eeffcf2f7eebae5cc27cb11a7d0d96118e2e9f75ac38ff1a05373d5fe75accb"}, {file = "PyWavelets-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:35a945bea9da6db9755e42e06e871846514ae91bde3ae24a08a1d090b003a23b"}, @@ -4181,7 +4183,7 @@ scipy = [ {file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"}, {file = "scipy-1.6.1.tar.gz", hash = "sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"}, ] -send2trash = [ +Send2Trash = [ {file = 
"Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, ] @@ -4193,7 +4195,7 @@ setuptools-scm = [ {file = "setuptools_scm-6.4.2-py3-none-any.whl", hash = "sha256:acea13255093849de7ccb11af9e1fb8bde7067783450cee9ef7a93139bddf6d4"}, {file = "setuptools_scm-6.4.2.tar.gz", hash = "sha256:6833ac65c6ed9711a4d5d2266f8024cfa07c533a0e55f4c12f6eff280a5a9e30"}, ] -simpleitk = [ +SimpleITK = [ {file = "SimpleITK-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d8cf83a9165b6ee3f3f4f599e609f55d823171af47f67ef11a8237f60245e0e"}, {file = "SimpleITK-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4b129a0e53633432000e9561a80a0631c09efda83e754f623eb1d890ed169b4"}, {file = "SimpleITK-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d14e2b8a69862d1ac46e5bfbea9af0159a05e928bdad016e1639e5dbb927f803"}, @@ -4233,9 +4235,9 @@ soupsieve = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] -sphinx = [ - {file = "Sphinx-5.2.1.tar.gz", hash = "sha256:c009bb2e9ac5db487bcf53f015504005a330ff7c631bb6ab2604e0d65bae8b54"}, - {file = "sphinx-5.2.1-py3-none-any.whl", hash = "sha256:3dcf00fcf82cf91118db9b7177edea4fc01998976f893928d0ab0c58c54be2ca"}, +Sphinx = [ + {file = "Sphinx-5.2.2.tar.gz", hash = "sha256:7225c104dc06169eb73b061582c4bc84a9594042acae6c1582564de274b7df2f"}, + {file = "sphinx-5.2.2-py3-none-any.whl", hash = "sha256:9150a8ed2e98d70e778624373f183c5498bf429dd605cf7b63e80e2a166c35a5"}, ] sphinx-basic-ng = [ {file = "sphinx_basic_ng-0.0.1a12-py3-none-any.whl", hash = "sha256:e8b6efd2c5ece014156de76065eda01ddfca0fee465aa020b1e3c12f84570bbe"}, @@ -4277,7 +4279,7 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] -sqlalchemy = [ +SQLAlchemy = [ {file = "SQLAlchemy-1.4.41-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:13e397a9371ecd25573a7b90bd037db604331cf403f5318038c46ee44908c44d"}, {file = "SQLAlchemy-1.4.41-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2d6495f84c4fd11584f34e62f9feec81bf373787b3942270487074e35cbe5330"}, {file = "SQLAlchemy-1.4.41-cp27-cp27m-win32.whl", hash = "sha256:e570cfc40a29d6ad46c9aeaddbdcee687880940a3a327f2c668dd0e4ef0a441d"}, @@ -4487,7 +4489,7 @@ websocket-client = [ {file = "websocket-client-1.4.1.tar.gz", hash = "sha256:f9611eb65c8241a67fb373bef040b3cf8ad377a9f6546a12b620b6511e8ea9ef"}, {file = "websocket_client-1.4.1-py3-none-any.whl", hash = "sha256:398909eb7e261f44b8f4bd474785b6ec5f5b499d4953342fe9755e01ef624090"}, ] -werkzeug = [ +Werkzeug = [ {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, ] diff --git a/pyproject.toml b/pyproject.toml index 5c1d2f01..d0f88177 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ redis = { version = "^3.5.3", 
optional = true } psutil = { version = "^5.8.0", optional = true } gunicorn = { version = "^20.0.4", optional = true } Jinja2 = { version = "^3.1", optional = true } -pymedphys = { version = "^0.37.1", optional = true } +pymedphys = { version = "^0.38.0", optional = true } loguru = { version = "^0.6.0", optional = true } diff --git a/service.Dockerfile b/service.Dockerfile index 3dd22d41..4c64cf7a 100644 --- a/service.Dockerfile +++ b/service.Dockerfile @@ -3,7 +3,7 @@ FROM platipy/platipy RUN apt-get update; DEBIAN_FRONTEND="noninteractive" apt-get install -y redis-server COPY . /code -RUN env -C /code /root/.local/bin/poetry install -E backend +RUN cd /code && poetry install -E backend --without dev,docs COPY entrypoint.sh /entrypoint.sh RUN chmod +x /entrypoint.sh diff --git a/services/sample/service.py b/services/sample/service.py index 000b0bf9..cf549a15 100644 --- a/services/sample/service.py +++ b/services/sample/service.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +from pathlib import Path import SimpleITK as sitk @@ -20,9 +21,7 @@ logger = logging.getLogger(__name__) -SAMPLE_SETTINGS = { - "HU_BONE_THRESHOLD": 200 -} +SAMPLE_SETTINGS = {"HU_BONE_THRESHOLD": 200} @app.register("Bone Segmentation Sample", default_settings=SAMPLE_SETTINGS) @@ -41,6 +40,8 @@ def bone_segmentation(data_objects, working_dir, settings): logger.info("Running bone segmentation sample") logger.info("Using settings: %s", settings) + working_dir = Path(working_dir) + output_objects = [] for data_obj in data_objects: @@ -58,7 +59,7 @@ def bone_segmentation(data_objects, working_dir, settings): mask_file = working_dir.joinpath("mask.nii.gz") sitk.WriteImage(mask, mask_file) - mask_data_object = DataObject(type="FILE", path=mask_file, parent=data_obj) + mask_data_object = DataObject(type="FILE", path=str(mask_file), parent=data_obj) output_objects.append(mask_data_object) return output_objects
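
For reference, below is a minimal standalone sketch (not part of the patch itself) of the logging pattern the patch introduces in platipy/backend/__init__.py: a rotating file handler plus a stdout handler attached to the root logger, reapplied inside Celery workers by connecting to the setup_logging signal. The handler sizes and format string mirror the values in the patch; the log path here and the explicit logging.handlers import are assumptions added so the sketch runs on its own.

    # Illustrative sketch of the backend logging setup adopted by this patch.
    import logging
    import logging.handlers  # RotatingFileHandler lives in the handlers submodule
    import sys

    import celery.signals

    LOG_FILE = "service.log"  # hypothetical path for this sketch
    LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"


    def configure_logging():
        root = logging.getLogger()
        root.handlers.clear()
        root.setLevel(logging.DEBUG)

        # Rotate at 100 MB per file, keeping up to 100 backups (as in the patch)
        file_handler = logging.handlers.RotatingFileHandler(
            LOG_FILE, maxBytes=100 * 1024 * 1024, backupCount=100
        )
        file_handler.setFormatter(logging.Formatter(LOG_FORMAT))
        file_handler.setLevel(logging.DEBUG)
        root.addHandler(file_handler)

        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(logging.Formatter(LOG_FORMAT))
        console_handler.setLevel(logging.DEBUG)
        root.addHandler(console_handler)


    @celery.signals.setup_logging.connect
    def on_celery_setup_logging(**kwargs):
        # Connecting to setup_logging stops Celery from installing its own
        # root handlers and reuses the service configuration in each worker.
        configure_logging()


    configure_logging()

Because the handlers hang off the root logger, module-level loggers created with logging.getLogger(__name__) — as in platipy/backend/api.py, platipy/imaging/projects/cardiac/run.py and services/sample/service.py above — inherit both the file and console outputs without any further setup.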