Skip to content

Commit

Permalink
Logging for backend service
Browse files Browse the repository at this point in the history
  • Loading branch information
pchlap committed Sep 28, 2022
1 parent 430bdf2 commit 4e3b213
Show file tree
Hide file tree
Showing 10 changed files with 204 additions and 161 deletions.
6 changes: 4 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ FROM ubuntu:20.04
RUN apt-get update; DEBIAN_FRONTEND="noninteractive" apt-get install -y python3-pip libgl1-mesa-glx libsm6 libxext6 libxrender-dev libglib2.0-0 curl

RUN ln -s /usr/bin/python3 /usr/bin/python
# RUN pip install --upgrade pip
RUN pip install --upgrade pip

COPY poetry.lock /platipy/poetry.lock
COPY pyproject.toml /platipy/pyproject.toml
Expand All @@ -12,4 +12,6 @@ RUN curl -sSL https://install.python-poetry.org | python - --version 1.2.1
RUN echo 'export PATH="/root/.local/bin:$PATH"' >> ~/.bashrc
RUN echo "/usr/lib/python3.8/site-packages" >> /usr/local/lib/python3.8/dist-packages/site-packages.pth

RUN /root/.local/bin/poetry config virtualenvs.create false
ENV PATH="/root/.local/bin:${PATH}"

RUN poetry config virtualenvs.create false
2 changes: 1 addition & 1 deletion dev.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ FROM platipy/platipy

RUN apt-get update; DEBIAN_FRONTEND="noninteractive" apt-get install -y redis-server git libgl1-mesa-glx libsm6 libxext6 libxrender-dev libglib2.0-0 pandoc curl

RUN env -C /platipy /root/.local/bin/poetry install --with dev,docs --all-extras
RUN cd /platipy && poetry install --with dev,docs --all-extras
39 changes: 34 additions & 5 deletions platipy/backend/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,25 +12,54 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import logging.handlers
import os
import sys
import uuid

import celery.signals
from celery import Celery
from flask import Flask
from flask_restful import Api
from flask_sqlalchemy import SQLAlchemy
from loguru import logger

from platipy.backend.application import FlaskApp


# Working directory for the service; the WORK environment variable
# overrides the process's current directory when set.
env_work = os.environ.get("WORK", os.getcwd())

# Configure Log file location (loguru sink, rotated daily)
log_file_path = os.path.join(env_work, "service.log")
logger.add(log_file_path, rotation="1 day")


def configure_logging(path=None):
    """Configure the root logger with a rotating file handler and a console handler.

    Any existing handlers on the root logger are removed first, so calling this
    repeatedly (e.g. once at import time and again from the Celery
    ``setup_logging`` signal) does not stack duplicate handlers.

    Args:
        path: Optional log file path. Defaults to the module-level
            ``log_file_path`` (``service.log`` inside the work directory).
    """
    # Fix: ``logging.handlers`` is a submodule and is NOT guaranteed to be
    # available via a bare ``import logging`` — import it explicitly.
    import logging.handlers

    if path is None:
        path = log_file_path

    root = logging.getLogger()
    root.handlers.clear()
    root.setLevel(logging.DEBUG)

    # Both handlers share the same message layout.
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    file_handler = logging.handlers.RotatingFileHandler(
        path,
        maxBytes=100 * 1024 * 1024,  # Max 100 MB per log file before rotating
        backupCount=100,  # Keep up to 100 log files in history
    )
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.DEBUG)
    root.addHandler(file_handler)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)
    console_handler.setLevel(logging.DEBUG)
    root.addHandler(console_handler)


@celery.signals.setup_logging.connect
def on_celery_setup_logging(**_signal_kwargs):
    """Install this service's logging handlers inside Celery worker processes.

    Connecting a handler to the ``setup_logging`` signal stops Celery from
    replacing the root logger configuration with its own.
    """
    configure_logging()


configure_logging()

# Create Flask app
app = FlaskApp(__name__)
Expand Down
67 changes: 26 additions & 41 deletions platipy/backend/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import werkzeug

import logging
logger = logging.getLogger(__name__)

import flask_restful
from flask_restful import Api, reqparse
Expand All @@ -35,6 +34,8 @@
from .models import db, AlchemyEncoder, APIKey, Dataset, DataObject, DicomLocation
from .tasks import run_task, retrieve_task

logger = logging.getLogger(__name__)


class CustomConfig(object):
RESTFUL_JSON = {"separators": (", ", ": "), "indent": 2, "cls": AlchemyEncoder}
Expand Down Expand Up @@ -104,23 +105,23 @@ class DicomLocationEndpoint(Resource):
"name",
required=True,
help="Name to identify this Dicom location",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"host",
required=True,
help="Dicom location host name or IP address",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"port",
type=int,
required=True,
help="The port of the Dicom location",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"ae_title", help="AE Title of the Dicom location", location=["args", "headers", 'values']
"ae_title", help="AE Title of the Dicom location", location=["args", "headers", "values"]
)

def get(self):
Expand Down Expand Up @@ -167,31 +168,29 @@ class DataObjectEndpoint(Resource):
"dataset",
required=True,
help="Dataset ID to add Data Object to",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"type",
choices=("DICOM", "FILE"),
required=True,
help="DICOM for Dicom objects to be fetched from the Dataset Dicom Location. FILE for file sent with request.",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"dicom_retrieve",
choices=("MOVE", "GET", "SEND"),
help="Used for DICOM type. The Dicom objects will be retrieved using this method.",
location=["args", "headers", 'values'],
)
parser.add_argument("seriesUID", location=["args", "headers", 'values'])
parser.add_argument("meta_data", location=["args", "headers", 'values'])
parser.add_argument("file_name", location=["args", "headers", 'values'])
parser.add_argument(
"file_data", type=werkzeug.datastructures.FileStorage, location="files"
location=["args", "headers", "values"],
)
parser.add_argument("seriesUID", location=["args", "headers", "values"])
parser.add_argument("meta_data", location=["args", "headers", "values"])
parser.add_argument("file_name", location=["args", "headers", "values"])
parser.add_argument("file_data", type=werkzeug.datastructures.FileStorage, location="files")
parser.add_argument(
"parent",
help="Data Object ID to which this data object should be linked",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)

def get(self, dataobject_id):
Expand Down Expand Up @@ -222,9 +221,7 @@ def post(self):
# Get the parent dataset if one was given
parent = None
if args["parent"]:
parent = DataObject.query.filter_by(
dataset_id=ds.id, id=args["parent"]
).first()
parent = DataObject.query.filter_by(dataset_id=ds.id, id=args["parent"]).first()

if not parent:
return {"Error": "Parent Data Object not found"}, 404
Expand Down Expand Up @@ -414,9 +411,7 @@ def get(self, dataobject_id):
return {"Error": "File could not be found, perhaps it has expired"}, 404

logger.info("Downloading file: %s", f)
return send_from_directory(
os.path.dirname(f), os.path.basename(f), as_attachment=True
)
return send_from_directory(os.path.dirname(f), os.path.basename(f), as_attachment=True)

return {"Error": "Data Object not found"}, 404

Expand All @@ -437,14 +432,14 @@ class DatasetEndpoint(Resource):
parser.add_argument(
"from_dicom_location",
help="ID of DicomLocation from which to retrieve DICOM data",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"to_dicom_location",
help="ID of DicomLocation the send output data to",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument("timeout", type=int, default=24, location=["args", "headers", 'values'])
parser.add_argument("timeout", type=int, default=24, location=["args", "headers", "values"])

def get(self, dataset_id):

Expand Down Expand Up @@ -515,9 +510,7 @@ def get(self):

result = []
for a in app.algorithms:
result.append(
{"name": a, "default_settings": app.algorithms[a].default_settings}
)
result.append({"name": a, "default_settings": app.algorithms[a].default_settings})
return result


Expand All @@ -528,18 +521,18 @@ class TriggerEndpoint(Resource):
"algorithm",
required=True,
help="The name of the algorithm to trigger",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"dataset",
required=True,
help="The ID of the dataset to pass to the algorithm",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)
parser.add_argument(
"config",
help="JSON configuration for algorithm. Default configuration will be used if not set.",
location=["args", "headers", 'values'],
location=["args", "headers", "values"],
)

def post(self):
Expand All @@ -548,11 +541,7 @@ def post(self):

if not args["algorithm"] in app.algorithms:
return (
{
"Error": "No algorithm found with name: {0}".format(
args["algorithm"]
)
},
{"Error": "No algorithm found with name: {0}".format(args["algorithm"])},
404,
)

Expand Down Expand Up @@ -598,12 +587,8 @@ def post(self):
api.add_resource(DatasetReadyEndpoint, "/api/dataset/ready/<string:dataset_id>")

api.add_resource(DataObjectsEndpoint, "/api/dataobjects")
api.add_resource(
DataObjectEndpoint, "/api/dataobject", "/api/dataobject/<string:dataobject_id>"
)
api.add_resource(
DataObjectDownloadEndpoint, "/api/dataobject/download/<string:dataobject_id>"
)
api.add_resource(DataObjectEndpoint, "/api/dataobject", "/api/dataobject/<string:dataobject_id>")
api.add_resource(DataObjectDownloadEndpoint, "/api/dataobject/download/<string:dataobject_id>")

api.add_resource(AlgorithmEndpoint, "/api/algorithm")

Expand Down
File renamed without changes.
Loading

0 comments on commit 4e3b213

Please sign in to comment.