diff --git a/.devcontainer/scripts/post_create.sh b/.devcontainer/scripts/post_create.sh index ed5c418..07ef7cf 100644 --- a/.devcontainer/scripts/post_create.sh +++ b/.devcontainer/scripts/post_create.sh @@ -1,7 +1,14 @@ #!/bin/bash # this script install system dependencies for vectorizing and sets up conda -sudo apt-get update -sudo apt-get install -y build-essential python-dev libagg-dev libpotrace-dev pkg-config libffi-dev libcairo2-dev +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +# install system dependencies +bash "$SCRIPT_DIR"/../../scripts/install_system_dependencies.sh + +# setup conda conda env create -n dev -f envs/dev.yaml -conda init \ No newline at end of file +conda init + +# setup pre-commit +pre-commit install --install-hooks \ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..9cc1876 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,176 @@ +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +.idea/ + +# Other files +heroku.yml +docker-compose.yml +.pre-commit-config.yaml +.vscode/ +.gitignore +.github/ +.devcontainer/ +.env.example +LICENSE +README.md +Dockerfile +.dockerignore +.git/ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..dfe3800 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,56 @@ +name: ci +on: push + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + id-token: write + packages: read + +jobs: + lint: + name: Lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.11" + - uses: pre-commit/action@v3.0.0 + + test: + name: Test + runs-on: ubuntu-latest + env: + PORT: "5000" + S3_BUCKET: "-" + S3_TEST_BUCKET: "kittl-uploads-storage-staging" + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: 'eu-central-1' + role-to-assume: 'arn:aws:iam::339713006905:role/vectorizing-github-actions' + - name: Set up Python 3.11 + uses: actions/setup-python@v2 + with: + python-version: 3.11 + - name: Set up system dependencies + run: | + bash scripts/install_system_dependencies.sh + - name: Set up `dev` conda environment + uses: conda-incubator/setup-miniconda@v2 + with: + activate-environment: dev + environment-file: envs/dev.yaml + auto-activate-base: false + - name: Run tests + shell: bash -el {0} + run: | + conda activate dev + python -m pytest vectorizing/tests/test.py diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index b3da75b..095dbdb 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,42 +1,15 @@ name: deploy + on: push: branches: - production - main + - jobs: - publish-docker-image: - runs-on: ubuntu-latest - steps: - - name: Check out repository code - uses: actions/checkout@v2 - - name: Login to GitHub Container Registry - uses: docker/login-action@v1 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-central-1 - - name: Publish the Docker image - run: | - docker build . 
--tag ghcr.io/kittl/vectorizing:${{ github.ref_name }} - docker push ghcr.io/kittl/vectorizing:${{ github.ref_name }} - - name: Deploy vectorizing in staging cluster - uses: kodermax/kubectl-aws-eks@master - env: - KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG }} - if: github.ref_name == 'main' - with: - args: rollout restart deployment/vectorizing --context staging - - name: Deploy vectorizing in production cluster - uses: kodermax/kubectl-aws-eks@master - env: - KUBE_CONFIG_DATA: ${{ secrets.KUBE_CONFIG }} - if: github.ref_name == 'production' - with: - args: rollout restart deployment/vectorizing --context production \ No newline at end of file + deploy-py-server-argo: + name: Deploy PY server Argo + uses: ./.github/workflows/helpers-deploy-argo.yaml + secrets: + GH_PAT: ${{ secrets.GH_PAT }} + ECR_ROLE_ARN: ${{ github.ref == 'refs/heads/production' && secrets.PRODUCTION_ECR_ROLE_ARN || secrets.STAGING_ECR_ROLE_ARN }} diff --git a/.github/workflows/helpers-build-docker-image.yaml b/.github/workflows/helpers-build-docker-image.yaml new file mode 100644 index 0000000..e60cbb6 --- /dev/null +++ b/.github/workflows/helpers-build-docker-image.yaml @@ -0,0 +1,93 @@ +# This callable workflow builds and publish a package docker image + +name: helpers-build-docker-image + +on: + workflow_call: + inputs: + aws-region: + default: eu-central-1 + description: The AWS region to use for ECR + required: false + type: string + docker_file_path: + description: > + "The path to the Dockerfile to use for building the image. For example: + `path/to/project/Dockerfile`" + type: string + required: true + extra-image-tags: + default: '' + description: Extra tags to use for the image (one per line) + required: false + type: string + platforms: + default: linux/amd64 + description: The platforms to build for + required: false + type: string + secrets: + ECR_ROLE_ARN: + description: The ECR role ARN + required: true + outputs: + image-tag: + description: The docker image tag + value: ${{ jobs.build.outputs.image-tag }} + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + id-token: write + packages: read + +jobs: + build: + name: Build docker image and push to ECR + runs-on: ubuntu-latest + outputs: + image-tag: ${{ steps.get-image-tag.outputs.image-tag }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: ${{ inputs.aws-region }} + role-to-assume: ${{ secrets.ECR_ROLE_ARN }} + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + - name: Get image tag output + shell: bash + id: get-image-tag + run: echo "image-tag=sha-${GITHUB_SHA:0:7}" >> $GITHUB_OUTPUT + - name: Docker meta tags + id: meta + uses: docker/metadata-action@v5 + with: + flavor: | + latest=false + images: | + ${{ format('{0}/{1}', steps.login-ecr.outputs.registry, github.event.repository.name) }} + tags: | + type=semver,pattern={{version}} + type=sha + ${{ inputs.extra-image-tags }} + - name: Build and push + uses: docker/build-push-action@v6 + with: + cache-from: type=gha + cache-to: type=gha,mode=max + context: . 
+ tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: ${{ inputs.platforms }} + provenance: false + push: true + file: ${{ inputs.docker_file_path }} diff --git a/.github/workflows/helpers-deploy-argo.yaml b/.github/workflows/helpers-deploy-argo.yaml new file mode 100644 index 0000000..f933a46 --- /dev/null +++ b/.github/workflows/helpers-deploy-argo.yaml @@ -0,0 +1,58 @@ +# This callable workflow builds and publish a package docker image via the helpers-build-docker-image workflow, +# and then updates the image tag in the Helm chart values file in the development-application-config repo. + +name: helpers-deploy-argo + +on: + workflow_call: + secrets: + GH_PAT: + description: "The GitHub Personal Access Token to use for checking out the helm-config repository" + required: true + ECR_ROLE_ARN: + description: "The ECR role ARN" + required: true + +permissions: + contents: write + id-token: write + packages: read + +jobs: + build-docker-image: + uses: ./.github/workflows/helpers-build-docker-image.yaml + name: Build Docker image + with: + docker_file_path: Dockerfile + secrets: inherit + + update-helm-values: + name: Update Helm values + runs-on: ubuntu-latest + needs: build-docker-image + env: + ENVIRONMENT: ${{ github.ref == 'refs/heads/production' && 'production' || 'staging' }} + REPOSITORY: ${{ github.event.repository.name }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Checkout helm config Repository + uses: actions/checkout@v4 + with: + repository: Kittl/development-applications-config + ref: main + path: helm-config + token: ${{ secrets.GH_PAT }} + persist-credentials: false + - name: Update Image Version in the related HelmChart values.yaml + uses: fjogeleit/yaml-update-action@v0.14.0 + with: + valueFile: ${{ format('{0}/{1}/values.yaml', env.ENVIRONMENT, env.REPOSITORY) }} + propertyPath: 'deployment.image.tag' + value: ${{ needs.build-docker-image.outputs.image-tag }} + repository: Kittl/development-applications-config + branch: main + createPR: false + message: 'Update Image Version to ${{ needs.build-docker-image.outputs.image-tag }}' + token: ${{ secrets.GH_PAT }} + workDir: helm-config diff --git a/.gitignore b/.gitignore index 00e0eed..1b2aedb 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,5 @@ htmlcov/ dist/ build/ *.egg-info/ -diff_output \ No newline at end of file +diff_output +.idea/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..2175768 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,82 @@ +repos: + # a set of useful Python-based pre-commit hooks + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + # list of definitions and supported hooks: https://pre-commit.com/hooks.html + - id: trailing-whitespace # removes any whitespace at the ends of lines + - id: check-toml # check toml syntax by loading all toml files + - id: check-yaml # check yaml syntax by loading all yaml files + - id: check-json # check-json syntax by loading all json files + - id: check-merge-conflict # check for files with merge conflict strings + args: ["--assume-in-merge"] # and run this check even when not explicitly in a merge + - id: check-added-large-files # check that no "large" files have been added + args: ["--maxkb=10240"] # where large means 10MB+, as in Hugging Face's git server + - id: debug-statements # check for python debug statements (import pdb, breakpoint, etc.) 
+ - id: detect-private-key # checks for private keys (BEGIN X PRIVATE KEY, etc.) + + # black python autoformatting + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + + # flake8 python linter with all the fixins + - repo: https://github.com/PyCQA/flake8 + rev: 6.0.0 + hooks: + # additional configuration of flake8 and extensions in pyproject.toml + - id: flake8 + additional_dependencies: + [ + flake8-annotations, + flake8-bandit, + flake8-bugbear, + flake8-black, + flake8-docstrings, + flake8-import-order, + darglint, + mypy, + pycodestyle, + pydocstyle, + Flake8-pyproject + ] + + # removed unused imports and variables + - repo: https://github.com/PyCQA/autoflake + rev: v2.0.1 + hooks: + - id: autoflake + name: autoflake + entry: autoflake + language: python + "types": [python] + require_serial: true + args: + - "--in-place" + - "--expand-star-imports" + - "--remove-duplicate-keys" + - "--remove-unused-variables" + + # add trailing commas to calls and literals + - repo: https://github.com/asottile/add-trailing-comma + rev: v2.4.0 + hooks: + - id: add-trailing-comma + args: + - --py36-plus + + # upgrade syntax for new version of python + - repo: https://github.com/asottile/pyupgrade + rev: v3.3.1 + hooks: + - id: pyupgrade + args: + - --py36-plus + + # sort python imports in the right order + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + name: isort (python) \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index f549081..be618e7 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -8,16 +8,16 @@ "python": "/opt/conda/envs/dev/bin/python", "env": { "FLASK_APP": "vectorizing", - "FLASK_DEBUG": "1", + "FLASK_DEBUG": "1" }, "args": [ "run", "--port", "8080", - "--no-debugger", + "--no-debugger" ], "justMyCode": true - }, + } ] } diff --git a/Dockerfile b/Dockerfile index 9381022..d0c97ca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,45 @@ -FROM python:3.9 -RUN apt-get update -y -RUN apt-get install wget build-essential python3-dev libagg-dev libpotrace-dev pkg-config libgl1 -y - -WORKDIR / -COPY requirements/dev.txt / -RUN pip install -r dev.txt -COPY . / +# Use an official Python runtime as a parent image +FROM python:3.11-slim + +# Install necessary packages +RUN apt-get update -y && apt-get install -y \ + wget \ + build-essential \ + python3-dev \ + libagg-dev \ + libpotrace-dev \ + pkg-config \ + libgl1 \ + --no-install-recommends && \ + rm -rf /var/lib/apt/lists/* + +# Create a non-root user and group with specific IDs for consistency +RUN addgroup --gid 1001 appuser && \ + adduser --uid 1001 --gid 1001 --disabled-password --gecos "" appuser + +# Set the working directory +WORKDIR /app + +# Copy only requirements to leverage Docker cache +COPY requirements/dev.txt /app/requirements.txt + +# Install Python dependencies +RUN pip install --no-cache-dir -r /app/requirements.txt + +# Copy the rest of the application code +COPY . 
/app + +# Change ownership of the application files +RUN chown -R appuser:appuser /app + +# Switch to the non-root user +USER appuser + +# Set environment variables ENV PORT=5000 -CMD gunicorn -w 4 'vectorizing:create_app()' --timeout 0 -b 0.0.0.0:$PORT \ No newline at end of file + +# Expose the port +EXPOSE $PORT + +# Define the command to run the application +CMD ["gunicorn", "-w", "4", "vectorizing:create_app()", "--timeout", "0", "-b", "0.0.0.0:5000"] diff --git a/README.md b/README.md index ecf0c88..1a12192 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,20 @@ The first time the execution can take few minutes, as it is pulling the dev cont bash scripts/compile_envs.sh ``` -This will compile dependencies and environments, ensuring a consistent development workflow and deployment. + This will compile dependencies and environments, ensuring a consistent development workflow and deployment. + +4. If you want to add or remove **system** dependencies, update the script: [`scripts/install_system_dependencies.sh`](scripts/install_system_dependencies.sh). This is used both in CI and at dev container creation, to keep them consistent. + +## Linting and formatting + +To perform linting and formatting, run from the root of the repo: + +``` +pre-commit run --all-files +``` + +The first execution might take a bit longer, as it will set up the virtual environment +where the linter and the formatter will run. ## Server diff --git a/docker-compose.yml b/docker-compose.yml index 11ba9fd..65c7e94 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,4 @@ -version: "3.9" +version: "3.11" x-build: &build build: . x-env: &env diff --git a/envs/dev.yaml b/envs/dev.yaml index 1770fec..b165c52 100644 --- a/envs/dev.yaml +++ b/envs/dev.yaml @@ -4,92 +4,91 @@ channels: dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - - ca-certificates=2023.08.22=h06a4308_0 + - bzip2=1.0.8=h5eee18b_6 + - ca-certificates=2024.7.2=h06a4308_0 - ld_impl_linux-64=2.38=h1181459_1 - - libffi=3.4.4=h6a678d5_0 + - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 + - libuuid=1.41.5=h5eee18b_0 - ncurses=6.4=h6a678d5_0 - - openssl=3.0.10=h7f8727e_2 - - pip=23.2.1=py39h06a4308_0 - - python=3.9.17=h955ad1f_0 + - openssl=3.0.14=h5eee18b_0 + - pip=24.2=py311h06a4308_0 + - python=3.11.5=h955ad1f_0 - readline=8.2=h5eee18b_0 - - setuptools=68.0.0=py39h06a4308_0 - - sqlite=3.41.2=h5eee18b_0 - - tk=8.6.12=h1ccaba5_0 - - tzdata=2023c=h04d1e81_0 - - xz=5.4.2=h5eee18b_0 - - zlib=1.2.13=h5eee18b_0 + - setuptools=72.1.0=py311h06a4308_0 + - sqlite=3.45.3=h5eee18b_0 + - tk=8.6.14=h39e8969_0 + - tzdata=2024a=h04d1e81_0 + - xz=5.4.6=h5eee18b_1 + - zlib=1.2.13=h5eee18b_1 - pip: - - blinker==1.6.2 - - boto3==1.28.45 - - botocore==1.31.45 - - build==1.0.3 - - cairocffi==1.6.1 + - blinker==1.8.2 + - boto3==1.35.2 + - botocore==1.35.2 + - build==1.2.1 + - cairocffi==1.7.1 - cairosvg==2.7.1 - - certifi==2023.7.22 - - cffi==1.15.1 + - certifi==2024.7.4 + - cffi==1.17.0 - cfgv==3.4.0 - - charset-normalizer==3.2.0 + - charset-normalizer==3.3.2 - click==8.1.7 - cssselect2==0.7.0 - cuid==0.4 - defusedxml==0.7.1 - - distlib==0.3.7 - - exceptiongroup==1.1.3 + - distlib==0.3.8 + - exceptiongroup==1.2.2 - faiss-cpu==1.7.4 - - filelock==3.12.3 + - filelock==3.15.4 - flask==2.3.3 - - flask-cors==4.0.0 + - flask-cors==4.0.1 - gunicorn==21.2.0 - - identify==2.5.28 - - idna==3.4 - - imageio==2.31.3 - - importlib-metadata==6.8.0 + - identify==2.6.0 + - idna==3.7 + - 
imageio==2.35.1 - iniconfig==2.0.0 - - itsdangerous==2.1.2 - - jinja2==3.1.2 + - itsdangerous==2.2.0 + - jinja2==3.1.4 - jmespath==1.0.1 - - lazy-loader==0.3 + - lazy-loader==0.4 - llvmlite==0.40.1 - - markupsafe==2.1.3 - - networkx==3.1 - - nodeenv==1.8.0 + - markupsafe==2.1.5 + - networkx==3.3 + - nodeenv==1.9.1 - numba==0.57.1 - numpy==1.24.4 - opencv-python-headless==4.8.0.76 - - packaging==23.1 - - pillow==10.0.0 - - pip-tools==7.3.0 - - platformdirs==3.10.0 - - pluggy==1.3.0 - - pre-commit==3.4.0 + - packaging==24.1 + - pillow==10.0.1 + - pip-tools==7.4.1 + - platformdirs==4.2.2 + - pluggy==1.5.0 + - pre-commit==3.8.0 - pyclipper==1.3.0.post5 - - pycparser==2.21 + - pycparser==2.22 - pypotrace==0.3 - - pyproject-hooks==1.0.0 - - pytest==7.4.2 + - pyproject-hooks==1.1.0 + - pytest==8.3.2 - pytest-dotenv==0.5.2 - - python-dateutil==2.8.2 - - python-dotenv==1.0.0 - - pywavelets==1.4.1 - - pyyaml==6.0.1 + - python-dateutil==2.9.0.post0 + - python-dotenv==1.0.1 + - pywavelets==1.7.0 + - pyyaml==6.0.2 - requests==2.31.0 - - s3transfer==0.6.2 + - s3transfer==0.10.2 - scikit-image==0.21.0 - - scipy==1.11.2 + - scipy==1.14.1 - sentry-sdk==1.30.0 - sewar==0.4.6 - six==1.16.0 - - tifffile==2023.8.30 - - tinycss2==1.2.1 + - tifffile==2024.8.10 + - tinycss2==1.3.0 - tomli==2.0.1 - - typing-extensions==4.7.1 - - urllib3==1.26.16 - - virtualenv==20.24.5 + - urllib3==2.2.2 + - virtualenv==20.26.3 - webencodings==0.5.1 - - werkzeug==2.3.7 - - wheel==0.41.2 - - zipp==3.16.2 + - werkzeug==3.0.3 + - wheel==0.44.0 prefix: /opt/conda/envs/dev diff --git a/envs/prod.yaml b/envs/prod.yaml index 11bcd76..792ddd0 100644 --- a/envs/prod.yaml +++ b/envs/prod.yaml @@ -4,63 +4,63 @@ channels: dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - - ca-certificates=2023.08.22=h06a4308_0 + - bzip2=1.0.8=h5eee18b_6 + - ca-certificates=2024.7.2=h06a4308_0 - ld_impl_linux-64=2.38=h1181459_1 - - libffi=3.4.4=h6a678d5_0 + - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 + - libuuid=1.41.5=h5eee18b_0 - ncurses=6.4=h6a678d5_0 - - openssl=3.0.10=h7f8727e_2 - - pip=23.2.1=py39h06a4308_0 - - python=3.9.17=h955ad1f_0 + - openssl=3.0.14=h5eee18b_0 + - pip=24.2=py311h06a4308_0 + - python=3.11.5=h955ad1f_0 - readline=8.2=h5eee18b_0 - - setuptools=68.0.0=py39h06a4308_0 - - sqlite=3.41.2=h5eee18b_0 - - tk=8.6.12=h1ccaba5_0 - - tzdata=2023c=h04d1e81_0 - - wheel=0.38.4=py39h06a4308_0 - - xz=5.4.2=h5eee18b_0 - - zlib=1.2.13=h5eee18b_0 + - setuptools=72.1.0=py311h06a4308_0 + - sqlite=3.45.3=h5eee18b_0 + - tk=8.6.14=h39e8969_0 + - tzdata=2024a=h04d1e81_0 + - wheel=0.43.0=py311h06a4308_0 + - xz=5.4.6=h5eee18b_1 + - zlib=1.2.13=h5eee18b_1 - pip: - - blinker==1.6.2 - - boto3==1.28.45 - - botocore==1.31.45 - - certifi==2023.7.22 - - charset-normalizer==3.2.0 + - blinker==1.8.2 + - boto3==1.35.2 + - botocore==1.35.2 + - certifi==2024.7.4 + - charset-normalizer==3.3.2 - click==8.1.7 - cuid==0.4 - faiss-cpu==1.7.4 - flask==2.3.3 - - flask-cors==4.0.0 + - flask-cors==4.0.1 - gunicorn==21.2.0 - - idna==3.4 - - imageio==2.31.3 - - importlib-metadata==6.8.0 - - itsdangerous==2.1.2 - - jinja2==3.1.2 + - idna==3.7 + - imageio==2.35.1 + - itsdangerous==2.2.0 + - jinja2==3.1.4 - jmespath==1.0.1 - - lazy-loader==0.3 + - lazy-loader==0.4 - llvmlite==0.40.1 - - markupsafe==2.1.3 - - networkx==3.1 + - markupsafe==2.1.5 + - networkx==3.3 - numba==0.57.1 - numpy==1.24.4 - opencv-python-headless==4.8.0.76 - - packaging==23.1 - - pillow==10.0.0 + - packaging==24.1 + - 
pillow==10.0.1 - pyclipper==1.3.0.post5 - pypotrace==0.3 - - python-dateutil==2.8.2 - - pywavelets==1.4.1 + - python-dateutil==2.9.0.post0 + - pywavelets==1.7.0 - requests==2.31.0 - - s3transfer==0.6.2 + - s3transfer==0.10.2 - scikit-image==0.21.0 - - scipy==1.11.2 + - scipy==1.14.1 - sentry-sdk==1.30.0 - six==1.16.0 - - tifffile==2023.8.30 - - urllib3==1.26.16 - - werkzeug==2.3.7 - - zipp==3.16.2 + - tifffile==2024.8.10 + - urllib3==2.2.2 + - werkzeug==3.0.3 prefix: /opt/conda/envs/prod diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..158e06f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,28 @@ +[project] +name = "Vectorizing" +description = "Utility to vectorize raster images" +readme = "README.md" +packages = [{include = "src"}] + +[tool.black] +line-length = 88 +target-version = ['py311'] + +[tool.flake8] +max-complexity = 12 # complexity checker threshold +max-line-length = 88 +extend-ignore = [ + # import order + 'I100', + 'I101', + 'I202', +] +import-order-style = "google" +docstring-convention = "numpy" +strictness = "short" +docstring-style = "numpy" +suppress-none-returning = true +mypy-init-return = true + +[tool.isort] +profile = "black" \ No newline at end of file diff --git a/requirements/dev.in b/requirements/dev.in index ff7431e..0fa3f0c 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -7,4 +7,4 @@ pre-commit pip-tools pytest cairosvg~=2.7.1 -sewar~=0.4.6 \ No newline at end of file +sewar~=0.4.6 diff --git a/requirements/dev.txt b/requirements/dev.txt index f6d9600..a60d12c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,120 +1,115 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile /workspaces/tracing/scripts/../requirements/dev.in +# pip-compile /workspaces/vectorizing/scripts/../requirements/dev.in # -blinker==1.6.2 +blinker==1.8.2 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # flask -boto3==1.28.45 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -botocore==1.31.45 +boto3==1.35.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +botocore==1.35.2 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # boto3 # s3transfer -build==1.0.3 +build==1.2.1 # via pip-tools -cairocffi==1.6.1 +cairocffi==1.7.1 # via cairosvg cairosvg==2.7.1 - # via -r /workspaces/tracing/scripts/../requirements/dev.in -certifi==2023.7.22 + # via -r /workspaces/vectorizing/scripts/../requirements/dev.in +certifi==2024.7.4 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # requests # sentry-sdk -cffi==1.15.1 +cffi==1.17.0 # via cairocffi cfgv==3.4.0 # via pre-commit -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # requests click==8.1.7 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # flask # pip-tools cssselect2==0.7.0 # via cairosvg cuid==0.4 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt defusedxml==0.7.1 # via cairosvg -distlib==0.3.7 
+distlib==0.3.8 # via virtualenv -exceptiongroup==1.1.3 +exceptiongroup==1.2.2 # via pytest faiss-cpu==1.7.4 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -filelock==3.12.3 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +filelock==3.15.4 # via virtualenv flask==2.3.3 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # flask-cors -flask-cors==4.0.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt +flask-cors==4.0.1 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt gunicorn==21.2.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -identify==2.5.28 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +identify==2.6.0 # via pre-commit -idna==3.4 +idna==3.7 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # requests -imageio==2.31.3 +imageio==2.35.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # scikit-image -importlib-metadata==6.8.0 - # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt - # build - # flask iniconfig==2.0.0 # via pytest -itsdangerous==2.1.2 +itsdangerous==2.2.0 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # flask -jinja2==3.1.2 +jinja2==3.1.4 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # flask jmespath==1.0.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # boto3 # botocore -lazy-loader==0.3 +lazy-loader==0.4 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # scikit-image llvmlite==0.40.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # numba -markupsafe==2.1.3 +markupsafe==2.1.5 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # jinja2 # werkzeug -networkx==3.1 +networkx==3.3 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # scikit-image -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit numba==0.57.1 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt numpy==1.24.4 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # imageio # numba # opencv-python-headless @@ -125,81 +120,84 @@ numpy==1.24.4 # sewar # tifffile opencv-python-headless==4.8.0.76 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -packaging==23.1 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +packaging==24.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # build # gunicorn + # lazy-loader # pytest # scikit-image -pillow==10.0.0 +pillow==10.0.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # cairosvg # imageio # scikit-image # sewar 
-pip-tools==7.3.0 - # via -r /workspaces/tracing/scripts/../requirements/dev.in -platformdirs==3.10.0 +pip-tools==7.4.1 + # via -r /workspaces/vectorizing/scripts/../requirements/dev.in +platformdirs==4.2.2 # via virtualenv -pluggy==1.3.0 +pluggy==1.5.0 # via pytest -pre-commit==3.4.0 - # via -r /workspaces/tracing/scripts/../requirements/dev.in +pre-commit==3.8.0 + # via -r /workspaces/vectorizing/scripts/../requirements/dev.in pyclipper==1.3.0.post5 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -pycparser==2.21 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +pycparser==2.22 # via cffi pypotrace==0.3 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -pyproject-hooks==1.0.0 - # via build -pytest==7.4.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +pyproject-hooks==1.1.0 # via - # -r /workspaces/tracing/scripts/../requirements/dev.in + # build + # pip-tools +pytest==8.3.2 + # via + # -r /workspaces/vectorizing/scripts/../requirements/dev.in # pytest-dotenv pytest-dotenv==0.5.2 - # via -r /workspaces/tracing/scripts/../requirements/dev.in -python-dateutil==2.8.2 + # via -r /workspaces/vectorizing/scripts/../requirements/dev.in +python-dateutil==2.9.0.post0 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # botocore -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via pytest-dotenv -pywavelets==1.4.1 +pywavelets==1.7.0 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # scikit-image -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit requests==2.31.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -s3transfer==0.6.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +s3transfer==0.10.2 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # boto3 scikit-image==0.21.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt -scipy==1.11.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt +scipy==1.14.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # scikit-image # sewar sentry-sdk==1.30.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.txt + # via -r /workspaces/vectorizing/scripts/../requirements/prod.txt sewar==0.4.6 - # via -r /workspaces/tracing/scripts/../requirements/dev.in + # via -r /workspaces/vectorizing/scripts/../requirements/dev.in six==1.16.0 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # python-dateutil -tifffile==2023.8.30 +tifffile==2024.8.10 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # scikit-image -tinycss2==1.2.1 +tinycss2==1.3.0 # via # cairosvg # cssselect2 @@ -207,32 +205,25 @@ tomli==2.0.1 # via # build # pip-tools - # pyproject-hooks # pytest -typing-extensions==4.7.1 - # via filelock -urllib3==1.26.16 +urllib3==2.2.2 # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # botocore # requests # sentry-sdk -virtualenv==20.24.5 +virtualenv==20.26.3 # via pre-commit webencodings==0.5.1 # via # cssselect2 # tinycss2 -werkzeug==2.3.7 +werkzeug==3.0.3 # via - # -r 
/workspaces/tracing/scripts/../requirements/prod.txt + # -r /workspaces/vectorizing/scripts/../requirements/prod.txt # flask -wheel==0.41.2 +wheel==0.44.0 # via pip-tools -zipp==3.16.2 - # via - # -r /workspaces/tracing/scripts/../requirements/prod.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/requirements/prod.in b/requirements/prod.in index 42d0b16..adca285 100644 --- a/requirements/prod.in +++ b/requirements/prod.in @@ -1,7 +1,7 @@ # add here production requirements cuid~=0.4 -boto3~=1.28.42 +boto3~=1.35.2 flask~=2.3.3 flask-cors~=4.0.0 sentry-sdk~=1.30.0 diff --git a/requirements/prod.txt b/requirements/prod.txt index f810e43..d6c739b 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,63 +1,61 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile /workspaces/tracing/scripts/../requirements/prod.in +# pip-compile /workspaces/vectorizing/scripts/../requirements/prod.in # -blinker==1.6.2 +blinker==1.8.2 # via flask -boto3==1.28.45 - # via -r /workspaces/tracing/scripts/../requirements/prod.in -botocore==1.31.45 +boto3==1.35.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in +botocore==1.35.2 # via # boto3 # s3transfer -certifi==2023.7.22 +certifi==2024.7.4 # via # requests # sentry-sdk -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests click==8.1.7 # via flask cuid==0.4 - # via -r /workspaces/tracing/scripts/../requirements/prod.in + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in faiss-cpu==1.7.4 - # via -r /workspaces/tracing/scripts/../requirements/prod.in + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in flask==2.3.3 # via - # -r /workspaces/tracing/scripts/../requirements/prod.in + # -r /workspaces/vectorizing/scripts/../requirements/prod.in # flask-cors -flask-cors==4.0.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.in +flask-cors==4.0.1 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in gunicorn==21.2.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.in -idna==3.4 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in +idna==3.7 # via requests -imageio==2.31.3 +imageio==2.35.1 # via scikit-image -importlib-metadata==6.8.0 +itsdangerous==2.2.0 # via flask -itsdangerous==2.1.2 - # via flask -jinja2==3.1.2 +jinja2==3.1.4 # via flask jmespath==1.0.1 # via # boto3 # botocore -lazy-loader==0.3 +lazy-loader==0.4 # via scikit-image llvmlite==0.40.1 # via numba -markupsafe==2.1.3 +markupsafe==2.1.5 # via # jinja2 # werkzeug -networkx==3.1 +networkx==3.3 # via scikit-image numba==0.57.1 - # via -r /workspaces/tracing/scripts/../requirements/prod.in + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in numpy==1.24.4 # via # imageio @@ -69,44 +67,43 @@ numpy==1.24.4 # scipy # tifffile opencv-python-headless==4.8.0.76 - # via -r /workspaces/tracing/scripts/../requirements/prod.in -packaging==23.1 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in +packaging==24.1 # via # gunicorn + # lazy-loader # scikit-image -pillow==10.0.0 +pillow==10.0.1 # via - # -r /workspaces/tracing/scripts/../requirements/prod.in + # -r /workspaces/vectorizing/scripts/../requirements/prod.in # imageio # scikit-image pyclipper==1.3.0.post5 - # via -r /workspaces/tracing/scripts/../requirements/prod.in + # via -r 
/workspaces/vectorizing/scripts/../requirements/prod.in pypotrace==0.3 - # via -r /workspaces/tracing/scripts/../requirements/prod.in -python-dateutil==2.8.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in +python-dateutil==2.9.0.post0 # via botocore -pywavelets==1.4.1 +pywavelets==1.7.0 # via scikit-image requests==2.31.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.in -s3transfer==0.6.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in +s3transfer==0.10.2 # via boto3 scikit-image==0.21.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.in -scipy==1.11.2 + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in +scipy==1.14.1 # via scikit-image sentry-sdk==1.30.0 - # via -r /workspaces/tracing/scripts/../requirements/prod.in + # via -r /workspaces/vectorizing/scripts/../requirements/prod.in six==1.16.0 # via python-dateutil -tifffile==2023.8.30 +tifffile==2024.8.10 # via scikit-image -urllib3==1.26.16 +urllib3==2.2.2 # via # botocore # requests # sentry-sdk -werkzeug==2.3.7 +werkzeug==3.0.3 # via flask -zipp==3.16.2 - # via importlib-metadata diff --git a/scripts/install_system_dependencies.sh b/scripts/install_system_dependencies.sh new file mode 100644 index 0000000..05551bf --- /dev/null +++ b/scripts/install_system_dependencies.sh @@ -0,0 +1,5 @@ +#!/bin/bash +# this script install system dependencies for vectorizing + +sudo apt-get update +sudo apt-get install -y build-essential libagg-dev libpotrace-dev pkg-config libffi-dev libcairo2-dev diff --git a/vectorizing/__init__.py b/vectorizing/__init__.py index ff206f0..8079cd4 100644 --- a/vectorizing/__init__.py +++ b/vectorizing/__init__.py @@ -20,10 +20,8 @@ PYTHON_ENV = os.getenv("PYTHON_ENV", "development") ( - PORT, - S3_BUCKET, - AWS_ACCESS_KEY_ID, - AWS_SECRET_ACCESS_KEY, + PORT, + S3_BUCKET, ) = get_required() ( @@ -44,7 +42,7 @@ def process_color(img, color_count, timer): def validate_args(args): if not 'url' in args: return False - + solver = args.get('solver', DEFAULT_SOLVER) if not solver in SOLVERS: return False @@ -57,7 +55,7 @@ def validate_args(args): only_numbers = all([isinstance(item, int) for item in box]) if not only_numbers: return False - + return SimpleNamespace( crop_box = box, solver = solver, @@ -71,7 +69,7 @@ def invalid_args(): "success": False, "error": "INVALID_PARAMETERS" }), 400 - + def create_app(test_config=None): app = Flask(__name__, instance_relative_config=True) app.debug = PYTHON_ENV == 'development' @@ -79,7 +77,7 @@ def create_app(test_config=None): @app.route('/', methods = ['POST']) def index(): args = request.json - + args = validate_args(args) if not args: return invalid_args() @@ -99,12 +97,12 @@ def index(): if crop_box: img = img.crop(tuple(crop_box)) - + if solver == 0: timer.start_timer('Binary Solver - Total') solved = process_binary(img) timer.end_timer() - + else: timer.start_timer('Color Solver - Total') solved = process_color(img, color_count, timer) @@ -128,34 +126,34 @@ def index(): timer.end_timer() app.logger.info(timer.timelog()) - - return jsonify({ + + return jsonify({ 'success': True, - 'objectId': cuid_str, + 'objectId': cuid_str, 'info': { 'bounds': bounds.to_dict(), 'image_width': width, 'image_height': height } }) - + except (Exception) as e: app.logger.error(e) return jsonify({ "success": False, "error": "INTERNAL_SERVER_ERROR" }), 500 - + @app.route('/health', methods = ['GET']) def healthcheck(): return jsonify({ "success": True, }), 200 - + @app.route('/test-error', methods = ['GET']) def 
test_error(): raise Exception('Test Error') - + app.logger.info(f'Vectorizing server running on port: {PORT}, environment: {PYTHON_ENV}') if SENTRY_DSN: sentry_sdk.init( @@ -163,4 +161,4 @@ def test_error(): traces_sample_rate = 0.1, environment = PYTHON_ENV, ) - return app \ No newline at end of file + return app diff --git a/vectorizing/server/env.py b/vectorizing/server/env.py index 6d32855..b7f37c9 100644 --- a/vectorizing/server/env.py +++ b/vectorizing/server/env.py @@ -2,9 +2,7 @@ REQUIRED_ENVIRONMENT_VARIABLES = { 'PORT': int, - 'S3_BUCKET': str, - 'AWS_ACCESS_KEY_ID': str, - 'AWS_SECRET_ACCESS_KEY': str + 'S3_BUCKET': str } OPTIONAL_ENVIRONMENT_VARIABLES = { @@ -23,9 +21,9 @@ def get_required(): if missing_required: raise VariableNotDefinedException() - + return [ - cast(os.environ[key]) + cast(os.environ[key]) for key, cast in REQUIRED_ENVIRONMENT_VARIABLES.items() ] @@ -33,4 +31,4 @@ def get_optional(): return [ cast(os.environ[key]) if key in os.environ else None for key, cast in OPTIONAL_ENVIRONMENT_VARIABLES.items() - ] \ No newline at end of file + ] diff --git a/vectorizing/server/s3.py b/vectorizing/server/s3.py index a002585..2e942e8 100644 --- a/vectorizing/server/s3.py +++ b/vectorizing/server/s3.py @@ -1,7 +1,7 @@ import cuid import boto3 -S3 = boto3.client("s3") +S3 = boto3.client("s3", region_name="eu-central-1") def upload_markup (markup, s3_bucket_name): cuid_str = cuid.cuid() @@ -21,7 +21,7 @@ def get_object_url(s3_file_key, s3_bucket_name): Key=s3_file_key, Bucket=s3_bucket_name ) - + except(Exception): return None @@ -40,4 +40,4 @@ def upload_file( s3_bucket_name, s3_file_key, ) - return get_object_url(s3_file_key, s3_bucket_name) \ No newline at end of file + return get_object_url(s3_file_key, s3_bucket_name) diff --git a/vectorizing/util/read.py b/vectorizing/util/read.py index 1f377dc..54edea1 100644 --- a/vectorizing/util/read.py +++ b/vectorizing/util/read.py @@ -45,7 +45,7 @@ def try_read_image_from_path(path): def try_read_image_from_url(url): try: - resp = requests.get(url) + resp = requests.get(url, headers={"User-Agent": "KittlVectorizing/1.0.0"}) img = Image.open(BytesIO(resp.content)) except: raise URLReadError()
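
Note on the request contract (not part of the patch above): the hunks in vectorizing/__init__.py keep the existing POST / endpoint, where the JSON body is checked by validate_args and a successful response carries objectId plus image info. The client-side sketch below illustrates that contract; it is a hypothetical example, and the crop_box and color_count key names as well as the URL and port are assumptions inferred from the surrounding code, not confirmed by this diff.

# Hypothetical client sketch for the vectorizing server (not part of this patch).
# Assumes the Flask app from vectorizing.create_app() is listening on port 5000.
import requests

payload = {
    "url": "https://example.com/input.png",  # required: raster image to fetch and vectorize (example URL)
    "solver": 1,                             # optional: 0 = binary solver, otherwise color solver (falls back to DEFAULT_SOLVER)
    "crop_box": [0, 0, 256, 256],            # optional: integer box applied via img.crop() -- key name assumed
    "color_count": 8,                        # optional: palette size for the color solver -- key name assumed
}

resp = requests.post("http://localhost:5000/", json=payload, timeout=120)
data = resp.json()

if data.get("success"):
    # On success the server returns the uploaded object's id plus image bounds and size.
    print(data["objectId"], data["info"]["image_width"], data["info"]["image_height"])
else:
    # Validation failures return 400 with error "INVALID_PARAMETERS";
    # unexpected failures return 500 with "INTERNAL_SERVER_ERROR".
    print("vectorizing failed:", data.get("error"))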
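
Note on AWS credentials (not part of the patch above): with AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY removed from REQUIRED_ENVIRONMENT_VARIABLES and the workflows switched to role-to-assume via aws-actions/configure-aws-credentials, boto3 relies on its default credential provider chain (environment variables, shared config, or the assumed IAM role) at runtime. The sketch below shows what the patched vectorizing/server/s3.py now depends on; the upload_text helper and the example bucket call are illustrative assumptions, not code from this repository.

# Minimal sketch (assumptions noted above; not part of this patch).
# No static keys are read here: boto3 resolves credentials from its default chain.
import boto3
import cuid

S3 = boto3.client("s3", region_name="eu-central-1")  # region pinned as in vectorizing/server/s3.py

def upload_text(body: str, bucket: str) -> str:
    # Upload a small text object under a cuid key and return the key (illustrative helper).
    key = cuid.cuid()
    S3.put_object(Bucket=bucket, Key=key, Body=body.encode("utf-8"))
    return key

# Example call; the bucket name matches S3_TEST_BUCKET in .github/workflows/ci.yml.
# upload_text("<svg/>", "kittl-uploads-storage-staging")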