diff --git a/dangerzone/conversion/common.py b/dangerzone/conversion/common.py index 4e8e9f313..2a0f3b482 100644 --- a/dangerzone/conversion/common.py +++ b/dangerzone/conversion/common.py @@ -10,12 +10,14 @@ import sys import time from abc import abstractmethod -from typing import Callable, Dict, List, Optional, Tuple, Union +from typing import Callable, Dict, Generator, List, Optional, Tuple, Union TIMEOUT_PER_PAGE: float = 30 # (seconds) TIMEOUT_PER_MB: float = 30 # (seconds) TIMEOUT_MIN: float = 60 # (seconds) +PAGE_BATCH_SIZE = 50 # number of pages to be processed simultaneously + def running_on_qubes() -> bool: # https://www.qubes-os.org/faq/#what-is-the-canonical-way-to-detect-qubes-vm @@ -44,6 +46,24 @@ def calculate_timeout(size: float, pages: Optional[float] = None) -> float: return timeout +def batch_iterator(num_pages: int) -> Generator[Tuple[int, int], None, None]: + """Iterates over batches of PAGE_BATCH_SIZE pages""" + for first_page in range(1, num_pages + 1, PAGE_BATCH_SIZE): + if first_page + PAGE_BATCH_SIZE > num_pages: # Last batch + last_page = num_pages + else: + last_page = first_page + PAGE_BATCH_SIZE - 1 + yield (first_page, last_page) + + +def get_batch_timeout(timeout: Optional[float], num_pages: int) -> Optional[float]: + if timeout is None: + return None + else: + num_batches = max(1, (num_pages + PAGE_BATCH_SIZE - 1) // PAGE_BATCH_SIZE) + return timeout / num_batches + + class DangerzoneConverter: def __init__(self, progress_callback: Optional[Callable] = None) -> None: self.percentage: float = 0.0 diff --git a/dangerzone/conversion/doc_to_pixels.py b/dangerzone/conversion/doc_to_pixels.py index e828dbb7a..70edae32f 100644 --- a/dangerzone/conversion/doc_to_pixels.py +++ b/dangerzone/conversion/doc_to_pixels.py @@ -19,7 +19,15 @@ import magic from . 
import errors -from .common import DangerzoneConverter, running_on_qubes +from .common import ( + PAGE_BATCH_SIZE, + DangerzoneConverter, + batch_iterator, + get_batch_timeout, + running_on_qubes, +) + +PAGE_BASE = "/tmp/page" class DocumentToPixels(DangerzoneConverter): @@ -276,39 +284,86 @@ async def convert(self) -> None: # Get a more precise timeout, based on the number of pages timeout = self.calculate_timeout(size, num_pages) - async def pdftoppm_progress_callback(line: bytes) -> None: - """Function called for every line the 'pdftoppm' command outputs - - Sample pdftoppm output: - - $ pdftoppm sample.pdf /tmp/safe -progress - 1 4 /tmp/safe-1.ppm - 2 4 /tmp/safe-2.ppm - 3 4 /tmp/safe-3.ppm - 4 4 /tmp/safe-4.ppm - - Each successful line is in the format "{page} {page_num} {ppm_filename}" - """ - try: - (page_str, num_pages_str, _) = line.decode().split() - num_pages = int(num_pages_str) - page = int(page_str) - except ValueError as e: - # Ignore all non-progress related output, since pdftoppm sends - # everything to stderr and thus, errors can't be distinguished - # easily. We rely instead on the exit code. 
- return + timeout_per_batch = get_batch_timeout(timeout, num_pages) + for first_page, last_page in batch_iterator(num_pages): + # XXX send data from the previous loop's conversion to + # always be able to process and send data at the same time + if first_page == 1: # If in first pass + await self.pdf_to_rgb( + first_page, last_page, pdf_filename, timeout_per_batch + ) + delayed_send_rgb_files = self.send_rgb_files( + first_page, last_page, num_pages + ) + else: + await asyncio.gather( + self.pdf_to_rgb( + first_page, last_page, pdf_filename, timeout_per_batch + ), + delayed_send_rgb_files, + ) + delayed_send_rgb_files = self.send_rgb_files( + first_page, last_page, num_pages + ) + + await delayed_send_rgb_files + + final_files = ( + glob.glob("/tmp/page-*.rgb") + + glob.glob("/tmp/page-*.width") + + glob.glob("/tmp/page-*.height") + ) + + # XXX: Sanity check to avoid situations like #560. + if not running_on_qubes() and len(final_files) != 3 * num_pages: + raise errors.PageCountMismatch() + + # Move converted files into /tmp/dangerzone + for filename in final_files: + shutil.move(filename, "/tmp/dangerzone") + + self.update_progress("Converted document to pixels") + async def pdf_to_rgb( + self, + first_page: int, + last_page: int, + pdf_filename: str, + timeout: Optional[float], + ) -> None: + await self.run_command( + [ + "pdftoppm", + pdf_filename, + PAGE_BASE, + "-progress", + "-f", + str(first_page), + "-l", + str(last_page), + ], + error_message="Conversion from PDF to PPM failed", + timeout_message=( + f"Error converting from PDF to PPM, pdftoppm timed out after {timeout}" + " seconds" + ), + timeout=timeout, + ) + + async def send_rgb_files( + self, first_page: int, last_page: int, num_pages: int + ) -> None: + for page in range(first_page, last_page + 1): percentage_per_page = 45.0 / num_pages self.percentage += percentage_per_page - self.update_progress(f"Converting page {page}/{num_pages} to pixels") + self.update_progress(f"Converting pages 
{page}/{num_pages} to pixels") - zero_padding = "0" * (len(num_pages_str) - len(page_str)) - ppm_filename = f"{page_base}-{zero_padding}{page}.ppm" - rgb_filename = f"{page_base}-{page}.rgb" - width_filename = f"{page_base}-{page}.width" - height_filename = f"{page_base}-{page}.height" - filename_base = f"{page_base}-{page}" + zero_padding = "0" * (len(str(num_pages)) - len(str(page))) + ppm_filename = f"{PAGE_BASE}-{zero_padding}{page}.ppm" + rgb_filename = f"{PAGE_BASE}-{page}.rgb" + width_filename = f"{PAGE_BASE}-{page}.width" + height_filename = f"{PAGE_BASE}-{page}.height" + filename_base = f"{PAGE_BASE}-{page}" with open(ppm_filename, "rb") as f: # NOTE: PPM files have multiple ways of writing headers. @@ -339,40 +394,6 @@ async def pdftoppm_progress_callback(line: bytes) -> None: # Delete the ppm file os.remove(ppm_filename) - page_base = "/tmp/page" - - await self.run_command( - [ - "pdftoppm", - pdf_filename, - page_base, - "-progress", - ], - error_message="Conversion from PDF to PPM failed", - timeout_message=( - f"Error converting from PDF to PPM, pdftoppm timed out after {timeout}" - " seconds" - ), - stderr_callback=pdftoppm_progress_callback, - timeout=timeout, - ) - - final_files = ( - glob.glob("/tmp/page-*.rgb") - + glob.glob("/tmp/page-*.width") - + glob.glob("/tmp/page-*.height") - ) - - # XXX: Sanity check to avoid situations like #560. 
- if not running_on_qubes() and len(final_files) != 3 * num_pages: - raise errors.PageCountMismatch() - - # Move converted files into /tmp/dangerzone - for filename in final_files: - shutil.move(filename, "/tmp/dangerzone") - - self.update_progress("Converted document to pixels") - async def install_libreoffice_ext(self, libreoffice_ext: str) -> None: self.update_progress(f"Installing LibreOffice extension '{libreoffice_ext}'") unzip_args = [ diff --git a/dangerzone/conversion/errors.py b/dangerzone/conversion/errors.py index 2aeb38d04..d817f2a49 100644 --- a/dangerzone/conversion/errors.py +++ b/dangerzone/conversion/errors.py @@ -104,6 +104,11 @@ class PDFtoPPMInvalidDepth(PDFtoPPMException): error_message = "Error converting PDF to Pixels (Invalid PPM depth)" +class PPMtoPNGError(ConversionException): + error_code = ERROR_SHIFT + 55 + error_message = "Document page could not be reassembled from individual pixels" + + class InterruptedConversion(ConversionException): """Protocol received num of bytes different than expected""" @@ -113,6 +118,11 @@ class InterruptedConversion(ConversionException): ) +class PixelFilesMismatch(ConversionException): + error_code = ERROR_SHIFT + 70 + error_message = "The number of pages received is different than expected" + + class UnexpectedConversionError(PDFtoPPMException): error_code = ERROR_SHIFT + 100 error_message = "Some unexpected error occurred while converting the document" diff --git a/dangerzone/conversion/pixels_to_pdf.py b/dangerzone/conversion/pixels_to_pdf.py index 56b1d6604..dfaab61eb 100644 --- a/dangerzone/conversion/pixels_to_pdf.py +++ b/dangerzone/conversion/pixels_to_pdf.py @@ -13,38 +13,32 @@ import sys from typing import Optional -from .common import DangerzoneConverter, running_on_qubes +from .common import ( + DangerzoneConverter, + batch_iterator, + get_batch_timeout, + running_on_qubes, +) class PixelsToPDF(DangerzoneConverter): async def convert( - self, ocr_lang: Optional[str] = None, tempdir: 
Optional[str] = None + self, ocr_lang: Optional[str] = None, tempdir: Optional[str] = "/tmp" ) -> None: self.percentage = 50.0 - if tempdir is None: - tempdir = "/tmp" - num_pages = len(glob.glob(f"{tempdir}/dangerzone/page-*.rgb")) + num_pages = len(glob.glob(f"{tempdir}/page-*.png")) total_size = 0.0 # Convert RGB files to PDF files percentage_per_page = 45.0 / num_pages for page in range(1, num_pages + 1): - filename_base = f"{tempdir}/dangerzone/page-{page}" - rgb_filename = f"{filename_base}.rgb" - width_filename = f"{filename_base}.width" - height_filename = f"{filename_base}.height" - png_filename = f"{tempdir}/page-{page}.png" - ocr_filename = f"{tempdir}/page-{page}" - pdf_filename = f"{tempdir}/page-{page}.pdf" - - with open(width_filename) as f: - width = f.read().strip() - with open(height_filename) as f: - height = f.read().strip() + filename_base = f"{tempdir}/page-{page}" + png_filename = f"{filename_base}.png" + pdf_filename = f"{filename_base}.pdf" # The first few operations happen on a per-page basis. 
- page_size = os.path.getsize(filename_base + ".rgb") / 1024**2 + page_size = os.path.getsize(png_filename) / 1024**2 total_size += page_size timeout = self.calculate_timeout(page_size, 1) @@ -52,29 +46,11 @@ async def convert( self.update_progress( f"Converting page {page}/{num_pages} from pixels to searchable PDF" ) - await self.run_command( - [ - "gm", - "convert", - "-size", - f"{width}x{height}", - "-depth", - "8", - f"rgb:{rgb_filename}", - f"png:{png_filename}", - ], - error_message=f"Page {page}/{num_pages} conversion to PNG failed", - timeout_message=( - "Error converting pixels to PNG, convert timed out after" - f" {timeout} seconds" - ), - timeout=timeout, - ) await self.run_command( [ "tesseract", png_filename, - ocr_filename, + filename_base, "-l", ocr_lang, "--dpi", @@ -97,11 +73,7 @@ async def convert( [ "gm", "convert", - "-size", - f"{width}x{height}", - "-depth", - "8", - f"rgb:{rgb_filename}", + f"png:{png_filename}", f"pdf:{pdf_filename}", ], error_message=f"Page {page}/{num_pages} conversion to PDF failed", @@ -112,6 +84,9 @@ async def convert( timeout=timeout, ) + # remove PNG file when it is no longer needed + os.remove(png_filename) + self.percentage += percentage_per_page # Next operations apply to the all the pages, so we need to recalculate the @@ -119,20 +94,29 @@ async def convert( timeout = self.calculate_timeout(total_size, num_pages) # Merge pages into a single PDF + timeout_per_batch = get_batch_timeout(timeout, num_pages) self.update_progress(f"Merging {num_pages} pages into a single PDF") - args = ["pdfunite"] - for page in range(1, num_pages + 1): - args.append(f"{tempdir}/page-{page}.pdf") - args.append(f"{tempdir}/safe-output.pdf") - await self.run_command( - args, - error_message="Merging pages into a single PDF failed", - timeout_message=( - "Error merging pages into a single PDF, pdfunite timed out after" - f" {timeout} seconds" - ), - timeout=timeout, - ) + for first_page, last_page in batch_iterator(num_pages): + args = 
["pdfunite"] + accumulator = f"{tempdir}/safe-output.pdf" # PDF which accumulates pages + accumulator_temp = f"{tempdir}/safe-output_tmp.pdf" + if first_page > 1: # Append at the beginning + args.append(accumulator) + for page in range(first_page, last_page + 1): + args.append(f"{tempdir}/page-{page}.pdf") + args.append(accumulator_temp) + await self.run_command( + args, + error_message="Merging pages into a single PDF failed", + timeout_message=( + "Error merging pages into a single PDF, pdfunite timed out after" + f" {timeout_per_batch} seconds" + ), + timeout=timeout_per_batch, + ) + for page in range(first_page, last_page + 1): + os.remove(f"{tempdir}/page-{page}.pdf") + os.rename(accumulator_temp, accumulator) self.percentage += 2 @@ -165,7 +149,7 @@ async def main() -> int: converter = PixelsToPDF() try: - await converter.convert(ocr_lang) + await converter.convert(ocr_lang, tempdir="/tmp/dangerzone") error_code = 0 # Success! except (RuntimeError, TimeoutError, ValueError) as e: diff --git a/dangerzone/isolation_provider/base.py b/dangerzone/isolation_provider/base.py index 6265146f7..70cae54f4 100644 --- a/dangerzone/isolation_provider/base.py +++ b/dangerzone/isolation_provider/base.py @@ -1,11 +1,14 @@ +import io import logging import subprocess from abc import ABC, abstractmethod +from pathlib import Path from typing import Callable, Optional from colorama import Fore, Style +from PIL import Image, UnidentifiedImageError -from ..conversion.errors import ConversionException +from ..conversion import errors from ..document import Document from ..util import replace_control_chars @@ -37,7 +40,7 @@ def convert( document.mark_as_converting() try: success = self._convert(document, ocr_lang) - except ConversionException as e: + except errors.ConversionException as e: success = False self.print_progress_trusted(document, True, str(e), 0) except Exception as e: @@ -101,74 +104,26 @@ def sanitize_conversion_str(self, untrusted_conversion_str: str) -> str: 
armor_end = DOC_TO_PIXELS_LOG_END return armor_start + conversion_string + armor_end + def convert_pixels_to_png( + self, tempdir: str, page: int, width: int, height: int, rgb_data: bytes + ) -> None: + """ + Reconstruct PPM files and save as PNG to save space + """ + if not (1 <= width <= errors.MAX_PAGE_WIDTH): + raise errors.MaxPageWidthException() + if not (1 <= height <= errors.MAX_PAGE_HEIGHT): + raise errors.MaxPageHeightException() + + ppm_header = f"P6\n{width} {height}\n255\n".encode() + ppm_data = io.BytesIO(ppm_header + rgb_data) + png_path = Path(tempdir) / f"page-{page}.png" + + # Verify the exact data was received + if len(rgb_data) != width * height * 3: + raise errors.InterruptedConversion() -# From global_common: - -# def validate_convert_to_pixel_output(self, common, output): -# """ -# Take the output from the convert to pixels tasks and validate it. Returns -# a tuple like: (success (boolean), error_message (str)) -# """ -# max_image_width = 10000 -# max_image_height = 10000 - -# # Did we hit an error? -# for line in output.split("\n"): -# if ( -# "failed:" in line -# or "The document format is not supported" in line -# or "Error" in line -# ): -# return False, output - -# # How many pages was that? 
-# num_pages = None -# for line in output.split("\n"): -# if line.startswith("Document has "): -# num_pages = line.split(" ")[2] -# break -# if not num_pages or not num_pages.isdigit() or int(num_pages) <= 0: -# return False, "Invalid number of pages returned" -# num_pages = int(num_pages) - -# # Make sure we have the files we expect -# expected_filenames = [] -# for i in range(1, num_pages + 1): -# expected_filenames += [ -# f"page-{i}.rgb", -# f"page-{i}.width", -# f"page-{i}.height", -# ] -# expected_filenames.sort() -# actual_filenames = os.listdir(common.pixel_dir.name) -# actual_filenames.sort() - -# if expected_filenames != actual_filenames: -# return ( -# False, -# f"We expected these files:\n{expected_filenames}\n\nBut we got these files:\n{actual_filenames}", -# ) - -# # Make sure the files are the correct sizes -# for i in range(1, num_pages + 1): -# with open(f"{common.pixel_dir.name}/page-{i}.width") as f: -# w_str = f.read().strip() -# with open(f"{common.pixel_dir.name}/page-{i}.height") as f: -# h_str = f.read().strip() -# w = int(w_str) -# h = int(h_str) -# if ( -# not w_str.isdigit() -# or not h_str.isdigit() -# or w <= 0 -# or w > max_image_width -# or h <= 0 -# or h > max_image_height -# ): -# return False, f"Page {i} has invalid geometry" - -# # Make sure the RGB file is the correct size -# if os.path.getsize(f"{common.pixel_dir.name}/page-{i}.rgb") != w * h * 3: -# return False, f"Page {i} has an invalid RGB file size" - -# return True, True + try: + Image.open(ppm_data).save(png_path, "PNG") + except UnidentifiedImageError as e: + raise errors.PPMtoPNGError() from e diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index 61d641b78..388a160af 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -1,3 +1,4 @@ +import glob import gzip import json import logging @@ -11,7 +12,7 @@ import tempfile from typing import Any, Callable, List, 
Optional, Tuple -from ..conversion.errors import exception_from_error_code +from ..conversion import errors from ..document import Document from ..util import ( get_resource_path, @@ -304,11 +305,33 @@ def _convert_with_tmpdirs( f"Conversion output (doc to pixels):\n{self.sanitize_conversion_str(untrusted_log)}" ) + num_pages = len(glob.glob(f"{pixel_dir}/page-*.rgb")) + self.verify_received_pixel_files(pixel_dir, num_pages) + + for page in range(1, num_pages + 1): + filename_base = f"{pixel_dir}/page-{page}" + rgb_filename = f"{filename_base}.rgb" + width_filename = f"{filename_base}.width" + height_filename = f"{filename_base}.height" + with open(width_filename) as f: + width = int(f.read().strip()) + with open(height_filename) as f: + height = int(f.read().strip()) + with open(rgb_filename, "rb") as rgb_f: + untrusted_pixels = rgb_f.read() + self.convert_pixels_to_png( + str(pixel_dir), page, width, height, rgb_data=untrusted_pixels + ) + + os.remove(rgb_filename) + os.remove(width_filename) + os.remove(height_filename) + if ret != 0: log.error("documents-to-pixels failed") # XXX Reconstruct exception from error code - raise exception_from_error_code(ret) # type: ignore [misc] + raise errors.exception_from_error_code(ret) # type: ignore [misc] else: # TODO: validate convert to pixels output @@ -358,6 +381,25 @@ def _convert_with_tmpdirs( return success + def verify_received_pixel_files( + self, pixel_dir: pathlib.Path, num_pages: int + ) -> None: + """Make sure we have the files we expect""" + + expected_filenames = ["captured_output.txt"] + for i in range(1, num_pages + 1): + expected_filenames += [ + f"page-{i}.rgb", + f"page-{i}.width", + f"page-{i}.height", + ] + expected_filenames.sort() + actual_filenames = os.listdir(pixel_dir) + actual_filenames.sort() + + if expected_filenames != actual_filenames: + raise errors.PixelFilesMismatch() + def get_max_parallel_conversions(self) -> int: # FIXME hardcoded 1 until timeouts are more limited and better handled # 
https://github.com/freedomofpress/dangerzone/issues/257 diff --git a/dangerzone/isolation_provider/qubes.py b/dangerzone/isolation_provider/qubes.py index e06b79d07..b4cb5db7f 100644 --- a/dangerzone/isolation_provider/qubes.py +++ b/dangerzone/isolation_provider/qubes.py @@ -76,7 +76,6 @@ def __convert( ) -> bool: success = False - Path(f"{tempdir}/dangerzone").mkdir() percentage = 0.0 with open(document.input_filename, "rb") as f: @@ -110,10 +109,6 @@ def __convert( width = read_int(self.proc.stdout, timeout=sw.remaining) height = read_int(self.proc.stdout, timeout=sw.remaining) - if not (1 <= width <= errors.MAX_PAGE_WIDTH): - raise errors.MaxPageWidthException() - if not (1 <= height <= errors.MAX_PAGE_HEIGHT): - raise errors.MaxPageHeightException() num_pixels = width * height * 3 # three color channels untrusted_pixels = read_bytes( @@ -122,13 +117,9 @@ def __convert( timeout=sw.remaining, ) - # Wrapper code - with open(f"{tempdir}/dangerzone/page-{page}.width", "w") as f_width: - f_width.write(str(width)) - with open(f"{tempdir}/dangerzone/page-{page}.height", "w") as f_height: - f_height.write(str(height)) - with open(f"{tempdir}/dangerzone/page-{page}.rgb", "wb") as f_rgb: - f_rgb.write(untrusted_pixels) + self.convert_pixels_to_png( + tempdir, page, width, height, rgb_data=untrusted_pixels + ) percentage += percentage_per_page @@ -165,7 +156,9 @@ def print_progress_wrapper(error: bool, text: str, percentage: float) -> None: ) log.info(text) - shutil.move(f"{tempdir}/safe-output-compressed.pdf", document.output_filename) + shutil.move( + Path(tempdir) / "safe-output-compressed.pdf", document.output_filename + ) success = True return success diff --git a/install/linux/dangerzone.spec b/install/linux/dangerzone.spec index 99886e0ef..40a84c743 100644 --- a/install/linux/dangerzone.spec +++ b/install/linux/dangerzone.spec @@ -72,6 +72,7 @@ BuildRequires: python3-devel %if 0%{?_qubes} # Qubes-only requirements Requires: python3-magic +Requires: python3-pillow 
Requires: libreoffice Requires: tesseract # Explicitly require every tesseract model: diff --git a/poetry.lock b/poetry.lock index 51bae3c54..aa99c3a39 100644 --- a/poetry.lock +++ b/poetry.lock @@ -63,7 +63,6 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -195,7 +194,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" @@ -406,7 +404,6 @@ files = [ ] [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] @@ -555,7 +552,6 @@ files = [ [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} typing-extensions = ">=4.1.0" [package.extras] @@ -597,6 +593,73 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pillow" +version = "10.1.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, + 
{file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, + {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, + {file = 
"Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, + {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, + {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, + {file = 
"Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, + {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, + {file = 
"Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, + {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, + {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", 
"markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + [[package]] name = "platformdirs" version = "3.11.0" @@ -608,9 +671,6 @@ files = [ {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} - [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] @@ -626,9 +686,6 @@ files = [ {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -656,7 +713,6 @@ files = [ [package.dependencies] altgraph = "*" -importlib-metadata = {version = ">=1.4", markers = "python_version < \"3.8\""} macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} pyinstaller-hooks-contrib = ">=2021.4" setuptools = ">=42.0.0" @@ -747,7 +803,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -917,56 +972,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "typed-ast" -version = "1.5.5" -description = "a fork of Python 2 and 3 ast modules with type comment support" -optional = false -python-versions = ">=3.6" -files = [ - {file = 
"typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, - {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, - {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, - {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, - {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, - {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, - {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, - {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, - {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, - {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, - {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, - {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, - {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, 
- {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, - {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, - {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, - {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, - {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, - {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, - {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, - {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, - {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, - {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, - {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, - {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, - {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, - {file = 
"typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, - {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, - {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, - {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, - {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, - {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, - {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, - {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, - {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, - {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, - {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, - {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, - {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, - {file = 
"typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, - {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, -] - [[package]] name = "types-markdown" version = "3.4.2.10" @@ -1048,5 +1053,5 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" -python-versions = ">=3.7,<3.12" -content-hash = "fd4f4e9362f23cf48a16bfe8c11ed4eb68a4ae7d515c33f5e98b42ef270cac57" +python-versions = ">=3.8,<3.12" +content-hash = "e64927a095d5c731c11900e2b679f3915a672f4660ad78514666d906f5880df8" diff --git a/pyproject.toml b/pyproject.toml index 48e2382d7..0e748a45a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,7 @@ include = [ ] [tool.poetry.dependencies] -python = ">=3.7,<3.12" +python = ">=3.8,<3.12" click = "*" appdirs = "*" PySide6 = "^6.4.1" @@ -21,6 +21,7 @@ pyxdg = {version = "*", platform = "linux"} requests = "*" markdown = "*" packaging = "*" +pillow = "^10.1.0" [tool.poetry.scripts] dangerzone = 'dangerzone:main' diff --git a/stdeb.cfg b/stdeb.cfg index cf51d2f8f..f51ac9ced 100644 --- a/stdeb.cfg +++ b/stdeb.cfg @@ -1,6 +1,6 @@ [DEFAULT] Package3: dangerzone -Depends3: podman, python3, python3-pyside2.qtcore, python3-pyside2.qtgui, python3-pyside2.qtwidgets, python3-pyside2.qtsvg, python3-appdirs, python3-click, python3-xdg, python3-colorama, python3-requests, python3-markdown, python3-packaging +Depends3: podman, python3, python3-pyside2.qtcore, python3-pyside2.qtgui, python3-pyside2.qtwidgets, python3-pyside2.qtsvg, python3-appdirs, python3-click, python3-xdg, python3-colorama, python3-requests, python3-markdown, python3-packaging, python3-pillow Build-Depends: dh-python, python3, python3-setuptools, python3-stdeb Suite: bionic X-Python3-Version: >= 3.6 diff --git a/tests/isolation_provider/base.py b/tests/isolation_provider/base.py index 52a7d0f0b..88bb32b37 100644 
--- a/tests/isolation_provider/base.py +++ b/tests/isolation_provider/base.py @@ -9,7 +9,13 @@ from dangerzone.isolation_provider import base from dangerzone.isolation_provider.qubes import running_on_qubes -from .. import pdf_11k_pages, sanitized_text, uncommon_text +from .. import ( + pdf_11k_pages, + sample_bad_height, + sample_bad_width, + sanitized_text, + uncommon_text, +) @pytest.mark.skipif( @@ -68,3 +74,18 @@ def test_max_pages_received( with pytest.raises(errors.MaxPagesException): success = provider._convert(doc, ocr_lang=None) assert not success + + def test_max_dimensions( + self, + sample_bad_width: str, + sample_bad_height: str, + provider: base.IsolationProvider, + mocker: MockerFixture, + ) -> None: + provider.progress_callback = mocker.MagicMock() + with pytest.raises(errors.MaxPageWidthException): + success = provider._convert(Document(sample_bad_width), ocr_lang=None) + assert not success + with pytest.raises(errors.MaxPageHeightException): + success = provider._convert(Document(sample_bad_height), ocr_lang=None) + assert not success diff --git a/tests/isolation_provider/test_container.py b/tests/isolation_provider/test_container.py index 4ad7831d1..da5dce8e1 100644 --- a/tests/isolation_provider/test_container.py +++ b/tests/isolation_provider/test_container.py @@ -9,7 +9,13 @@ from dangerzone.isolation_provider.container import Container # XXX Fixtures used in abstract Test class need to be imported regardless -from .. import pdf_11k_pages, sanitized_text, uncommon_text +from .. 
import ( + pdf_11k_pages, + sample_bad_height, + sample_bad_width, + sanitized_text, + uncommon_text, +) from .base import IsolationProviderTest diff --git a/tests/isolation_provider/test_qubes.py b/tests/isolation_provider/test_qubes.py index 78ce9e21f..ff0b37ffc 100644 --- a/tests/isolation_provider/test_qubes.py +++ b/tests/isolation_provider/test_qubes.py @@ -45,21 +45,6 @@ def test_max_pages_client_side_enforcement( success = provider._convert(doc, ocr_lang=None) assert not success - def test_max_dimensions( - self, - sample_bad_width: str, - sample_bad_height: str, - provider: Qubes, - mocker: MockerFixture, - ) -> None: - provider.progress_callback = mocker.MagicMock() - with pytest.raises(errors.MaxPageWidthException): - success = provider._convert(Document(sample_bad_width), ocr_lang=None) - assert not success - with pytest.raises(errors.MaxPageHeightException): - success = provider._convert(Document(sample_bad_height), ocr_lang=None) - assert not success - def test_out_of_ram( self, provider: Qubes,