diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 7ff3741fa..d44ce760a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -35,12 +35,12 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: python config-file: ./.github/codeql/config.yml - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 with: category: ".github/workflows/codeql-analysis.yml:analyze/language:python" diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 9af0a6d96..43c9494c3 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,7 +1,7 @@ name: Lint on: [push, pull_request] env: - PYTHON_VERSION: 3.9 + PYTHON_VERSION: "3.10" jobs: lint: @@ -14,7 +14,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 232d36124..beb69330a 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -1,7 +1,7 @@ name: Mypy (Type check) on: [push, pull_request] env: - PYTHON_VERSION: 3.9 + PYTHON_VERSION: "3.10" jobs: mypy: @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python 🐍 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 8f7f220eb..aa9ad1112 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -32,7 +32,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -83,7 +83,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" @@ -112,6 +112,34 @@ jobs: files: ./coverage.xml flags: migration + docker-test-migrations: + name: Docker migration test + runs-on: ubuntu-latest + permissions: + contents: read + + # We want to run on external PRs, but not on our own internal PRs as they'll be run + # by the push to the branch. This prevents duplicated runs on internal PRs. 
+ # Some discussion of this here: + # https://github.community/t/duplicate-checks-on-push-and-pull-request-simultaneous-event/18012 + if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + fetch-depth: 0 + + # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 + - name: Disable network offload + run: sudo ethtool -K eth0 tx off rx off + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Test migrations + run: ./docker/ci/test_migrations.sh + docker-image-build: name: Docker build runs-on: ubuntu-latest @@ -308,7 +336,7 @@ jobs: if: false # Disable temporarily name: Push circ-${{ matrix.image }} runs-on: ubuntu-latest - needs: [test, test-migrations, docker-image-build, docker-image-test] + needs: [test, test-migrations, docker-test-migrations, docker-image-build, docker-image-test] permissions: contents: read packages: write @@ -334,7 +362,7 @@ jobs: uses: docker/setup-buildx-action@v3 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ef1fbb1ad..03609950c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,8 +21,7 @@ repos: hooks: - id: pyupgrade args: - - --py38-plus - - --keep-runtime-typing + - --py310-plus - repo: https://github.com/MarcoGorelli/absolufy-imports rev: v0.3.0 diff --git a/README.md b/README.md index 0d274cb90..b41a78332 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,11 @@ # E-kirjasto Circulation Manager +[![Test & Build](https://github.com/NatLibFi/ekirjasto-circulation/actions/workflows/test-build.yml/badge.svg)](https://github.com/NatLibFi/ekirjasto-circulation/actions/workflows/test-build.yml) + +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) +[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) +![Python: 3.10,3.11](https://img.shields.io/badge/Python-3.10%20|%203.11-blue) This is the E-kirjasto fork of the [The Palace Project](https://thepalaceproject.org) Palace Manager (which is a fork of [Library Simplified](http://www.librarysimplified.org/) Circulation Manager). @@ -284,13 +290,13 @@ export SIMPLIFIED_MAIL_SENDER=sender@example.com As mentioned in the [pyenv](#pyenv) section, the `poetry` tool should be executed under a virtual environment in order to guarantee that it will use the Python version you expect. To use a particular Python version, you should create a local virtual environment in the cloned `circulation` repository directory. Assuming that -you want to use, for example, Python 3.9.9: +you want to use, for example, Python 3.11.1: ```sh -pyenv virtualenv 3.9.9 circ +pyenv virtualenv 3.11.1 circ ``` -This will create a new local virtual environment called `circ` that uses Python 3.9.9. Switch to that environment: +This will create a new local virtual environment called `circ` that uses Python 3.11.1. Switch to that environment: ```sh pyenv local circ @@ -298,7 +304,7 @@ pyenv local circ On most systems, using `pyenv` will adjust your shell prompt to indicate which virtual environment you are now in. 
 For example, the version of Python installed in your operating system might be `3.10.1`, but
-using a virtual environment can substitute, for example, `3.9.9`:
+using a virtual environment can substitute, for example, `3.11.1`:
 
 ```sh
 $ python --version
@@ -306,7 +312,7 @@ Python 3.10.1
 $ pyenv local circ
 
 (circ) $ python --version
-Python 3.9.9
+Python 3.11.1
 ```
 
 For brevity, these instructions assume that all shell commands will be executed within a virtual environment.
@@ -586,7 +592,7 @@ poetry install --only ci
 
 ## Testing
 
-The Github Actions CI service runs the unit tests against Python 3.8, 3.9, 3.10, and 3.11 automatically using
+The GitHub Actions CI service runs the unit tests against Python 3.10 and 3.11 automatically using
 [tox](https://tox.readthedocs.io/en/latest/).
 
 Tox has an environment for each python version, the module being tested, and an optional `-docker` factor that will
@@ -602,8 +608,6 @@ with service dependencies running in docker containers.
 
 | Factor | Python Version |
 | ------ | -------------- |
-| py38   | Python 3.8     |
-| py39   | Python 3.9     |
 | py310  | Python 3.10    |
 | py311  | Python 3.11    |
 
@@ -752,6 +756,23 @@ module under the hood to do the profiling.
 
 This profiler uses [PyInstrument](https://pyinstrument.readthedocs.io/en/latest/) to profile the code.
 
+#### Profiling the test suite
+
+PyInstrument can also be used to profile the test suite. This can be useful for identifying slow tests or
+performance regressions.
+
+To profile the core test suite, run the following command:
+
+```sh
+pyinstrument -m pytest --no-cov tests/core/
+```
+
+To profile the API test suite, run the following command:
+
+```sh
+pyinstrument -m pytest --no-cov tests/api/
+```
+
 #### Environment Variables
 
 - `PALACE_PYINSTRUMENT`: Profiling will be enabled if this variable is set. The saved profile data will be available at
@@ -759,21 +780,22 @@ This profiler uses [PyInstrument](https://pyinstrument.readthedocs.io/en/latest/
   - The profile data will have the extension `.pyisession`.
   - The data can be accessed with the
-  [`pyinstrument.session.Session` class](https://pyinstrument.readthedocs.io/en/latest/reference.html#pyinstrument.session.Session).
+    [`pyinstrument.session.Session` class](https://pyinstrument.readthedocs.io/en/latest/reference.html#pyinstrument.session.Session).
   - Example code to print details of the gathered statistics:
-  ```python
-  import os
-  from pathlib import Path
-
-  from pyinstrument.renderers import HTMLRenderer
-  from pyinstrument.session import Session
-
-  path = Path(os.environ.get("PALACE_PYINSTRUMENT"))
-  for file in path.glob("*.pyisession"):
-      session = Session.load(file)
-      renderer = HTMLRenderer()
-      renderer.open_in_browser(session)
-  ```
+
+    ```python
+    import os
+    from pathlib import Path
+
+    from pyinstrument.renderers import HTMLRenderer
+    from pyinstrument.session import Session
+
+    path = Path(os.environ.get("PALACE_PYINSTRUMENT"))
+    for file in path.glob("*.pyisession"):
+        session = Session.load(file)
+        renderer = HTMLRenderer()
+        renderer.open_in_browser(session)
+    ```
 
 ### Other Environment Variables
 
diff --git a/alembic/versions/20230213_6f96516c7a7b_initial.py b/alembic/versions/20230213_6f96516c7a7b_initial.py
deleted file mode 100644
index fd9aaca19..000000000
--- a/alembic/versions/20230213_6f96516c7a7b_initial.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""initial
-
-Revision ID: 6f96516c7a7b
-Revises:
-Create Date: 2022-10-06 06:50:45.512958+00:00
-
-"""
-
-from alembic import op
-
-# revision identifiers, used by Alembic.
-revision = "6f96516c7a7b" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # Remove some tables that are hanging around in some instances - # These have been removed from code some time ago - op.execute( - "ALTER TABLE IF EXISTS libraryalias DROP CONSTRAINT IF EXISTS ix_libraryalias_language" - ) - op.execute( - "ALTER TABLE IF EXISTS libraryalias DROP CONSTRAINT IF EXISTS ix_libraryalias_library_id" - ) - op.execute( - "ALTER TABLE IF EXISTS libraryalias DROP CONSTRAINT IF EXISTS ix_libraryalias_name" - ) - op.execute("DROP TABLE IF EXISTS libraryalias") - - op.execute( - "ALTER TABLE IF EXISTS complaints DROP CONSTRAINT IF EXISTS ix_complaints_license_pool_id" - ) - op.execute( - "ALTER TABLE IF EXISTS complaints DROP CONSTRAINT IF EXISTS ix_complaints_source" - ) - op.execute( - "ALTER TABLE IF EXISTS complaints DROP CONSTRAINT IF EXISTS ix_complaints_type" - ) - op.execute("DROP TABLE IF EXISTS complaints") - - -def downgrade() -> None: - # No need to re-add these tables, since they are long gone - ... diff --git a/alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py b/alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py deleted file mode 100644 index bbd5804de..000000000 --- a/alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Remove admin.credential column - -Revision ID: 0c2fe32b5649 -Revises: 6f96516c7a7b -Create Date: 2023-02-20 12:36:15.204519+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "0c2fe32b5649" -down_revision = "6f96516c7a7b" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_column("admins", "credential") - - -def downgrade() -> None: - op.add_column("admins", sa.Column("credential", sa.Unicode(), nullable=True)) diff --git a/alembic/versions/20230412_dac99ae0c6fd_integration_status.py b/alembic/versions/20230412_dac99ae0c6fd_integration_status.py deleted file mode 100644 index 54f58f54f..000000000 --- a/alembic/versions/20230412_dac99ae0c6fd_integration_status.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Integration Status - -Revision ID: dac99ae0c6fd -Revises: 0c2fe32b5649 -Create Date: 2023-04-12 06:58:21.560292+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "dac99ae0c6fd" -down_revision = "0c2fe32b5649" -branch_labels = None -depends_on = None - -status_enum = sa.Enum("green", "red", name="external_integration_status") - - -def upgrade() -> None: - # ### commands auto generated by Alembic ### - op.create_table( - "externalintegrationerrors", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("time", sa.DateTime(), nullable=True), - sa.Column("error", sa.Unicode(), nullable=True), - sa.Column("external_integration_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["external_integration_id"], - ["externalintegrations.id"], - name="fk_error_externalintegrations_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id"), - ) - - status_enum.create(op.get_bind()) - op.add_column( - "externalintegrations", - sa.Column( - "status", - status_enum, - server_default="green", - nullable=True, - ), - ) - op.add_column( - "externalintegrations", - sa.Column("last_status_update", sa.DateTime(), nullable=True), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic ### - op.drop_table("externalintegrationerrors") - op.drop_column("externalintegrations", "last_status_update") - op.drop_column("externalintegrations", "status") - status_enum.drop(op.get_bind()) - # ### end Alembic commands ### diff --git a/alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py b/alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py deleted file mode 100644 index 2fbaa47c1..000000000 --- a/alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Rename elasticsearch integration - -Revision ID: 3ee5b99f2ae7 -Revises: dac99ae0c6fd -Create Date: 2023-04-24 06:24:45.721475+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "3ee5b99f2ae7" -down_revision = "dac99ae0c6fd" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.execute( - "UPDATE externalintegrations SET protocol='Opensearch' where protocol='Elasticsearch'" - ) - - -def downgrade() -> None: - op.execute( - "UPDATE externalintegrations SET protocol='Elasticsearch' where protocol='Opensearch'" - ) diff --git a/alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py b/alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py deleted file mode 100644 index 8b767a808..000000000 --- a/alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py +++ /dev/null @@ -1,55 +0,0 @@ -"""update sirsi auth config - -Revision ID: 5dcbc92c20b2 -Revises: 3ee5b99f2ae7 -Create Date: 2023-04-27 22:53:36.584426+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "5dcbc92c20b2" -down_revision = "3ee5b99f2ae7" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # Update the SirsiDynix auth config to use library_identifier_restriction - # instead of the old LIBRARY_PREFIX setting. - # This migration leaves the old LIBRARY_PREFIX setting in place, but unused - # in case we need to roll this back. We can clean up the old setting in a - # later migration. 
- connection = op.get_bind() - settings = connection.execute( - "select ei.id, cs.library_id, cs.value from externalintegrations as ei join " - "configurationsettings cs on ei.id = cs.external_integration_id " - "where ei.protocol = 'api.sirsidynix_authentication_provider' and " - "ei.goal = 'patron_auth' and cs.key = 'LIBRARY_PREFIX'" - ) - - for setting in settings: - connection.execute( - "UPDATE configurationsettings SET value = (%s) " - "WHERE external_integration_id = (%s) and library_id = (%s) " - "and key = 'library_identifier_restriction'", - (setting.value, setting.id, setting.library_id), - ) - connection.execute( - "UPDATE configurationsettings SET value = 'patronType' " - "WHERE external_integration_id = (%s) and library_id = (%s) " - "and key = 'library_identifier_field'", - (setting.id, setting.library_id), - ) - connection.execute( - "UPDATE configurationsettings SET value = 'prefix' " - "WHERE external_integration_id = (%s) and library_id = (%s) " - "and key = 'library_identifier_restriction_type'", - (setting.id, setting.library_id), - ) - - -def downgrade() -> None: - # These updated settings shouldn't cause any issues if left in place - # when downgrading so we leave them alone. - pass diff --git a/alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py b/alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py deleted file mode 100644 index 2e827d11b..000000000 --- a/alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Remove import coverage records without collections - -Revision ID: f9985f6b7767 -Revises: 5dcbc92c20b2 -Create Date: 2023-05-01 10:07:45.737475+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "f9985f6b7767" -down_revision = "5dcbc92c20b2" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.execute( - "DELETE FROM coveragerecords WHERE collection_id IS NULL AND operation='import'" - ) - - -def downgrade() -> None: - pass diff --git a/alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py b/alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py deleted file mode 100644 index e5545be1a..000000000 --- a/alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py +++ /dev/null @@ -1,222 +0,0 @@ -"""Add integration_configurations - -Revision ID: a9ed3f76d649 -Revises: 5a425ebe026c -Create Date: 2023-05-10 19:50:47.458800+00:00 - -""" -import json -from collections import defaultdict -from typing import Dict, Tuple, Type, TypeVar - -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.engine import Connection, Row - -from alembic import op -from api.authentication.base import AuthenticationProvider -from api.integration.registry.patron_auth import PatronAuthRegistry -from core.integration.settings import ( - BaseSettings, - ConfigurationFormItemType, - FormFieldInfo, -) -from core.model import json_serializer - -# revision identifiers, used by Alembic. 
-revision = "a9ed3f76d649" -down_revision = "5a425ebe026c" -branch_labels = None -depends_on = None - - -def _create_tables() -> None: - op.create_table( - "integration_configurations", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("protocol", sa.Unicode(), nullable=False), - sa.Column("goal", sa.Enum("PATRON_AUTH_GOAL", name="goals"), nullable=False), - sa.Column("name", sa.Unicode(), nullable=False), - sa.Column("settings", postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column( - "self_test_results", postgresql.JSONB(astext_type=sa.Text()), nullable=False - ), - sa.Column("status", sa.Enum("RED", "GREEN", name="status"), nullable=False), - sa.Column("last_status_update", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), - ) - op.create_index( - op.f("ix_integration_configurations_goal"), - "integration_configurations", - ["goal"], - unique=False, - ) - op.create_table( - "integration_errors", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("time", sa.DateTime(), nullable=True), - sa.Column("error", sa.Unicode(), nullable=True), - sa.Column("integration_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["integration_id"], - ["integration_configurations.id"], - name="fk_integration_error_integration_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "integration_library_configurations", - sa.Column("parent_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("settings", postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.ForeignKeyConstraint(["library_id"], ["libraries.id"], ondelete="CASCADE"), - sa.ForeignKeyConstraint( - ["parent_id"], ["integration_configurations.id"], ondelete="CASCADE" - ), - sa.PrimaryKeyConstraint("parent_id", "library_id"), - ) - - -T = TypeVar("T", bound=BaseSettings) - - -def _validate_and_load_settings( - settings_class: Type[T], settings_dict: Dict[str, str] -) -> T: - aliases = { - f.alias: f.name - for f in settings_class.__fields__.values() - if f.alias is not None - } - parsed_settings_dict = {} - for key, setting in settings_dict.items(): - if key in aliases: - key = aliases[key] - field = settings_class.__fields__.get(key) - if field is None or not isinstance(field.field_info, FormFieldInfo): - continue - config_item = field.field_info.form - if ( - config_item.type == ConfigurationFormItemType.LIST - or config_item.type == ConfigurationFormItemType.MENU - ): - parsed_settings_dict[key] = json.loads(setting) - else: - parsed_settings_dict[key] = setting - return settings_class(**parsed_settings_dict) - - -def _migrate_external_integration( - connection: Connection, - integration: Row, - protocol_class: Type[AuthenticationProvider], -) -> Tuple[int, Dict[str, Dict[str, str]]]: - settings = connection.execute( - "select cs.library_id, cs.key, cs.value from configurationsettings cs " - "where cs.external_integration_id = (%s)", - (integration.id,), - ) - settings_dict = {} - library_settings: Dict[str, Dict[str, str]] = defaultdict(dict) - self_test_results = json_serializer({}) - for setting in settings: - if not setting.value: - continue - if setting.key == "self_test_results": - self_test_results = setting.value - continue - if setting.library_id: - library_settings[setting.library_id][setting.key] = setting.value - else: - settings_dict[setting.key] = setting.value - - # Load and validate the settings before storing them in the database. 
- settings_class = protocol_class.settings_class() - settings_obj = _validate_and_load_settings(settings_class, settings_dict) - integration_configuration = connection.execute( - "insert into integration_configurations " - "(protocol, goal, name, settings, self_test_results, status) " - "values (%s, 'PATRON_AUTH_GOAL', %s, %s, %s, 'GREEN')" - "returning id", - ( - integration.protocol, - integration.name, - json_serializer(settings_obj.dict()), - self_test_results, - ), - ).fetchone() - assert integration_configuration is not None - return integration_configuration[0], library_settings - - -def _migrate_library_settings( - connection: Connection, - integration_id: int, - library_id: int, - library_settings: Dict[str, str], - protocol_class: Type[AuthenticationProvider], -) -> None: - library_settings_class = protocol_class.library_settings_class() - library_settings_obj = _validate_and_load_settings( - library_settings_class, library_settings - ) - connection.execute( - "insert into integration_library_configurations " - "(parent_id, library_id, settings) " - "values (%s, %s, %s)", - ( - integration_id, - library_id, - json_serializer(library_settings_obj.dict()), - ), - ) - - -def _migrate_settings() -> None: - connection = op.get_bind() - external_integrations = connection.execute( - "select ei.id, ei.protocol, ei.name from externalintegrations ei " - "where ei.goal = 'patron_auth'" - ) - - patron_auth_registry = PatronAuthRegistry() - for external_integration in external_integrations: - protocol_class = patron_auth_registry[external_integration.protocol] - integration_id, library_settings = _migrate_external_integration( - connection, external_integration, protocol_class - ) - external_integration_library = connection.execute( - "select library_id from externalintegrations_libraries where externalintegration_id = %s", - (external_integration.id,), - ) - for library in external_integration_library: - _migrate_library_settings( - connection, - integration_id, - library.library_id, - library_settings[library.library_id], - protocol_class, - ) - - -def upgrade() -> None: - # Add new tables for tracking integration configurations and errors. - _create_tables() - - # Migrate settings from the configurationsettings table into integration_configurations. - # We leave the existing settings in the table, but they will no longer be used. - _migrate_settings() - - -def downgrade() -> None: - op.drop_table("integration_library_configurations") - op.drop_table("integration_errors") - op.drop_index( - op.f("ix_integration_configurations_goal"), - table_name="integration_configurations", - ) - op.drop_table("integration_configurations") - sa.Enum(name="goals").drop(op.get_bind(), checkfirst=False) - sa.Enum(name="status").drop(op.get_bind(), checkfirst=False) diff --git a/alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py b/alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py deleted file mode 100644 index e85745be3..000000000 --- a/alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py +++ /dev/null @@ -1,131 +0,0 @@ -"""Migrate millenium APIs to POST - -Revision ID: 5a425ebe026c -Revises: f9985f6b7767 -Create Date: 2023-05-12 08:36:16.603825+00:00 - -""" -import logging -import re - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "5a425ebe026c" -down_revision = "f9985f6b7767" -branch_labels = None -depends_on = None - - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - -KEY = "use_post_requests" - - -def match_expression(url: str) -> bool: - expressions = [ - r"^https?://vlc\.(.*?\.)?palaceproject\.io", - r"^https?://vlc\.thepalaceproject\.org", - r"^(http://)?localhost", - ] - for expr in expressions: - match = re.match(expr, url) - if match is not None: - return True - - return False - - -def upgrade() -> None: - """Set 'use_post_requests' to 'true' for 'api.millenium' integrations. - But only those that are for the following urls - - vlc.thepalaceproject.org - - vlc.*.palaceproject.io - - localhost - """ - conn = op.get_bind() - # Find the relevant external integrations - result_ids = conn.execute( - "SELECT id FROM externalintegrations where protocol='api.millenium_patron'" - ) - - # Query to pull specific config values - config_query = "SELECT value from configurationsettings where external_integration_id={integration_id} and key='{key}'" - - # For each millenium integration found - for [integration_id] in result_ids or []: - # Pull the URL setting - config_results = conn.execute( - config_query.format(integration_id=integration_id, key="url") - ) - url_results = list(config_results) - if config_results and len(url_results) > 0: - url = url_results[0][0] - else: - log.info(f"No URL found for integration: {integration_id}") - continue - - # Check if it is something we want to change at all - if not match_expression(url): - log.info(f"Not an internal millenium implementation: {url}") - continue - - # Pull the post requests setting - config_results = conn.execute( - config_query.format(integration_id=integration_id, key=f"{KEY}") - ) - post_results = list(config_results) - # This setting may or may not exist - if config_results and len(post_results) > 0: - use_post = post_results[0][0] - if use_post is None: - use_post = "false" - else: - use_post = None - - # Make the changes - if use_post is None: - log.info(f"'{KEY}' setting does not exist for {url}, creating...") - conn.execute( - "INSERT INTO configurationsettings(external_integration_id, library_id, key, value)" - + f" VALUES ({integration_id}, NULL, '{KEY}', 'true')" - ) - elif use_post == "false": - log.info(f"'{KEY}' is disabled for {url}, enabling...") - conn.execute( - "UPDATE configurationsettings SET value='true'" - + f"WHERE external_integration_id={integration_id} and key='{KEY}'" - ) - else: - log.info(f"'{KEY}' for {url} is already {use_post}, ignoring...") - - -def downgrade() -> None: - """Set all internal millenium integrations to not use POST""" - conn = op.get_bind() - result_ids = conn.execute( - "SELECT id FROM externalintegrations where protocol='api.millenium_patron'" - ) - for [integration_id] in result_ids: - log.info(f"Forcing '{KEY}' to 'false' for {integration_id}") - conn.execute( - "UPDATE configurationsettings SET value='false'" - + f" WHERE external_integration_id={integration_id} AND key='{KEY}'" - ) - - -if __name__ == "__main__": - # Some testing code - assert match_expression("http://vlc.dev.palaceproject.io/api") == True - assert match_expression("https://vlc.staging.palaceproject.io/PATRONAPI") == True - assert match_expression("localhost:6500/PATRONAPI") == True - assert match_expression("http://localhost:6500/api") == True - assert match_expression("https://vlc.thepalaceproject.org/anything...") == True - assert 
match_expression("https://vendor.millenium.com/PATRONAPI") == False - - import sys - - log.addHandler(logging.StreamHandler(sys.stdout)) - log.info("Match expression tests passed!!") diff --git a/alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py b/alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py deleted file mode 100644 index 6e7d95bd6..000000000 --- a/alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py +++ /dev/null @@ -1,109 +0,0 @@ -"""revert pr 980 - -Revision ID: 0a1c9c3f5dd2 -Revises: a9ed3f76d649 -Create Date: 2023-05-25 19:07:04.474551+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "0a1c9c3f5dd2" -down_revision = "a9ed3f76d649" -branch_labels = None -depends_on = None - - -ext_int_status_enum = sa.Enum("green", "red", name="external_integration_status") -int_status_enum = sa.Enum("GREEN", "RED", name="status") - - -def upgrade() -> None: - # Drop external integration errors tables - op.drop_table("externalintegrationerrors") - op.drop_column("externalintegrations", "last_status_update") - op.drop_column("externalintegrations", "status") - ext_int_status_enum.drop(op.get_bind()) - - # Drop integration errors tables - op.drop_table("integration_errors") - op.drop_column("integration_configurations", "status") - op.drop_column("integration_configurations", "last_status_update") - int_status_enum.drop(op.get_bind()) - - -def downgrade() -> None: - ext_int_status_enum.create(op.get_bind()) - op.add_column( - "externalintegrations", - sa.Column( - "status", - postgresql.ENUM("green", "red", name="external_integration_status"), - server_default=sa.text("'green'::external_integration_status"), - autoincrement=False, - nullable=True, - ), - ) - op.add_column( - "externalintegrations", - sa.Column( - "last_status_update", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - ) - op.create_table( - "externalintegrationerrors", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("error", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column( - "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - sa.ForeignKeyConstraint( - ["external_integration_id"], - ["externalintegrations.id"], - name="fk_error_externalintegrations_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id", name="externalintegrationerrors_pkey"), - ) - - int_status_enum.create(op.get_bind()) - op.add_column( - "integration_configurations", - sa.Column( - "last_status_update", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - ) - op.add_column( - "integration_configurations", - sa.Column( - "status", - postgresql.ENUM("RED", "GREEN", name="status"), - autoincrement=False, - nullable=False, - server_default=sa.text("'GREEN'::status"), - ), - ) - op.create_table( - "integration_errors", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("error", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("integration_id", sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint( - ["integration_id"], - ["integration_configurations.id"], - name="fk_integration_error_integration_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id", name="integration_errors_pkey"), 
- ) diff --git a/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py b/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py deleted file mode 100644 index ea99802e9..000000000 --- a/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Migrate license integrations to configuration settings - -Revision ID: 0af587ff8595 -Revises: b883671b7bc5 -Create Date: 2023-05-31 12:34:42.550703+00:00 - -""" - -from typing import Type - -from alembic import op -from api.integration.registry.license_providers import LicenseProvidersRegistry -from core.integration.base import HasLibraryIntegrationConfiguration -from core.integration.settings import BaseSettings -from core.migration.migrate_external_integration import ( - _migrate_external_integration, - _migrate_library_settings, - get_configuration_settings, - get_integrations, - get_library_for_integration, -) - -# revision identifiers, used by Alembic. -revision = "0af587ff8595" -down_revision = "b883671b7bc5" -branch_labels = None -depends_on = None - - -LICENSE_GOAL = "LICENSE_GOAL" - - -def upgrade() -> None: - registry = LicenseProvidersRegistry() - - connection = op.get_bind() - - # Fetch all license type integrations - # The old enum had 'licenses', the new enum has 'LICENSE_GOAL' - integrations = get_integrations(connection, "licenses") - for integration in integrations: - _id, protocol, name = integration - - # Get the right API class for it - api_class = registry.get(protocol, None) - if not api_class: - raise RuntimeError(f"Could not find API class for '{protocol}'") - - # Create the settings and library settings dicts from the configurationsettings - settings_dict, library_settings, self_test_result = get_configuration_settings( - connection, integration - ) - - # License type integrations take their external_account_id data from the collection. 
-        # The configurationsetting for it seems to be unused, so we take the value from the collection
-        collection = connection.execute(
-            "select id, external_account_id, name from collections where external_integration_id = %s",
-            integration.id,
-        ).fetchone()
-        if not collection:
-            raise RuntimeError(
-                f"Could not fetch collection for integration {integration}"
-            )
-        settings_class: Type[BaseSettings] = api_class.settings_class()
-        if "external_account_id" in settings_class.__fields__:
-            settings_dict["external_account_id"] = collection.external_account_id
-
-        # Write the configurationsettings into the integration_configurations table
-        integration_id = _migrate_external_integration(
-            connection,
-            integration,
-            api_class,
-            LICENSE_GOAL,
-            settings_dict,
-            self_test_result,
-            name=collection.name,
-        )
-
-        # Connect the collection to the settings
-        connection.execute(
-            "UPDATE collections SET integration_configuration_id=%s where id=%s",
-            (integration_id, collection.id),
-        )
-
-        # If we have library settings too, then write each one into its own row
-        if issubclass(api_class, HasLibraryIntegrationConfiguration):
-            integration_libraries = get_library_for_integration(connection, _id)
-            for library in integration_libraries:
-                _migrate_library_settings(
-                    connection,
-                    integration_id,
-                    library.library_id,
-                    library_settings[library.library_id],
-                    api_class,
-                )
-
-
-def downgrade() -> None:
-    connection = op.get_bind()
-    connection.execute(
-        "DELETE from integration_configurations where goal = %s", LICENSE_GOAL
-    )
diff --git a/alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py b/alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py
deleted file mode 100644
index cc334b48f..000000000
--- a/alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""Add the license type goal
-
-Revision ID: b883671b7bc5
-Revises: 0a1c9c3f5dd2
-Create Date: 2023-05-31 10:50:32.045821+00:00
-
-"""
-import sqlalchemy as sa
-from sqlalchemy.exc import ProgrammingError
-
-from alembic import op
-
-# revision identifiers, used by Alembic.
-revision = "b883671b7bc5"
-down_revision = "0a1c9c3f5dd2"
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
-    # We need to use an autocommit block since the next migration is going to use
-    # the new enum value immediately, so we must ensure the value is committed
-    # before the next migration runs.
-    # Additionally, since we are autocommitting this change we MUST ensure we
-    # assume the schemas may already exist while upgrading to this change.
-    # This happens in case the data migration in 0af587 fails and an automatic rollback occurs.
- # In which case, due to the autocommit, these schema changes will not get rolled back - with op.get_context().autocommit_block(): - op.execute(f"ALTER TYPE goals ADD VALUE IF NOT EXISTS 'LICENSE_GOAL'") - - try: - op.add_column( - "collections", - sa.Column("integration_configuration_id", sa.Integer(), nullable=True), - ) - except ProgrammingError as ex: - if "DuplicateColumn" not in str(ex): - raise - - try: - op.create_index( - op.f("ix_collections_integration_configuration_id"), - "collections", - ["integration_configuration_id"], - unique=True, - ) - except ProgrammingError as ex: - if "DuplicateTable" not in str(ex): - raise - - try: - op.create_foreign_key( - None, - "collections", - "integration_configurations", - ["integration_configuration_id"], - ["id"], - ondelete="SET NULL", - ) - except ProgrammingError as ex: - if "DuplicateColumn" not in str(ex): - raise - - -def downgrade() -> None: - """There is no way to drop single values from an Enum from postgres""" - op.drop_constraint( - "collections_integration_configuration_id_fkey", - "collections", - type_="foreignkey", - ) - op.drop_index( - op.f("ix_collections_integration_configuration_id"), table_name="collections" - ) - op.drop_column("collections", "integration_configuration_id") diff --git a/alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py b/alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py deleted file mode 100644 index 1273c14ae..000000000 --- a/alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py +++ /dev/null @@ -1,58 +0,0 @@ -"""opds for distributors unlimited access - -Revision ID: 28717fc6e50f -Revises: 0af587ff8595 -Create Date: 2023-06-06 10:08:35.892018+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "28717fc6e50f" -down_revision = "0af587ff8595" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - connection = op.get_bind() - connection.execute( - """ - UPDATE - licensepools AS lp - SET - licenses_owned = -1, - licenses_available = -1 - FROM - collections c, - externalintegrations e - WHERE - lp.licenses_owned = 1 - and lp.licenses_available = 1 - and lp.collection_id = c.id - and c.external_integration_id = e.id - and e.protocol = 'OPDS for Distributors' - """ - ) - - -def downgrade() -> None: - connection = op.get_bind() - connection.execute( - """ - UPDATE - licensepools AS lp - SET - licenses_owned = 1, - licenses_available = 1 - FROM - collections c, - externalintegrations e - WHERE - lp.licenses_owned = -1 - and lp.licenses_available = -1 - and lp.collection_id = c.id - and c.external_integration_id = e.id - and e.protocol = 'OPDS for Distributors' - """ - ) diff --git a/alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py b/alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py deleted file mode 100644 index ff44f8732..000000000 --- a/alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Remove adobe vendor id tables - -Revision ID: f08f9c6bded6 -Revises: 28717fc6e50f -Create Date: 2023-06-28 19:07:27.735625+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "f08f9c6bded6" -down_revision = "28717fc6e50f" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index( - "ix_drmdeviceidentifiers_credential_id", table_name="drmdeviceidentifiers" - ) - op.drop_index( - "ix_drmdeviceidentifiers_device_identifier", table_name="drmdeviceidentifiers" - ) - op.drop_table("drmdeviceidentifiers") - op.drop_index( - "ix_delegatedpatronidentifiers_library_uri", - table_name="delegatedpatronidentifiers", - ) - op.drop_index( - "ix_delegatedpatronidentifiers_patron_identifier", - table_name="delegatedpatronidentifiers", - ) - op.drop_index( - "ix_delegatedpatronidentifiers_type", table_name="delegatedpatronidentifiers" - ) - op.drop_table("delegatedpatronidentifiers") - - -def downgrade() -> None: - op.create_table( - "delegatedpatronidentifiers", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("type", sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column( - "library_uri", sa.VARCHAR(length=255), autoincrement=False, nullable=True - ), - sa.Column( - "patron_identifier", - sa.VARCHAR(length=255), - autoincrement=False, - nullable=True, - ), - sa.Column( - "delegated_identifier", sa.VARCHAR(), autoincrement=False, nullable=True - ), - sa.PrimaryKeyConstraint("id", name="delegatedpatronidentifiers_pkey"), - sa.UniqueConstraint( - "type", - "library_uri", - "patron_identifier", - name="delegatedpatronidentifiers_type_library_uri_patron_identifi_key", - ), - ) - op.create_index( - "ix_delegatedpatronidentifiers_type", - "delegatedpatronidentifiers", - ["type"], - unique=False, - ) - op.create_index( - "ix_delegatedpatronidentifiers_patron_identifier", - "delegatedpatronidentifiers", - ["patron_identifier"], - unique=False, - ) - op.create_index( - "ix_delegatedpatronidentifiers_library_uri", - "delegatedpatronidentifiers", - ["library_uri"], - unique=False, - ) - op.create_table( - "drmdeviceidentifiers", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("credential_id", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "device_identifier", - sa.VARCHAR(length=255), - autoincrement=False, - nullable=True, - ), - sa.ForeignKeyConstraint( - ["credential_id"], - ["credentials.id"], - name="drmdeviceidentifiers_credential_id_fkey", - ), - sa.PrimaryKeyConstraint("id", name="drmdeviceidentifiers_pkey"), - ) - op.create_index( - "ix_drmdeviceidentifiers_device_identifier", - "drmdeviceidentifiers", - ["device_identifier"], - unique=False, - ) - op.create_index( - "ix_drmdeviceidentifiers_credential_id", - "drmdeviceidentifiers", - ["credential_id"], - unique=False, - ) diff --git a/alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py b/alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py deleted file mode 100644 index 458328b64..000000000 --- a/alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Migrate library key pair - -Revision ID: 04bbd03bf9f1 -Revises: f08f9c6bded6 -Create Date: 2023-07-06 14:40:17.970603+00:00 - -""" -import json -import logging - -import sqlalchemy as sa -from Crypto.PublicKey import RSA - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "04bbd03bf9f1" -down_revision = "f08f9c6bded6" -branch_labels = None -depends_on = None - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - - -def upgrade() -> None: - # Add the new columns as nullable, add the values, then make them non-nullable - op.add_column( - "libraries", - sa.Column("public_key", sa.Unicode(), nullable=True), - ) - op.add_column( - "libraries", - sa.Column("private_key", sa.LargeBinary(), nullable=True), - ) - - # Now we update the value stored for the key pair - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - for library in libraries: - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.library_id = (%s) and cs.key = 'key-pair' and cs.external_integration_id IS NULL", - (library.id,), - ).fetchone() - if setting and setting.value: - _, private_key_str = json.loads(setting.value) - private_key = RSA.import_key(private_key_str) - else: - log.info(f"Library {library.short_name} has no key pair, generating one...") - private_key = RSA.generate(2048) - - private_key_bytes = private_key.export_key("DER") - public_key_str = private_key.publickey().export_key("PEM").decode("utf-8") - - connection.execute( - "update libraries set public_key = (%s), private_key = (%s) where id = (%s)", - (public_key_str, private_key_bytes, library.id), - ) - - # Then we make the columns non-nullable - op.alter_column("libraries", "public_key", nullable=False) - op.alter_column("libraries", "private_key", nullable=False) - - -def downgrade() -> None: - op.drop_column("libraries", "private_key") - op.drop_column("libraries", "public_key") diff --git a/alembic/versions/20230706_c471f553249b_migrate_library_logo.py b/alembic/versions/20230706_c471f553249b_migrate_library_logo.py deleted file mode 100644 index b56d5a4f9..000000000 --- a/alembic/versions/20230706_c471f553249b_migrate_library_logo.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Migrate library logo - -Revision ID: c471f553249b -Revises: 04bbd03bf9f1 -Create Date: 2023-07-06 19:37:59.269231+00:00 - -""" -import logging - -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "c471f553249b" -down_revision = "04bbd03bf9f1" -branch_labels = None -depends_on = None - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - - -def upgrade() -> None: - op.create_table( - "libraries_logos", - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("content", sa.LargeBinary(), nullable=False), - sa.ForeignKeyConstraint( - ["library_id"], - ["libraries.id"], - ), - sa.PrimaryKeyConstraint("library_id"), - ) - - prefix = "data:image/png;base64," - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - - for library in libraries: - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.library_id = (%s) and cs.key = 'logo'", - (library.id,), - ).first() - if setting and setting.value: - log.info(f"Library {library.short_name} has a logo, migrating it.") - logo_str = setting.value - - # We stored the logo with a data:image prefix before, but we - # don't need that anymore, so we remove it here. 
- if logo_str.startswith(prefix): - logo_str = logo_str[len(prefix) :] - - logo_bytes = logo_str.encode("utf-8") - connection.execute( - "insert into libraries_logos (library_id, content) values (%s, %s)", - (library.id, logo_bytes), - ) - else: - log.info(f"Library {library.short_name} has no logo, skipping.") - - -def downgrade() -> None: - op.drop_table("libraries_logos") diff --git a/alembic/versions/20230711_3d380776c1bf_migrate_announcements.py b/alembic/versions/20230711_3d380776c1bf_migrate_announcements.py deleted file mode 100644 index ac04381a3..000000000 --- a/alembic/versions/20230711_3d380776c1bf_migrate_announcements.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Migrate announcements - -Revision ID: 3d380776c1bf -Revises: c471f553249b -Create Date: 2023-07-11 17:22:56.596888+00:00 - -""" -import json -from typing import Optional - -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.engine import Connection, Row - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "3d380776c1bf" -down_revision = "c471f553249b" -branch_labels = None -depends_on = None - - -def create_announcement( - connection: Connection, setting: Optional[Row], library_id: Optional[int] = None -) -> None: - if setting and setting.value: - announcements = json.loads(setting.value) - for announcement in announcements: - connection.execute( - "insert into announcements (id, content, start, finish, library_id) values (%s, %s, %s, %s, %s)", - ( - announcement["id"], - announcement["content"], - announcement["start"], - announcement["finish"], - library_id, - ), - ) - - -def upgrade() -> None: - # Create table for announcements - op.create_table( - "announcements", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("content", sa.Unicode(), nullable=False), - sa.Column("start", sa.Date(), nullable=False), - sa.Column("finish", sa.Date(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["library_id"], - ["libraries.id"], - ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_index( - op.f("ix_announcements_library_id"), - "announcements", - ["library_id"], - unique=False, - ) - - # Migrate announcements from configuration settings - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - - # Migrate library announcements - for library in libraries: - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.library_id = (%s) and cs.key = 'announcements' and cs.external_integration_id IS NULL", - (library.id,), - ).fetchone() - create_announcement(connection, setting, library.id) - - # Migrate global announcements - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.key = 'global_announcements' and cs.library_id IS NULL and cs.external_integration_id IS NULL", - ).fetchone() - create_announcement(connection, setting) - - -def downgrade() -> None: - op.drop_index(op.f("ix_announcements_library_id"), table_name="announcements") - op.drop_table("announcements") diff --git a/alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py b/alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py deleted file mode 100644 index 09638fbeb..000000000 --- a/alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Migrate library settings - -Revision ID: b3749bac3e55 -Revises: 3d380776c1bf -Create Date: 2023-07-19 
16:13:14.831349+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -from alembic import op -from core.configuration.library import LibrarySettings -from core.migration.migrate_external_integration import _validate_and_load_settings -from core.model import json_serializer - -# revision identifiers, used by Alembic. -revision = "b3749bac3e55" -down_revision = "3d380776c1bf" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.add_column( - "libraries", - sa.Column( - "settings_dict", postgresql.JSONB(astext_type=sa.Text()), nullable=True - ), - ) - - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - for library in libraries: - configuration_settings = connection.execute( - "select key, value from configurationsettings " - "where library_id = (%s) and external_integration_id IS NULL", - (library.id,), - ) - settings_dict = {} - for key, value in configuration_settings: - if key in ["announcements", "logo", "key-pair"]: - continue - if not value: - continue - settings_dict[key] = value - - settings = _validate_and_load_settings(LibrarySettings, settings_dict) - connection.execute( - "update libraries set settings_dict = (%s) where id = (%s)", - (json_serializer(settings.dict()), library.id), - ) - - op.alter_column("libraries", "settings_dict", nullable=False) - - -def downgrade() -> None: - op.drop_column("libraries", "settings_dict") diff --git a/alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py b/alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py deleted file mode 100644 index f13772ba3..000000000 --- a/alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Remove integration client - -Revision ID: 2f1a51aa0ee8 -Revises: 892c8e0c89f8 -Create Date: 2023-07-26 13:34:02.924885+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "2f1a51aa0ee8" -down_revision = "892c8e0c89f8" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index("ix_datasources_integration_client_id", table_name="datasources") - op.drop_constraint( - "datasources_integration_client_id_fkey", "datasources", type_="foreignkey" - ) - op.drop_column("datasources", "integration_client_id") - op.drop_index("ix_holds_integration_client_id", table_name="holds") - op.drop_constraint("holds_integration_client_id_fkey", "holds", type_="foreignkey") - op.drop_column("holds", "integration_client_id") - op.drop_index("ix_loans_integration_client_id", table_name="loans") - op.drop_constraint("loans_integration_client_id_fkey", "loans", type_="foreignkey") - op.drop_column("loans", "integration_client_id") - op.drop_index( - "ix_integrationclients_shared_secret", table_name="integrationclients" - ) - op.drop_table("integrationclients") - - -def downgrade() -> None: - op.create_table( - "integrationclients", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("url", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("shared_secret", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("enabled", sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column( - "created", - postgresql.TIMESTAMP(timezone=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "last_accessed", - postgresql.TIMESTAMP(timezone=True), - autoincrement=False, - nullable=True, - ), - sa.PrimaryKeyConstraint("id", name="integrationclients_pkey"), - sa.UniqueConstraint("url", name="integrationclients_url_key"), - ) - op.create_index( - "ix_integrationclients_shared_secret", - "integrationclients", - ["shared_secret"], - unique=False, - ) - op.add_column( - "loans", - sa.Column( - "integration_client_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - ) - op.create_foreign_key( - "loans_integration_client_id_fkey", - "loans", - "integrationclients", - ["integration_client_id"], - ["id"], - ) - op.create_index( - "ix_loans_integration_client_id", - "loans", - ["integration_client_id"], - unique=False, - ) - op.add_column( - "holds", - sa.Column( - "integration_client_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - ) - op.create_foreign_key( - "holds_integration_client_id_fkey", - "holds", - "integrationclients", - ["integration_client_id"], - ["id"], - ) - op.create_index( - "ix_holds_integration_client_id", - "holds", - ["integration_client_id"], - unique=False, - ) - op.add_column( - "datasources", - sa.Column( - "integration_client_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - ) - op.create_foreign_key( - "datasources_integration_client_id_fkey", - "datasources", - "integrationclients", - ["integration_client_id"], - ["id"], - ) - op.create_index( - "ix_datasources_integration_client_id", - "datasources", - ["integration_client_id"], - unique=False, - ) diff --git a/alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py b/alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py deleted file mode 100644 index 0c7312b0f..000000000 --- a/alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Audiobook playtime tracking - -Revision ID: 892c8e0c89f8 -Revises: b3749bac3e55 -Create Date: 2023-07-28 07:20:24.625484+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "892c8e0c89f8" -down_revision = "b3749bac3e55" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "playtime_entries", - sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column("identifier_id", sa.Integer(), nullable=False), - sa.Column("collection_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False), - sa.Column("total_seconds_played", sa.Integer(), nullable=False), - sa.Column("tracking_id", sa.String(length=64), nullable=False), - sa.Column("processed", sa.Boolean(), nullable=True), - sa.ForeignKeyConstraint( - ["collection_id"], - ["collections.id"], - onupdate="CASCADE", - ondelete="CASCADE", - ), - sa.ForeignKeyConstraint( - ["identifier_id"], - ["identifiers.id"], - onupdate="CASCADE", - ondelete="CASCADE", - ), - sa.ForeignKeyConstraint( - ["library_id"], ["libraries.id"], onupdate="CASCADE", ondelete="CASCADE" - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint( - "identifier_id", "collection_id", "library_id", "tracking_id" - ), - ) - op.create_table( - "playtime_summaries", - sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column("identifier_id", sa.Integer(), nullable=True), - sa.Column("collection_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("identifier_str", sa.String(), nullable=False), - sa.Column("collection_name", sa.String(), nullable=False), - sa.Column("library_name", sa.String(), nullable=False), - sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False), - sa.Column("total_seconds_played", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["collection_id"], - ["collections.id"], - onupdate="CASCADE", - ondelete="SET NULL", - ), - sa.ForeignKeyConstraint( - ["identifier_id"], - ["identifiers.id"], - onupdate="CASCADE", - ondelete="SET NULL", - ), - sa.ForeignKeyConstraint( - ["library_id"], ["libraries.id"], onupdate="CASCADE", ondelete="SET NULL" - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint( - "identifier_str", "collection_name", "library_name", "timestamp" - ), - ) - op.create_index( - op.f("ix_playtime_summaries_collection_id"), - "playtime_summaries", - ["collection_id"], - unique=False, - ) - op.create_index( - op.f("ix_playtime_summaries_identifier_id"), - "playtime_summaries", - ["identifier_id"], - unique=False, - ) - op.create_index( - op.f("ix_playtime_summaries_library_id"), - "playtime_summaries", - ["library_id"], - unique=False, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index( - op.f("ix_playtime_summaries_library_id"), table_name="playtime_summaries" - ) - op.drop_index( - op.f("ix_playtime_summaries_identifier_id"), table_name="playtime_summaries" - ) - op.drop_index( - op.f("ix_playtime_summaries_collection_id"), table_name="playtime_summaries" - ) - op.drop_table("playtime_summaries") - op.drop_table("playtime_entries") - # ### end Alembic commands ### diff --git a/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py b/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py deleted file mode 100644 index 70b14f1dc..000000000 --- a/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Add discovery service tables - -Revision ID: 0df58829fc1a -Revises: 2f1a51aa0ee8 -Create Date: 2023-08-10 15:49:36.784169+00:00 - -""" -import sqlalchemy as sa - -from alembic import op -from api.discovery.opds_registration import OpdsRegistrationService -from core.migration.migrate_external_integration import ( - _migrate_external_integration, - get_configuration_settings, - get_integrations, - get_library_for_integration, -) -from core.migration.util import drop_enum, pg_update_enum - -# revision identifiers, used by Alembic. -revision = "0df58829fc1a" -down_revision = "2f1a51aa0ee8" -branch_labels = None -depends_on = None - -old_goals_enum = [ - "PATRON_AUTH_GOAL", - "LICENSE_GOAL", -] - -new_goals_enum = old_goals_enum + ["DISCOVERY_GOAL"] - - -def upgrade() -> None: - op.create_table( - "discovery_service_registrations", - sa.Column( - "status", - sa.Enum("SUCCESS", "FAILURE", name="registrationstatus"), - nullable=False, - ), - sa.Column( - "stage", - sa.Enum("TESTING", "PRODUCTION", name="registrationstage"), - nullable=False, - ), - sa.Column("web_client", sa.Unicode(), nullable=True), - sa.Column("short_name", sa.Unicode(), nullable=True), - sa.Column("shared_secret", sa.Unicode(), nullable=True), - sa.Column("integration_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("vendor_id", sa.Unicode(), nullable=True), - sa.ForeignKeyConstraint( - ["integration_id"], ["integration_configurations.id"], ondelete="CASCADE" - ), - sa.ForeignKeyConstraint(["library_id"], ["libraries.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("integration_id", "library_id"), - ) - pg_update_enum( - op, - "integration_configurations", - "goal", - "goals", - old_goals_enum, - new_goals_enum, - ) - - # Migrate data - connection = op.get_bind() - external_integrations = get_integrations(connection, "discovery") - for external_integration in external_integrations: - # This should always be the case, but we want to make sure - assert external_integration.protocol == "OPDS Registration" - - # Create the settings and library settings dicts from the configurationsettings - settings_dict, library_settings, self_test_result = get_configuration_settings( - connection, external_integration - ) - - # Write the configurationsettings into the integration_configurations table - integration_configuration_id = _migrate_external_integration( - connection, - external_integration, - OpdsRegistrationService, - "DISCOVERY_GOAL", - settings_dict, - self_test_result, - ) - - # Get the libraries that are associated with this external integration - interation_libraries = get_library_for_integration( - connection, external_integration.id - ) - - vendor_id = settings_dict.get("vendor_id") - - # Write the library settings into the 
discovery_service_registrations table - for library in interation_libraries: - library_id = library.library_id - library_settings_dict = library_settings[library_id] - - status = library_settings_dict.get("library-registration-status") - if status is None: - status = "FAILURE" - else: - status = status.upper() - - stage = library_settings_dict.get("library-registration-stage") - if stage is None: - stage = "TESTING" - else: - stage = stage.upper() - - web_client = library_settings_dict.get("library-registration-web-client") - short_name = library_settings_dict.get("username") - shared_secret = library_settings_dict.get("password") - - connection.execute( - "insert into discovery_service_registrations " - "(status, stage, web_client, short_name, shared_secret, integration_id, library_id, vendor_id) " - "values (%s, %s, %s, %s, %s, %s, %s, %s)", - ( - status, - stage, - web_client, - short_name, - shared_secret, - integration_configuration_id, - library_id, - vendor_id, - ), - ) - - -def downgrade() -> None: - connection = op.get_bind() - connection.execute( - "DELETE from integration_configurations where goal = %s", "DISCOVERY_GOAL" - ) - - op.drop_table("discovery_service_registrations") - drop_enum(op, "registrationstatus") - drop_enum(op, "registrationstage") - pg_update_enum( - op, - "integration_configurations", - "goal", - "goals", - new_goals_enum, - old_goals_enum, - ) diff --git a/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py b/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py deleted file mode 100644 index 2b2d0406a..000000000 --- a/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Remove self_hosted from licensepools - -Revision ID: 1c566151741f -Revises: 2b672c6fb2b9 -Create Date: 2023-08-31 16:13:54.935093+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "1c566151741f" -down_revision = "2b672c6fb2b9" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index("ix_licensepools_self_hosted", table_name="licensepools") - op.drop_column("licensepools", "self_hosted") - - -def downgrade() -> None: - op.add_column( - "licensepools", - sa.Column("self_hosted", sa.BOOLEAN(), autoincrement=False, nullable=False), - ) - op.create_index( - "ix_licensepools_self_hosted", "licensepools", ["self_hosted"], unique=False - ) diff --git a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py deleted file mode 100644 index 0f1171408..000000000 --- a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Type coerce collection settings - -Revision ID: 2b672c6fb2b9 -Revises: 0df58829fc1a -Create Date: 2023-09-05 06:40:35.739869+00:00 - -""" -import json -import logging -from copy import deepcopy -from typing import Any, Dict, Optional, Tuple - -from pydantic import PositiveInt, ValidationError, parse_obj_as - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "2b672c6fb2b9" -down_revision = "0df58829fc1a" -branch_labels = None -depends_on = None - - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - - -# All the settings types that have non-str types -ALL_SETTING_TYPES: Dict[str, Any] = { - "verify_certificate": Optional[bool], - "default_reservation_period": Optional[PositiveInt], - "loan_limit": Optional[PositiveInt], - "hold_limit": Optional[PositiveInt], - "max_retry_count": Optional[PositiveInt], - "ebook_loan_duration": Optional[PositiveInt], - "default_loan_duration": Optional[PositiveInt], -} - - -def _coerce_types(original_settings: Dict[str, Any]) -> Tuple[bool, Dict[str, Any]]: - """Coerce the types, in-place""" - modified = False - modified_settings = deepcopy(original_settings) - for setting_name, setting_type in ALL_SETTING_TYPES.items(): - if setting_name in original_settings: - # If the setting is an empty string, we set it to None - if original_settings[setting_name] == "": - setting = None - else: - setting = original_settings[setting_name] - - try: - modified = True - modified_settings[setting_name] = parse_obj_as(setting_type, setting) - except ValidationError as e: - log.error( - f"Error while parsing setting {setting_name}. Settings: {original_settings}." - ) - raise e - - return modified, modified_settings - - -def upgrade() -> None: - connection = op.get_bind() - # Fetch all integration settings with the 'licenses' goal - results = connection.execute( - "SELECT id, settings from integration_configurations where goal='LICENSE_GOAL';" - ).fetchall() - - # For each integration setting, we check id any of the non-str - # keys are present in the DB - # We then type-coerce that value - for settings_id, settings in results: - modified, updated_settings = _coerce_types(settings) - if modified: - log.info( - f"Updating settings for integration_configuration (id:{settings_id}). " - f"Original settings: {settings}. New settings: {updated_settings}." - ) - # If any of the values were modified, we update the DB - connection.execute( - "UPDATE integration_configurations SET settings=%s where id=%s", - json.dumps(updated_settings), - settings_id, - ) - - # Do the same for any Library settings - results = connection.execute( - "SELECT ilc.parent_id, ilc.library_id, ilc.settings from integration_library_configurations ilc " - "join integration_configurations ic on ilc.parent_id = ic.id where ic.goal='LICENSE_GOAL';" - ).fetchall() - - for parent_id, library_id, settings in results: - modified, updated_settings = _coerce_types(settings) - if modified: - log.info( - f"Updating settings for integration_library_configuration (parent_id:{parent_id}/library_id:{library_id}). " - f"Original settings: {settings}. New settings: {updated_settings}." - ) - connection.execute( - "UPDATE integration_library_configurations SET settings=%s where parent_id=%s and library_id=%s", - json.dumps(updated_settings), - parent_id, - library_id, - ) - - -def downgrade() -> None: - """There is no need to revert the types back to strings""" diff --git a/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py b/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py deleted file mode 100644 index 1bff2f4e0..000000000 --- a/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Remove ExternalIntegrationLink. 
- -Revision ID: 5d71a80073d5 -Revises: 1c566151741f -Create Date: 2023-09-13 15:23:07.566404+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "5d71a80073d5" -down_revision = "1c566151741f" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index( - "ix_externalintegrationslinks_external_integration_id", - table_name="externalintegrationslinks", - ) - op.drop_index( - "ix_externalintegrationslinks_library_id", - table_name="externalintegrationslinks", - ) - op.drop_index( - "ix_externalintegrationslinks_other_integration_id", - table_name="externalintegrationslinks", - ) - op.drop_index( - "ix_externalintegrationslinks_purpose", table_name="externalintegrationslinks" - ) - op.drop_table("externalintegrationslinks") - - -def downgrade() -> None: - op.create_table( - "externalintegrationslinks", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column( - "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "other_integration_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - sa.Column("purpose", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint( - ["external_integration_id"], - ["externalintegrations.id"], - name="externalintegrationslinks_external_integration_id_fkey", - ), - sa.ForeignKeyConstraint( - ["library_id"], - ["libraries.id"], - name="externalintegrationslinks_library_id_fkey", - ), - sa.ForeignKeyConstraint( - ["other_integration_id"], - ["externalintegrations.id"], - name="externalintegrationslinks_other_integration_id_fkey", - ), - sa.PrimaryKeyConstraint("id", name="externalintegrationslinks_pkey"), - ) - op.create_index( - "ix_externalintegrationslinks_purpose", - "externalintegrationslinks", - ["purpose"], - unique=False, - ) - op.create_index( - "ix_externalintegrationslinks_other_integration_id", - "externalintegrationslinks", - ["other_integration_id"], - unique=False, - ) - op.create_index( - "ix_externalintegrationslinks_library_id", - "externalintegrationslinks", - ["library_id"], - unique=False, - ) - op.create_index( - "ix_externalintegrationslinks_external_integration_id", - "externalintegrationslinks", - ["external_integration_id"], - unique=False, - ) diff --git a/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py b/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py deleted file mode 100644 index 4c2abc3cb..000000000 --- a/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Loan and hold notification times - -Revision ID: 21a65b8f391d -Revises: 5d71a80073d5 -Create Date: 2023-10-16 09:46:58.743018+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "21a65b8f391d" -down_revision = "5d71a80073d5" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic ### - op.add_column( - "holds", sa.Column("patron_last_notified", sa.DateTime(), nullable=True) - ) - op.add_column( - "loans", sa.Column("patron_last_notified", sa.DateTime(), nullable=True) - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic ### - op.drop_column("loans", "patron_last_notified") - op.drop_column("holds", "patron_last_notified") - # ### end Alembic commands ### diff --git a/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py b/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py index 13f071a20..cebc811c4 100644 --- a/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py +++ b/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py @@ -4,6 +4,11 @@ Revises: 21a65b8f391d Create Date: 2023-10-19 05:23:00.694886+00:00 +Note that this migration was changed for the v13.0.0 release, older migrations +were deleted from the repository history, and this was made the first migration +by changing the down_revision to None. + +See: https://alembic.sqlalchemy.org/en/latest/cookbook.html#building-an-up-to-date-database-from-scratch """ import sqlalchemy as sa @@ -11,7 +16,7 @@ # revision identifiers, used by Alembic. revision = "0739d5558dda" -down_revision = "21a65b8f391d" +down_revision = None branch_labels = None depends_on = None diff --git a/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py new file mode 100644 index 000000000..bf95fae23 --- /dev/null +++ b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py @@ -0,0 +1,309 @@ +"""Remove collection external integration. + +Revision ID: 2d72d6876c52 +Revises: cc084e35e037 +Create Date: 2023-11-01 22:42:06.754873+00:00 + +""" +from collections import deque +from dataclasses import dataclass + +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op +from api.integration.registry.license_providers import LicenseProvidersRegistry +from core.integration.base import HasChildIntegrationConfiguration +from core.migration.util import migration_logger +from core.model import json_serializer + +# revision identifiers, used by Alembic. +revision = "2d72d6876c52" +down_revision = "d7ef6948af4e" +branch_labels = None +depends_on = None + + +log = migration_logger(revision) + + +@dataclass +class RenameQueryRow: + collection_id: int + integration_id: int + integration_name: str + collection_name: str + deferral_count: int = 0 + + +def upgrade() -> None: + conn = op.get_bind() + + # Our collection names have gotten out of sync with the integration names. The collection names + # are what are being displayed to users, so before we stop using the collection name, we need + # to update the integration name to match the collection name. + # For now, we leave the collection name column in place, but we make it nullable and remove the + # unique constraint. 
+ rows = conn.execute( + "SELECT c.id as collection_id, ic.id as integration_id, ic.name as integration_name, " + "c.name as collection_name from collections c JOIN integration_configurations ic " + "ON c.integration_configuration_id = ic.id WHERE c.name != ic.name" + ).all() + + integration_names = {row.integration_name for row in rows} + collection_renames = deque(RenameQueryRow(**r) for r in rows) + while collection_renames: + rename = collection_renames.popleft() + if rename.collection_name in integration_names: + # The collection name is already in use by an integration, so we need to rename the + # integration first. + log.info( + f"Collection name {rename.collection_name} is already in use. Deferring rename." + ) + rename.deferral_count += 1 + if rename.deferral_count > 3: + raise RuntimeError( + f"Unable to rename collection {rename.collection_id}. Max deferral count reached." + ) + collection_renames.append(rename) + continue + log.info( + f"Updating name for collection {rename.collection_id} from {rename.integration_name} to {rename.collection_name}." + ) + conn.execute( + "UPDATE integration_configurations SET name = (%s) WHERE id = (%s)", + (rename.collection_name, rename.integration_id), + ) + integration_names.remove(rename.integration_name) + + op.alter_column("collections", "name", existing_type=sa.VARCHAR(), nullable=True) + op.drop_index("ix_collections_name", table_name="collections") + + # We have moved the setting for the TOKEN_AUTH integration from an external integration + # to a new JSONB column on the integration_configurations table (context). We need to move + # the data into the new column as part of this migration. + # The context column is not nullable, so we need to set a default value for the existing + # rows. We will use an empty JSON object. We create the column as nullable, set the default + # value, then make it non-nullable. + op.add_column( + "integration_configurations", + sa.Column("context", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + ) + + conn.execute("UPDATE integration_configurations SET context = '{}'") + + rows = conn.execute( + "SELECT c.id, cs.value FROM collections c " + "JOIN externalintegrations ei ON c.external_integration_id = ei.id " + "JOIN configurationsettings cs ON ei.id = cs.external_integration_id " + "WHERE key='token_auth_endpoint' and value <> ''" + ).all() + + for row in rows: + context = json_serializer({"token_auth_endpoint": row.value}) + log.info(f"Updating context for collection {row.id} to {context}.") + conn.execute( + "UPDATE integration_configurations SET context = (%s) " + "FROM collections " + "WHERE integration_configurations.id = collections.integration_configuration_id " + "and collections.id = (%s)", + (context, row.id), + ) + + op.alter_column("integration_configurations", "context", nullable=False) + + # We have moved the data that was in external_account_id into the settings column of the + # integration, so we need to make sure that it gets moved as part of this migration. We + # also make sure that the new settings are valid for the integration before saving them + # to the database. 
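+ # Illustrative example (hypothetical values): a collection whose
+ # external_account_id was "lib-marketplace" and whose settings were
+ # {"url": "https://example.com/feed"} ends up with settings like
+ # {"url": "https://example.com/feed", "external_account_id": "lib-marketplace"},
+ # validated through the protocol's settings class before being written back.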
+ rows = conn.execute( + "SELECT ic.id as integration_id, ic.settings, ic.protocol, ic.goal, c.external_account_id, c.parent_id " + "FROM collections c JOIN integration_configurations ic ON c.integration_configuration_id = ic.id" + ).all() + + registry = LicenseProvidersRegistry() + for row in rows: + if row.external_account_id is None: + continue + settings_dict = row.settings.copy() + settings_dict["external_account_id"] = row.external_account_id + impl_class = registry.get(row.protocol) + if impl_class is None: + raise RuntimeError( + f"Could not find implementation for protocol {row.protocol}" + ) + if row.parent_id is not None: + if issubclass(impl_class, HasChildIntegrationConfiguration): + settings_obj = impl_class.child_settings_class()(**settings_dict) + else: + raise RuntimeError( + f"Integration {row.integration_id} is a child integration, " + f"but {row.protocol} does not support child integrations." + ) + else: + settings_obj = impl_class.settings_class()(**settings_dict) + new_settings_dict = settings_obj.dict() + if row.settings != new_settings_dict: + new_settings = json_serializer(new_settings_dict) + log.info( + f"Updating settings for integration {row.integration_id} from {row.settings} to {new_settings}." + ) + conn.execute( + "UPDATE integration_configurations SET settings = (%s) WHERE id = (%s)", + (new_settings, row.integration_id), + ) + + # Because collections now rely on integration_configurations, they can no longer + # have a null value for integration_configuration_id. This should already be true + # of our existing collections. We also drop our foreign key constraint, and recreate + # it with the correct ondelete behavior. + op.alter_column( + "collections", + "integration_configuration_id", + existing_type=sa.INTEGER(), + nullable=False, + ) + op.drop_constraint( + "collections_integration_configuration_id_fkey", + "collections", + type_="foreignkey", + ) + op.create_foreign_key( + "collections_integration_configuration_id_fkey", + "collections", + "integration_configurations", + ["integration_configuration_id"], + ["id"], + ) + + # The data that was in the collections_libraries table is now tracked by + # integration_library_configurations, we keep the data in the collections_libraries + # table for now, but we remove the foreign key constraints and indexes. + op.alter_column( + "collections_libraries", + "collection_id", + existing_type=sa.INTEGER(), + nullable=True, + ) + op.alter_column( + "collections_libraries", "library_id", existing_type=sa.INTEGER(), nullable=True + ) + op.drop_index( + "ix_collections_libraries_collection_id", table_name="collections_libraries" + ) + op.drop_index( + "ix_collections_libraries_library_id", table_name="collections_libraries" + ) + op.drop_constraint( + "collections_libraries_collection_id_fkey", + "collections_libraries", + type_="foreignkey", + ) + op.drop_constraint( + "collections_libraries_library_id_fkey", + "collections_libraries", + type_="foreignkey", + ) + + # Collections have now been migrated entirely to use integration_configurations. We keep this column + # for now, but we remove the foreign key constraint and index. + op.drop_index("ix_collections_external_integration_id", table_name="collections") + op.drop_constraint( + "collections_external_integration_id_fkey", "collections", type_="foreignkey" + ) + + # We create a new index on the settings column of integration_configurations. This + # will allow us to quickly find integrations that have a specific setting. 
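+ # A GIN index lets PostgreSQL answer JSONB containment queries without a
+ # sequential scan, e.g. (illustrative query only):
+ #   SELECT id FROM integration_configurations
+ #   WHERE settings @> '{"external_account_id": "abc"}';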
+ op.create_index( + "ix_integration_configurations_settings_dict", + "integration_configurations", + ["settings"], + unique=False, + postgresql_using="gin", + ) + + +def downgrade() -> None: + op.drop_index( + "ix_integration_configurations_settings_dict", + table_name="integration_configurations", + postgresql_using="gin", + ) + + op.create_foreign_key( + "collections_external_integration_id_fkey", + "collections", + "externalintegrations", + ["external_integration_id"], + ["id"], + ) + op.create_index( + "ix_collections_external_integration_id", + "collections", + ["external_integration_id"], + unique=True, + ) + + op.create_foreign_key( + "collections_libraries_collection_id_fkey", + "collections_libraries", + "collections", + ["collection_id"], + ["id"], + ) + op.create_foreign_key( + "collections_libraries_library_id_fkey", + "collections_libraries", + "libraries", + ["library_id"], + ["id"], + ) + op.create_index( + "ix_collections_libraries_library_id", + "collections_libraries", + ["library_id"], + unique=False, + ) + op.create_index( + "ix_collections_libraries_collection_id", + "collections_libraries", + ["collection_id"], + unique=False, + ) + op.alter_column( + "collections_libraries", + "library_id", + existing_type=sa.INTEGER(), + nullable=False, + ) + op.alter_column( + "collections_libraries", + "collection_id", + existing_type=sa.INTEGER(), + nullable=False, + ) + + op.drop_constraint( + "collections_integration_configuration_id_fkey", + "collections", + type_="foreignkey", + ) + op.create_foreign_key( + "collections_integration_configuration_id_fkey", + "collections", + "integration_configurations", + ["integration_configuration_id"], + ["id"], + ondelete="SET NULL", + ) + op.alter_column( + "collections", + "integration_configuration_id", + existing_type=sa.INTEGER(), + nullable=True, + ) + + op.drop_column("integration_configurations", "context") + + op.create_index("ix_collections_name", "collections", ["name"], unique=True) + op.alter_column("collections", "name", existing_type=sa.VARCHAR(), nullable=False) diff --git a/alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py b/alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py new file mode 100644 index 000000000..385d61348 --- /dev/null +++ b/alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py @@ -0,0 +1,52 @@ +"""Remove collection unused columns. + +Revision ID: 382d7921f500 +Revises: e4b120a8d1d5 +Create Date: 2023-11-03 00:09:10.761425+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "382d7921f500" +down_revision = "e4b120a8d1d5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.drop_table("collections_libraries") + op.drop_column("collections", "external_integration_id") + op.drop_column("collections", "name") + op.drop_column("collections", "external_account_id") + + +def downgrade() -> None: + op.add_column( + "collections", + sa.Column( + "external_account_id", sa.VARCHAR(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "collections", + sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.add_column( + "collections", + sa.Column( + "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True + ), + ) + op.create_table( + "collections_libraries", + sa.Column("collection_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.UniqueConstraint( + "collection_id", + "library_id", + name="collections_libraries_collection_id_library_id_key", + ), + ) diff --git a/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py b/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py new file mode 100644 index 000000000..e79038af3 --- /dev/null +++ b/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py @@ -0,0 +1,101 @@ +"""Migrate catalog services. + +Revision ID: 1e46a5bc33b5 +Revises: 382d7921f500 +Create Date: 2023-11-21 17:48:04.535064+00:00 + +""" + +from alembic import op +from core.marc import MARCExporter +from core.migration.migrate_external_integration import ( + _migrate_external_integration, + _migrate_library_settings, + get_configuration_settings, + get_integrations, + get_library_for_integration, +) +from core.migration.util import pg_update_enum + +# revision identifiers, used by Alembic. +revision = "1e46a5bc33b5" +down_revision = "382d7921f500" +branch_labels = None +depends_on = None + +CATALOG_GOAL = "CATALOG_GOAL" +old_goals_enum = ["PATRON_AUTH_GOAL", "LICENSE_GOAL", "DISCOVERY_GOAL"] +new_goals_enum = old_goals_enum + [CATALOG_GOAL] + + +def upgrade() -> None: + # Add the new enum value to our goals enum + pg_update_enum( + op, + "integration_configurations", + "goal", + "goals", + old_goals_enum, + new_goals_enum, + ) + + # Migrate the existing catalog services to integration configurations + connection = op.get_bind() + integrations = get_integrations(connection, "ils_catalog") + for integration in integrations: + _id, protocol, name = integration + + if protocol != "MARC Export": + raise RuntimeError(f"Unknown catalog service '{protocol}'") + + ( + settings_dict, + libraries_settings, + self_test_result, + ) = get_configuration_settings(connection, integration) + + # We moved the setting for update_frequency from the library settings to the integration settings. + update_frequency: str | None = None + for library_id, library_settings in libraries_settings.items(): + if "marc_update_frequency" in library_settings: + frequency = library_settings["marc_update_frequency"] + del library_settings["marc_update_frequency"] + if update_frequency is not None and update_frequency != frequency: + raise RuntimeError( + f"Found different update frequencies for different libraries ({update_frequency}/{frequency})." 
+ ) + update_frequency = frequency + + if update_frequency is not None: + settings_dict["marc_update_frequency"] = update_frequency + + integration_configuration_id = _migrate_external_integration( + connection, + integration.name, + MARCExporter.__name__, + MARCExporter, + CATALOG_GOAL, + settings_dict, + self_test_result, + ) + + integration_libraries = get_library_for_integration(connection, _id) + for library in integration_libraries: + _migrate_library_settings( + connection, + integration_configuration_id, + library.library_id, + libraries_settings[library.library_id], + MARCExporter, + ) + + +def downgrade() -> None: + pg_update_enum( + op, + "integration_configurations", + "goal", + "goals", + new_goals_enum, + old_goals_enum, + ) diff --git a/alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py b/alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py new file mode 100644 index 000000000..85aef3ecd --- /dev/null +++ b/alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py @@ -0,0 +1,47 @@ +"""Remove short_name from library settings. + +Revision ID: e4b120a8d1d5 +Revises: 2d72d6876c52 +Create Date: 2023-11-22 16:28:55.759169+00:00 + +""" +from alembic import op +from core.migration.util import migration_logger +from core.model import json_serializer + +# revision identifiers, used by Alembic. +revision = "e4b120a8d1d5" +down_revision = "2d72d6876c52" +branch_labels = None +depends_on = None + + +log = migration_logger(revision) + + +def upgrade() -> None: + conn = op.get_bind() + + # Find all the library configurations that have a short_name key in their settings. + rows = conn.execute( + "select parent_id, library_id, settings from integration_library_configurations where settings ? 'short_name'" + ).all() + + for row in rows: + settings = row.settings.copy() + short_name = settings.get("short_name") + del settings["short_name"] + log.info( + f"Removing short_name {short_name} from library configuration " + f"(parent:{row.parent_id}/library:{row.library_id}) {settings}" + ) + settings_json = json_serializer(settings) + conn.execute( + "update integration_library_configurations set settings = (%s) where parent_id = (%s) and library_id = (%s)", + (settings_json, row.parent_id, row.library_id), + ) + + +def downgrade() -> None: + # No need to do anything here. The key was never used. + pass diff --git a/alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py b/alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py new file mode 100644 index 000000000..e853aceb2 --- /dev/null +++ b/alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py @@ -0,0 +1,57 @@ +"""Biblioboard licensepools time tracking flag + +Revision ID: 1c14468b74ce +Revises: 6af9160a578e +Create Date: 2023-11-24 08:11:35.541207+00:00 + +""" +from alembic import op +from core.migration.util import migration_logger + +# revision identifiers, used by Alembic. 
+revision = "1c14468b74ce" +down_revision = "6af9160a578e" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + connection = op.get_bind() + log = migration_logger(revision) + + collections = connection.execute( + "select collections.id from integration_configurations \ + JOIN collections on collections.integration_configuration_id = integration_configurations.id \ + where integration_configurations.protocol = 'OPDS for Distributors'" + ).all() + + log.warning(f"Will update licensepools for collections: {collections}") + + collection_ids = [cid.id for cid in collections] + + if len(collection_ids) == 0: + log.info("No collections found to update!") + return + + pool_ids = connection.execute( + "select licensepools.id from licensepools \ + JOIN collections on collections.id = licensepools.collection_id \ + JOIN editions on editions.primary_identifier_id = licensepools.identifier_id \ + WHERE editions.medium = 'Audio' and licensepools.collection_id in %(collection_ids)s", + collection_ids=tuple(collection_ids), + ).all() + + pool_ids_list = [p.id for p in pool_ids] + # update licensepools + if len(pool_ids_list) == 0: + log.info("No licensepools to update!") + return + + connection.execute( + "UPDATE licensepools SET should_track_playtime=true WHERE id in %(ids)s returning id", + ids=tuple(pool_ids_list), + ).all() + + +def downgrade() -> None: + pass diff --git a/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py b/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py new file mode 100644 index 000000000..50b2ba3d9 --- /dev/null +++ b/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py @@ -0,0 +1,34 @@ +"""Licensepools time tracking flag + +Revision ID: 6af9160a578e +Revises: 1e46a5bc33b5 +Create Date: 2023-11-24 08:08:12.636590+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "6af9160a578e" +down_revision = "1e46a5bc33b5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "licensepools", + sa.Column( + "should_track_playtime", + sa.Boolean(), + nullable=False, + server_default=sa.sql.false(), + default=False, + ), + ) + op.alter_column("licensepools", "should_track_playtime", server_default=None) + + +def downgrade() -> None: + op.drop_column("licensepools", "should_track_playtime") diff --git a/alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py b/alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py new file mode 100644 index 000000000..9f1e1a0eb --- /dev/null +++ b/alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py @@ -0,0 +1,62 @@ +"""MARC Export by collection. + +Revision ID: 0039f3f12014 +Revises: 1c14468b74ce +Create Date: 2023-11-28 20:19:55.520740+00:00 + +""" +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "0039f3f12014" +down_revision = "1c14468b74ce" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "marcfiles", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("library_id", sa.Integer(), nullable=True), + sa.Column("collection_id", sa.Integer(), nullable=True), + sa.Column("key", sa.Unicode(), nullable=False), + sa.Column("created", sa.DateTime(timezone=True), nullable=False), + sa.Column("since", sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint( + ["collection_id"], + ["collections.id"], + ondelete="SET NULL", + ), + sa.ForeignKeyConstraint( + ["library_id"], + ["libraries.id"], + ondelete="SET NULL", + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_marcfiles_collection_id"), "marcfiles", ["collection_id"], unique=False + ) + op.create_index( + op.f("ix_marcfiles_created"), "marcfiles", ["created"], unique=False + ) + op.create_index( + op.f("ix_marcfiles_library_id"), "marcfiles", ["library_id"], unique=False + ) + op.add_column( + "collections", sa.Column("export_marc_records", sa.Boolean(), nullable=True) + ) + op.execute("UPDATE collections SET export_marc_records = 'f'") + op.alter_column("collections", "export_marc_records", nullable=False) + + +def downgrade() -> None: + op.drop_column("collections", "export_marc_records") + op.drop_index(op.f("ix_marcfiles_library_id"), table_name="marcfiles") + op.drop_index(op.f("ix_marcfiles_created"), table_name="marcfiles") + op.drop_index(op.f("ix_marcfiles_collection_id"), table_name="marcfiles") + op.drop_table("marcfiles") diff --git a/alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py b/alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py new file mode 100644 index 000000000..1d3dd425b --- /dev/null +++ b/alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py @@ -0,0 +1,97 @@ +"""MARC Export cleanup migration. + +Revision ID: d3cdbea3d43b +Revises: e06f965879ab +Create Date: 2023-12-04 17:23:26.396526+00:00 + +""" + +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "d3cdbea3d43b" +down_revision = "e06f965879ab" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # remove the coverage records for the cachedmarcfiles + op.execute("DELETE FROM coveragerecords WHERE operation = 'generate-marc'") + + # Remove the foreign key constraint on the cachedmarcfiles table + op.drop_constraint( + "cachedmarcfiles_representation_id_fkey", + "cachedmarcfiles", + type_="foreignkey", + ) + + # Remove the representations for the cachedmarcfiles + op.execute( + "DELETE FROM representations WHERE id IN (SELECT representation_id FROM cachedmarcfiles)" + ) + + # Remove the cachedmarcfiles + op.drop_index("ix_cachedmarcfiles_end_time", table_name="cachedmarcfiles") + op.drop_index("ix_cachedmarcfiles_lane_id", table_name="cachedmarcfiles") + op.drop_index("ix_cachedmarcfiles_library_id", table_name="cachedmarcfiles") + op.drop_index("ix_cachedmarcfiles_start_time", table_name="cachedmarcfiles") + op.drop_table("cachedmarcfiles") + + # Remove the unused marc_record column from the works table + op.drop_column("works", "marc_record") + + +def downgrade() -> None: + op.add_column( + "works", + sa.Column("marc_record", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.create_table( + "cachedmarcfiles", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("lane_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "representation_id", sa.INTEGER(), autoincrement=False, nullable=False + ), + sa.Column( + "start_time", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column( + "end_time", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.ForeignKeyConstraint( + ["lane_id"], ["lanes.id"], name="cachedmarcfiles_lane_id_fkey" + ), + sa.ForeignKeyConstraint( + ["library_id"], ["libraries.id"], name="cachedmarcfiles_library_id_fkey" + ), + sa.ForeignKeyConstraint( + ["representation_id"], + ["representations.id"], + name="cachedmarcfiles_representation_id_fkey", + ), + sa.PrimaryKeyConstraint("id", name="cachedmarcfiles_pkey"), + ) + op.create_index( + "ix_cachedmarcfiles_start_time", "cachedmarcfiles", ["start_time"], unique=False + ) + op.create_index( + "ix_cachedmarcfiles_library_id", "cachedmarcfiles", ["library_id"], unique=False + ) + op.create_index( + "ix_cachedmarcfiles_lane_id", "cachedmarcfiles", ["lane_id"], unique=False + ) + op.create_index( + "ix_cachedmarcfiles_end_time", "cachedmarcfiles", ["end_time"], unique=False + ) diff --git a/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py new file mode 100644 index 000000000..e93b11033 --- /dev/null +++ b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py @@ -0,0 +1,82 @@ +"""MARC S3 file cleanup. + +Revision ID: e06f965879ab +Revises: 0039f3f12014 +Create Date: 2023-12-06 16:04:36.936466+00:00 + +""" +from urllib.parse import unquote, urlparse + +from alembic import op +from core.migration.util import migration_logger +from core.service.container import container_instance + +# revision identifiers, used by Alembic. +revision = "e06f965879ab" +down_revision = "0039f3f12014" +branch_labels = None +depends_on = None + + +def parse_key_from_url(url: str | None, bucket: str) -> str | None: + """Parse the key from a URL. + + :param url: The URL to parse. 
+ :return: The key, or None if the URL is not a valid S3 URL. + """ + if url is None: + return None + + parsed_url = urlparse(url) + + if f"/{bucket}/" in parsed_url.path: + key = parsed_url.path.split(f"/{bucket}/", 1)[1] + elif bucket in parsed_url.netloc: + key = parsed_url.path.lstrip("/") + else: + return None + + # The key stored in the DB is URL encoded, so we need to decode it + return unquote(key) + + +def upgrade() -> None: + # Before removing the cachedmarcfiles table, we want to clean up + # the cachedmarcfiles stored in s3. + # + # Note: if you are running this migration on a development system, and you want + # to skip deleting these files you can just comment out the migration code below. + services = container_instance() + public_s3 = services.storage.public() + log = migration_logger(revision) + + # Check if there are any cachedmarcfiles in s3 + connection = op.get_bind() + cached_files = connection.execute( + "SELECT r.url FROM cachedmarcfiles cmf JOIN representations r ON cmf.representation_id = r.id" + ).all() + if public_s3 is None and len(cached_files) > 0: + raise RuntimeError( + "There are cachedmarcfiles in the database, but no public s3 storage configured!" + ) + + keys_to_delete = [] + for cached_file in cached_files: + url = cached_file.url + bucket = public_s3.bucket + key = parse_key_from_url(url, bucket) + if key is None: + log.info(f"Skipping cachedmarcfile with invalid URL: {url}") + continue + generated_url = public_s3.generate_url(key) + if generated_url != url: + raise RuntimeError(f"URL mismatch: {url} != {generated_url}") + keys_to_delete.append(key) + + for key in keys_to_delete: + log.info(f"Deleting {key} from s3 bucket {public_s3.bucket}") + public_s3.delete(key) + + +def downgrade() -> None: + pass diff --git a/alembic/versions/20240104_735bf6ced8b9_update_license_provider_settings.py b/alembic/versions/20240104_735bf6ced8b9_update_license_provider_settings.py new file mode 100644 index 000000000..74595c6c1 --- /dev/null +++ b/alembic/versions/20240104_735bf6ced8b9_update_license_provider_settings.py @@ -0,0 +1,65 @@ +"""Update license provider settings. + +Revision ID: 735bf6ced8b9 +Revises: d3cdbea3d43b +Create Date: 2024-01-04 16:24:32.895789+00:00 + +""" +from alembic import op +from api.integration.registry.license_providers import LicenseProvidersRegistry +from core.integration.base import HasChildIntegrationConfiguration +from core.migration.util import migration_logger +from core.model import json_serializer + +# revision identifiers, used by Alembic. +revision = "735bf6ced8b9" +down_revision = "d3cdbea3d43b" +branch_labels = None +depends_on = None + + +log = migration_logger(revision) + + +def upgrade() -> None: + conn = op.get_bind() + + rows = conn.execute( + "SELECT ic.id as integration_id, ic.settings, ic.protocol, ic.goal, c.parent_id, ic.name " + "FROM collections c JOIN integration_configurations ic ON c.integration_configuration_id = ic.id" + ).all() + + registry = LicenseProvidersRegistry() + for row in rows: + settings_dict = row.settings.copy() + impl_class = registry.get(row.protocol) + if impl_class is None: + raise RuntimeError( + f"Could not find implementation for protocol {row.protocol} for " + f"integration {row.name}({row.integration_id})." 
+ ) + if row.parent_id is not None: + if issubclass(impl_class, HasChildIntegrationConfiguration): + settings_obj = impl_class.child_settings_class()(**settings_dict) + else: + raise RuntimeError( + f"Integration {row.name}({row.integration_id}) is a child integration, " + f"but {row.protocol} does not support child integrations." + ) + else: + settings_obj = impl_class.settings_class()(**settings_dict) + new_settings_dict = settings_obj.dict(exclude_extra=True) + if row.settings != new_settings_dict: + new_settings = json_serializer(new_settings_dict) + log.info( + f"Updating settings for integration {row.name}({row.integration_id}) " + f"from {row.settings} to {new_settings}." + ) + conn.execute( + "UPDATE integration_configurations SET settings = (%s) WHERE id = (%s)", + (new_settings, row.integration_id), + ) + + +def downgrade() -> None: + pass diff --git a/api/admin/announcement_list_validator.py b/api/admin/announcement_list_validator.py index d7bcceaf6..019426fe2 100644 --- a/api/admin/announcement_list_validator.py +++ b/api/admin/announcement_list_validator.py @@ -1,7 +1,7 @@ import datetime import json import uuid -from typing import Dict, List, Optional, Union, cast +from typing import cast import dateutil from flask_babel import lazy_gettext as _ @@ -28,8 +28,8 @@ def __init__( self.default_duration_days = default_duration_days def validate_announcements( - self, announcements: Union[str, List[Dict[str, str]]] - ) -> Dict[uuid.UUID, AnnouncementData]: + self, announcements: str | list[dict[str, str]] + ) -> dict[uuid.UUID, AnnouncementData]: validated_announcements = {} bad_format = INVALID_INPUT.detailed( _( @@ -64,7 +64,7 @@ def validate_announcements( validated_announcements[id] = validated return validated_announcements - def validate_announcement(self, announcement: Dict[str, str]) -> AnnouncementData: + def validate_announcement(self, announcement: dict[str, str]) -> AnnouncementData: if not isinstance(announcement, dict): raise ProblemError( INVALID_INPUT.detailed( @@ -162,8 +162,8 @@ def validate_length(self, value: str, minimum: int, maximum: int) -> str: def validate_date( cls, field: str, - value: Union[str, datetime.date], - minimum: Optional[datetime.date] = None, + value: str | datetime.date, + minimum: datetime.date | None = None, ) -> datetime.date: """Validate a date value. diff --git a/api/admin/config.py b/api/admin/config.py index 4d4f60337..1076e141e 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -1,6 +1,5 @@ import os from enum import Enum -from typing import Optional from urllib.parse import urljoin from requests import RequestException @@ -52,7 +51,7 @@ class Configuration(LoggerMixin): ENV_ADMIN_UI_PACKAGE_VERSION = "TPP_CIRCULATION_ADMIN_PACKAGE_VERSION" # Cache the package version after first lookup. - _version: Optional[str] = None + _version: str | None = None @classmethod def operational_mode(cls) -> OperationalMode: @@ -91,7 +90,7 @@ def resolve_package_version(cls, package_name: str, package_version: str) -> str return package_version @classmethod - def env_package_version(cls) -> Optional[str]: + def env_package_version(cls) -> str | None: """Get the package version specified in configuration or environment. :return Package verison. @@ -120,7 +119,7 @@ def package_version(cls) -> str: @classmethod def lookup_asset_url( - cls, key: str, *, _operational_mode: Optional[OperationalMode] = None + cls, key: str, *, _operational_mode: OperationalMode | None = None ) -> str: """Get the URL for the asset_type. 
@@ -143,7 +142,7 @@ def lookup_asset_url( ) @classmethod - def package_url(cls, *, _operational_mode: Optional[OperationalMode] = None) -> str: + def package_url(cls, *, _operational_mode: OperationalMode | None = None) -> str: """Compute the URL for the admin UI package. :param _operational_mode: For testing. The operational mode is @@ -162,7 +161,7 @@ def package_url(cls, *, _operational_mode: Optional[OperationalMode] = None) -> return url @classmethod - def package_development_directory(cls, *, _base_dir: Optional[str] = None) -> str: + def package_development_directory(cls, *, _base_dir: str | None = None) -> str: """Absolute path for the admin UI package when in development mode. :param _base_dir: For testing purposes. Not used in normal operation. @@ -177,7 +176,7 @@ def package_development_directory(cls, *, _base_dir: Optional[str] = None) -> st ) @classmethod - def static_files_directory(cls, *, _base_dir: Optional[str] = None) -> str: + def static_files_directory(cls, *, _base_dir: str | None = None) -> str: """Absolute path for the admin UI static files. :param _base_dir: For testing purposes. Not used in normal operation. diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index 0579f5f54..ff613a1d5 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -5,7 +5,7 @@ from api.admin.controller.quicksight import QuickSightController if TYPE_CHECKING: - from api.controller import CirculationManager + from api.circulation_manager import CirculationManager def setup_admin_controllers(manager: CirculationManager): @@ -83,7 +83,7 @@ def setup_admin_controllers(manager: CirculationManager): ) manager.admin_collection_settings_controller = CollectionSettingsController(manager) manager.admin_collection_self_tests_controller = CollectionSelfTestsController( - manager + manager._db ) manager.admin_sitewide_configuration_settings_controller = ( SitewideConfigurationSettingsController(manager) diff --git a/api/admin/controller/admin_search.py b/api/admin/controller/admin_search.py index e2aa1fddb..dd4d2c7ce 100644 --- a/api/admin/controller/admin_search.py +++ b/api/admin/controller/admin_search.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - import flask from sqlalchemy import func, or_ @@ -38,13 +36,13 @@ def search_field_values(self) -> dict: return self._search_field_values_cached(collection_ids) @classmethod - def _unzip(cls, values: List[Tuple[str, int]]) -> dict: + def _unzip(cls, values: list[tuple[str, int]]) -> dict: """Covert a list of tuples to a {value0: value1} dictionary""" return {a[0]: a[1] for a in values if type(a[0]) is str} # 1 hour in-memory cache @memoize(ttls=3600) - def _search_field_values_cached(self, collection_ids: List[int]) -> dict: + def _search_field_values_cached(self, collection_ids: list[int]) -> dict: licenses_filter = or_( LicensePool.open_access == True, LicensePool.licenses_owned != 0, diff --git a/api/admin/controller/announcement_service.py b/api/admin/controller/announcement_service.py index 68c320d29..8ec18ac0a 100644 --- a/api/admin/controller/announcement_service.py +++ b/api/admin/controller/announcement_service.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Callable, Dict +from collections.abc import Callable +from typing import Any import flask @@ -19,13 +20,13 @@ def _action(self) -> Callable: method = flask.request.method.lower() return getattr(self, method) - def process_many(self) -> Dict[str, Any] | 
ProblemDetail: + def process_many(self) -> dict[str, Any] | ProblemDetail: try: return self._action()() except ProblemError as e: return e.problem_detail - def get(self) -> Dict[str, Any]: + def get(self) -> dict[str, Any]: """Respond with settings and all global announcements""" db_announcements = ( self._db.execute(Announcement.global_announcements()).scalars().all() @@ -37,7 +38,7 @@ def get(self) -> Dict[str, Any]: announcements=announcements, ) - def post(self) -> Dict[str, Any]: + def post(self) -> dict[str, Any]: """POST multiple announcements to the global namespace""" validator = AnnouncementListValidator() if flask.request.form is None or "announcements" not in flask.request.form: diff --git a/api/admin/controller/catalog_services.py b/api/admin/controller/catalog_services.py index 294a8358c..35f19b5ff 100644 --- a/api/admin/controller/catalog_services.py +++ b/api/admin/controller/catalog_services.py @@ -1,29 +1,34 @@ import flask from flask import Response -from flask_babel import lazy_gettext as _ - -from api.admin.controller.settings import SettingsController -from api.admin.problem_details import ( - CANNOT_CHANGE_PROTOCOL, - INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_SERVICE, - MULTIPLE_SERVICES_FOR_LIBRARY, - UNKNOWN_PROTOCOL, + +from api.admin.controller.base import AdminPermissionsControllerMixin +from api.admin.controller.integration_settings import ( + IntegrationSettingsController, + UpdatedLibrarySettingsTuple, ) +from api.admin.form_data import ProcessFormData +from api.admin.problem_details import MULTIPLE_SERVICES_FOR_LIBRARY +from api.integration.registry.catalog_services import CatalogServicesRegistry +from core.integration.goals import Goals +from core.integration.settings import BaseSettings from core.marc import MARCExporter -from core.model import ExternalIntegration, get_one -from core.util.problem_detail import ProblemDetail +from core.model import ( + IntegrationConfiguration, + IntegrationLibraryConfiguration, + json_serializer, + site_configuration_has_changed, +) +from core.util.problem_detail import ProblemDetail, ProblemError -class CatalogServicesController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - service_apis = [MARCExporter] - self.protocols = self._get_integration_protocols( - service_apis, protocol_name_attr="NAME" - ) +class CatalogServicesController( + IntegrationSettingsController[MARCExporter], + AdminPermissionsControllerMixin, +): + def default_registry(self) -> CatalogServicesRegistry: + return CatalogServicesRegistry() - def process_catalog_services(self): + def process_catalog_services(self) -> Response | ProblemDetail: self.require_system_admin() if flask.request.method == "GET": @@ -31,104 +36,76 @@ def process_catalog_services(self): else: return self.process_post() - def process_get(self): - services = self._get_integration_info( - ExternalIntegration.CATALOG_GOAL, self.protocols - ) - return dict( - catalog_services=services, - protocols=self.protocols, + def process_get(self) -> Response: + return Response( + json_serializer( + { + "catalog_services": self.configured_services, + "protocols": list(self.protocols.values()), + } + ), + status=200, + mimetype="application/json", ) - def process_post(self): - protocol = flask.request.form.get("protocol") - is_new = False - error = self.validate_form_fields(protocol) - if error: - return error - - id = flask.request.form.get("id") - if id: - # Find an existing service to edit - service = get_one( - self._db, - ExternalIntegration, - id=id, - 
goal=ExternalIntegration.CATALOG_GOAL, + def library_integration_validation( + self, integration: IntegrationLibraryConfiguration + ) -> None: + """Check that the library didn't end up with multiple MARC integrations.""" + + library = integration.library + integrations = ( + self._db.query(IntegrationConfiguration) + .join(IntegrationLibraryConfiguration) + .filter( + IntegrationLibraryConfiguration.library_id == library.id, + IntegrationConfiguration.goal == Goals.CATALOG_GOAL, ) - if not service: - return MISSING_SERVICE - if protocol != service.protocol: - return CANNOT_CHANGE_PROTOCOL - else: - # Create a new service - service, is_new = self._create_integration( - self.protocols, - protocol, - ExternalIntegration.CATALOG_GOAL, + .count() + ) + if integrations > 1: + raise ProblemError( + MULTIPLE_SERVICES_FOR_LIBRARY.detailed( + f"You tried to add a MARC export service to {library.short_name}, but it already has one." + ) ) - if isinstance(service, ProblemDetail): - return service - name = self.get_name(service) - if isinstance(name, ProblemDetail): + def process_updated_libraries( + self, + libraries: list[UpdatedLibrarySettingsTuple], + settings_class: type[BaseSettings], + ) -> None: + super().process_updated_libraries(libraries, settings_class) + for integration, _ in libraries: + self.library_integration_validation(integration) + + def process_post(self) -> Response | ProblemDetail: + try: + form_data = flask.request.form + libraries_data = self.get_libraries_data(form_data) + catalog_service, protocol, response_code = self.get_service(form_data) + + # Update settings + impl_cls = self.registry[protocol] + settings_class = impl_cls.settings_class() + validated_settings = ProcessFormData.get_settings(settings_class, form_data) + catalog_service.settings_dict = validated_settings.dict() + + # Update library settings + if libraries_data: + self.process_libraries( + catalog_service, libraries_data, impl_cls.library_settings_class() + ) + + # Trigger a site configuration change + site_configuration_has_changed(self._db) + + except ProblemError as e: self._db.rollback() - return name - elif name: - service.name = name + return e.problem_detail - [protocol] = [p for p in self.protocols if p.get("name") == protocol] + return Response(str(catalog_service.id), response_code) - result = self._set_integration_settings_and_libraries(service, protocol) - if isinstance(result, ProblemDetail): - return result - - library_error = self.check_libraries(service) - if library_error: - self._db.rollback() - return library_error - - if is_new: - return Response(str(service.id), 201) - else: - return Response(str(service.id), 200) - - def validate_form_fields(self, protocol): - """Verify that the protocol which the user has selected is in the list - of recognized protocol options.""" - - if protocol and protocol not in [p.get("name") for p in self.protocols]: - return UNKNOWN_PROTOCOL - - def get_name(self, service): - """Check that there isn't already a service with this name""" - - name = flask.request.form.get("name") - if name: - if service.name != name: - service_with_name = get_one(self._db, ExternalIntegration, name=name) - if service_with_name: - return INTEGRATION_NAME_ALREADY_IN_USE - return name - - def check_libraries(self, service): - """Check that no library ended up with multiple MARC export integrations.""" - - for library in service.libraries: - marc_export_count = 0 - for integration in library.integrations: - if ( - integration.goal == ExternalIntegration.CATALOG_GOAL - and 
integration.protocol == ExternalIntegration.MARC_EXPORT - ): - marc_export_count += 1 - if marc_export_count > 1: - return MULTIPLE_SERVICES_FOR_LIBRARY.detailed( - _( - "You tried to add a MARC export service to %(library)s, but it already has one.", - library=library.short_name, - ) - ) - - def process_delete(self, service_id): - return self._delete_integration(service_id, ExternalIntegration.CATALOG_GOAL) + def process_delete(self, service_id: int) -> Response: + self.require_system_admin() + return self.delete_service(service_id) diff --git a/api/admin/controller/collection_self_tests.py b/api/admin/controller/collection_self_tests.py index 38ceb7c40..8cc53dcb1 100644 --- a/api/admin/controller/collection_self_tests.py +++ b/api/admin/controller/collection_self_tests.py @@ -1,63 +1,41 @@ -from flask_babel import lazy_gettext as _ +from __future__ import annotations -from api.admin.controller.self_tests import SelfTestsController -from api.admin.problem_details import * +from typing import Any + +from flask import Response +from sqlalchemy.orm import Session + +from api.admin.controller.self_tests import IntegrationSelfTestsController +from api.circulation import CirculationApiType from api.integration.registry.license_providers import LicenseProvidersRegistry -from api.selftest import HasCollectionSelfTests -from core.model import Collection -from core.opds_import import OPDSImporter, OPDSImportMonitor - - -class CollectionSelfTestsController(SelfTestsController): - def __init__(self, manager): - super().__init__(manager) - self.type = _("collection") - self.registry = LicenseProvidersRegistry() - self.protocols = self._get_collection_protocols(self.registry.integrations) - - def process_collection_self_tests(self, identifier): - return self._manage_self_tests(identifier) - - def look_up_by_id(self, identifier): - """Find the collection to display self test results or run self tests for; - display an error message if a collection with this ID turns out not to exist""" - - collection = Collection.by_id(self._db, identifier) - if not collection: - return NO_SUCH_COLLECTION - - self.protocol_class = self._find_protocol_class(collection) - return collection - - def get_info(self, collection): - """Compile information about this collection, including the results from the last time, if ever, - that the self tests were run.""" - - return dict( - id=collection.id, - name=collection.name, - protocol=collection.protocol, - parent_id=collection.parent_id, - settings=dict(external_account_id=collection.external_account_id), - ) - - def _find_protocol_class(self, collection): - """Figure out which protocol is providing books to this collection""" - return self.registry.get(collection.protocol) - - def run_tests(self, collection): - collection_protocol = collection.protocol or None - - if self.protocol_class: - value = None - if collection_protocol == OPDSImportMonitor.PROTOCOL: - self.protocol_class = OPDSImportMonitor - value, results = self.protocol_class.run_self_tests( - self._db, self.protocol_class, self._db, collection, OPDSImporter - ) - elif issubclass(self.protocol_class, HasCollectionSelfTests): - value, results = self.protocol_class.run_self_tests( - self._db, self.protocol_class, self._db, collection - ) - - return value +from core.integration.registry import IntegrationRegistry +from core.model import IntegrationConfiguration +from core.selftest import HasSelfTestsIntegrationConfiguration +from core.util.problem_detail import ProblemDetail + + +class 
CollectionSelfTestsController(IntegrationSelfTestsController[CirculationApiType]): + def __init__( + self, + db: Session, + registry: IntegrationRegistry[CirculationApiType] | None = None, + ): + registry = registry or LicenseProvidersRegistry() + super().__init__(db, registry) + + def process_collection_self_tests( + self, identifier: int | None + ) -> Response | ProblemDetail: + return self.process_self_tests(identifier) + + def run_self_tests( + self, integration: IntegrationConfiguration + ) -> dict[str, Any] | None: + protocol_class = self.get_protocol_class(integration) + if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration): + test_result, _ = protocol_class.run_self_tests( + self.db, protocol_class, self.db, integration.collection + ) + return test_result + + return None diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index 6474d2ef4..ee28ac21d 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -1,367 +1,166 @@ -import json -from typing import Any, Dict, List, Optional +from typing import Any import flask from flask import Response -from flask_babel import lazy_gettext as _ -from api.admin.controller.settings import SettingsController +from api.admin.controller.base import AdminPermissionsControllerMixin +from api.admin.controller.integration_settings import IntegrationSettingsController +from api.admin.form_data import ProcessFormData from api.admin.problem_details import ( - CANNOT_CHANGE_PROTOCOL, CANNOT_DELETE_COLLECTION_WITH_CHILDREN, - COLLECTION_NAME_ALREADY_IN_USE, - INCOMPLETE_CONFIGURATION, MISSING_COLLECTION, - MISSING_COLLECTION_NAME, MISSING_PARENT, - NO_PROTOCOL_FOR_NEW_SERVICE, - NO_SUCH_LIBRARY, + MISSING_SERVICE, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, - PROTOCOL_DOES_NOT_SUPPORT_SETTINGS, - UNKNOWN_PROTOCOL, ) +from api.circulation import CirculationApiType from api.integration.registry.license_providers import LicenseProvidersRegistry +from core.integration.base import HasChildIntegrationConfiguration +from core.integration.registry import IntegrationRegistry from core.model import ( Collection, - ConfigurationSetting, - Library, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + create, get_one, - get_one_or_create, + json_serializer, + site_configuration_has_changed, ) -from core.model.admin import Admin -from core.model.integration import IntegrationConfiguration from core.util.problem_detail import ProblemDetail, ProblemError -class CollectionSettingsController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - self.type = _("collection") - self.registry = LicenseProvidersRegistry() - - def _get_collection_protocols(self): - protocols = super()._get_collection_protocols(self.registry.integrations) - - # dedupe and only keep the latest SETTINGS - # this will allow child objects to overwrite - # parent settings with the same key - # This relies on the fact that child settings - # are added after parent settings as such - # `SETTINGS + .to_settings()` - for protocol in protocols: - if "settings" not in protocol: - continue - _found_settings = dict() - for ix, setting in enumerate(protocol["settings"]): - _key = setting["key"] - _found_settings[_key] = ix - _settings = [] - # Go through the dict items and only use the latest found settings - # for any given key - for _, v in _found_settings.items(): - _settings.append(protocol["settings"][v]) - protocol["settings"] = _settings +class 
CollectionSettingsController( + IntegrationSettingsController[CirculationApiType], AdminPermissionsControllerMixin +): + def default_registry(self) -> IntegrationRegistry[CirculationApiType]: + return LicenseProvidersRegistry() + + def configured_service_info( + self, service: IntegrationConfiguration + ) -> dict[str, Any] | None: + service_info = super().configured_service_info(service) + user = getattr(flask.request, "admin", None) + if service_info: + # Add 'marked_for_deletion' to the service info + service_info["marked_for_deletion"] = service.collection.marked_for_deletion + service_info["parent_id"] = ( + service.collection.parent.integration_configuration_id + if service.collection.parent + else None + ) + service_info["settings"]["export_marc_records"] = str( + service.collection.export_marc_records + ).lower() + if user and user.can_see_collection(service.collection): + return service_info + return None - return protocols + def configured_service_library_info( + self, library_configuration: IntegrationLibraryConfiguration + ) -> dict[str, Any] | None: + library_info = super().configured_service_library_info(library_configuration) + user = getattr(flask.request, "admin", None) + if library_info: + if user and user.is_librarian(library_configuration.library): + return library_info + return None - def process_collections(self): + def process_collections(self) -> Response | ProblemDetail: if flask.request.method == "GET": return self.process_get() else: return self.process_post() - # GET - def process_get(self): - collections_db = self._db.query(Collection).order_by(Collection.name).all() - ConfigurationSetting.cache_warm(self._db) - Collection.cache_warm(self._db, lambda: collections_db) - protocols = self._get_collection_protocols() - user = flask.request.admin - collections = [] - collection_object: Collection - for collection_object in collections_db: - if not user or not user.can_see_collection(collection_object): - continue - - collection_dict = self.collection_to_dict(collection_object) - if collection_object.integration_configuration: - libraries = self.load_libraries(collection_object, user) - collection_dict["libraries"] = libraries - collection_dict[ - "settings" - ] = collection_object.integration_configuration.settings_dict - self.load_settings(collection_object, collection_dict["settings"]) - collection_dict["self_test_results"] = self._get_prior_test_results( - collection_object - ) - collection_dict[ - "marked_for_deletion" - ] = collection_object.marked_for_deletion - - collections.append(collection_dict) - - return dict( - collections=collections, - protocols=protocols, - ) - - def collection_to_dict(self, collection_object): - return dict( - id=collection_object.id, - name=collection_object.name, - protocol=collection_object.protocol, - parent_id=collection_object.parent_id, - ) - - def load_libraries(self, collection_object: Collection, user: Admin) -> List[Dict]: - """Get a list of the libraries that 1) are associated with this collection - and 2) the user is affiliated with""" - - libraries = [] - integration: IntegrationConfiguration = ( - collection_object.integration_configuration + def process_get(self) -> Response: + return Response( + json_serializer( + { + "collections": self.configured_services, + "protocols": list(self.protocols.values()), + } + ), + status=200, + mimetype="application/json", ) - if not integration: - return [] - for library in collection_object.libraries: - if not user or not user.is_librarian(library): - continue - library_info = 
dict(short_name=library.short_name) - # Find and update the library settings if they exist - for config in integration.library_configurations: - if library.id == config.library_id: - library_info.update(config.settings_dict) - break - libraries.append(library_info) - - return libraries - - def load_settings(self, collection_object, collection_settings): - """Compile the information about the collection that corresponds to the settings - externally imposed by the collection's protocol.""" - - settings = collection_settings - settings["external_account_id"] = collection_object.external_account_id - def find_protocol_class(self, collection_object): - """Figure out which class this collection's protocol belongs to, from the list - of possible protocols defined in the registry""" + def create_new_service(self, name: str, protocol: str) -> IntegrationConfiguration: + service = super().create_new_service(name, protocol) + # Make sure the new service is associated with a collection + create(self._db, Collection, integration_configuration=service) + return service - return self.registry.get(collection_object.protocol) - - # POST - def process_post(self): + def process_post(self) -> Response | ProblemDetail: self.require_system_admin() - protocols = self._get_collection_protocols() - is_new = False - collection = None - - name = flask.request.form.get("name") - protocol_name = flask.request.form.get("protocol") - parent_id = flask.request.form.get("parent_id") - fields = {"name": name, "protocol": protocol_name} - id = flask.request.form.get("id") - if id: - collection = get_one(self._db, Collection, id=id) - fields["collection"] = collection - - error = self.validate_form_fields(is_new, protocols, **fields) - if error: - return error - - settings_class = self._get_settings_class( - self.registry, protocol_name, is_child=(parent_id is not None) - ) - if not settings_class: - return UNKNOWN_PROTOCOL - - if protocol_name and not collection: - collection, is_new = get_one_or_create(self._db, Collection, name=name) - if not is_new: - self._db.rollback() - return COLLECTION_NAME_ALREADY_IN_USE - collection.create_integration_configuration(protocol_name) - # Mirrors still use the external integration - # TODO: Remove the use of external integrations when Mirrors are migrated - # to use the integration configurations - collection.create_external_integration(protocol_name) - - collection.name = name - [protocol_dict] = [p for p in protocols if p.get("name") == protocol_name] - - valid = self.validate_parent(protocol_dict, collection) - if isinstance(valid, ProblemDetail): - self._db.rollback() - return valid - - settings = protocol_dict["settings"] - settings_error = self.process_settings(settings, collection) - if settings_error: - self._db.rollback() - return settings_error + try: + form_data = flask.request.form + libraries_data = self.get_libraries_data(form_data) + parent_id = form_data.get("parent_id", None, int) + export_marc_records = ( + form_data.get("export_marc_records", None, str) == "true" + ) + integration, protocol, response_code = self.get_service(form_data) - libraries_error = self.process_libraries(protocol_dict, collection) - if libraries_error: - return libraries_error + impl_cls = self.registry[protocol] - if is_new: - return Response(str(collection.id), 201) - else: - return Response(str(collection.id), 200) - - def validate_form_fields(self, is_new, protocols, **fields): - """Check that 1) the required fields aren't blank, 2) the protocol is on the - list of recognized protocols, 3) 
the collection (if there is one) is valid, and - 4) the URL is valid""" - if not fields.get("name"): - return MISSING_COLLECTION_NAME - if "collection" in fields: - if fields.get("collection"): - invalid_collection = self.validate_collection(**fields) - if invalid_collection: - return invalid_collection + # Validate and set parent collection + if parent_id is not None: + if issubclass(impl_cls, HasChildIntegrationConfiguration): + settings_class = impl_cls.child_settings_class() + parent_integration = get_one( + self._db, IntegrationConfiguration, id=parent_id + ) + if ( + parent_integration is None + or parent_integration.collection is None + ): + raise ProblemError(MISSING_PARENT) + integration.collection.parent = parent_integration.collection + else: + raise ProblemError(PROTOCOL_DOES_NOT_SUPPORT_PARENTS) else: - return MISSING_COLLECTION - if fields.get("protocol"): - if fields.get("protocol") not in [p.get("name") for p in protocols]: - return UNKNOWN_PROTOCOL - else: - return NO_PROTOCOL_FOR_NEW_SERVICE + settings_class = impl_cls.settings_class() - def validate_collection(self, **fields): - """The protocol of an existing collection cannot be changed, and - collections must have unique names.""" - if fields.get("protocol") != fields.get("collection").protocol: - return CANNOT_CHANGE_PROTOCOL - if fields.get("name") != fields.get("collection").name: - collection_with_name = get_one( - self._db, Collection, name=fields.get("name") - ) - if collection_with_name: - return COLLECTION_NAME_ALREADY_IN_USE - - def validate_parent(self, protocol, collection): - """Verify that the parent collection is set properly, then determine - the type of the settings that need to be validated: are they 1) settings for a - regular collection (e.g. client key and client secret for an Overdrive collection), - or 2) settings for a child collection (e.g. library ID for an Overdrive Advantage collection)? - """ - - parent_id = flask.request.form.get("parent_id") - if parent_id and not protocol.get("child_settings"): - return PROTOCOL_DOES_NOT_SUPPORT_PARENTS - if parent_id: - parent = get_one(self._db, Collection, id=parent_id) - if not parent: - return MISSING_PARENT - collection.parent = parent - else: - collection.parent = None + # Set export_marc_records flag on the collection + integration.collection.export_marc_records = export_marc_records - return True + # Update settings + validated_settings = ProcessFormData.get_settings(settings_class, form_data) + integration.settings_dict = validated_settings.dict() - def validate_external_account_id_setting(self, value, setting): - """Check that the user has submitted any required values for associating - this collection with an external account.""" - if not value and not setting.get("optional"): - # Roll back any changes to the collection that have already been made. - return INCOMPLETE_CONFIGURATION.detailed( - _( - "The collection configuration is missing a required setting: %(setting)s", - setting=setting.get("label"), + # Update library settings + if libraries_data: + self.process_libraries( + integration, libraries_data, impl_cls.library_settings_class() ) - ) - - def process_settings( - self, settings: List[Dict[str, Any]], collection: Collection - ) -> Optional[ProblemDetail]: - """Process the settings for the given collection. - - Go through the settings that the user has just submitted for this collection, - and check that each setting is valid and that no required settings are missing. 
If - the setting passes all of the validations, go ahead and set it for this collection. - """ - settings_class = self._get_settings_class( - self.registry, - collection.protocol, - is_child=(flask.request.form.get("parent_id") is not None), - ) - if isinstance(settings_class, ProblemDetail): - return settings_class - if settings_class is None: - return PROTOCOL_DOES_NOT_SUPPORT_SETTINGS - collection_settings = {} - for setting in settings: - key = setting["key"] - value = self._extract_form_setting_value(setting, flask.request.form) - if key == "external_account_id": - error = self.validate_external_account_id_setting(value, setting) - if error: - return error - collection.external_account_id = value - elif value is not None: - # Only if the key was present in the request should we add it - collection_settings[key] = value - else: - # Keep existing setting value, when present, if a value is not specified. - # This can help prevent accidental loss of settings due to some programming errors. - if key in collection.integration_configuration.settings_dict: - collection_settings[ - key - ] = collection.integration_configuration.settings_dict[key] - # validate then apply - try: - validated_settings = settings_class(**collection_settings) - except ProblemError as ex: - return ex.problem_detail - collection.integration_configuration.settings_dict = validated_settings.dict() - return None + # Trigger a site configuration change + site_configuration_has_changed(self._db) - def process_libraries(self, protocol, collection): - """Go through the libraries that the user is trying to associate with this collection; - check that each library actually exists, and that the library-related configuration settings - that the user has submitted are complete and valid. If the library passes all of the validations, - go ahead and associate it with this collection.""" + except ProblemError as e: + self._db.rollback() + return e.problem_detail - libraries = [] - protocol_class = self.registry.get(protocol["name"]) - if flask.request.form.get("libraries"): - libraries = json.loads(flask.request.form.get("libraries")) + return Response(str(integration.id), response_code) - for library_info in libraries: - library = get_one( - self._db, Library, short_name=library_info.get("short_name") - ) - if not library: - return NO_SUCH_LIBRARY.detailed( - _( - "You attempted to add the collection to %(library_short_name)s, but the library does not exist.", - library_short_name=library_info.get("short_name"), - ) - ) - if collection not in library.collections: - library.collections.append(collection) - result = self._set_configuration_library( - collection.integration_configuration, library_info, protocol_class - ) - if isinstance(result, ProblemDetail): - return result + def process_delete(self, service_id: int) -> Response | ProblemDetail: + self.require_system_admin() - short_names = [l.get("short_name") for l in libraries] - for library in collection.libraries: - if library.short_name not in short_names: - collection.disassociate_library(library) + integration = get_one( + self._db, + IntegrationConfiguration, + id=service_id, + goal=self.registry.goal, + ) + if not integration: + return MISSING_SERVICE - # DELETE - def process_delete(self, collection_id): - self.require_system_admin() - collection = get_one(self._db, Collection, id=collection_id) + collection = integration.collection if not collection: return MISSING_COLLECTION + if len(collection.children) > 0: return CANNOT_DELETE_COLLECTION_WITH_CHILDREN # Flag the 
collection to be deleted by script in the background. collection.marked_for_deletion = True - return Response(str(_("Deleted")), 200) + return Response("Deleted", 200) diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index a5f867e4a..c2fc8d4f7 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -1,8 +1,8 @@ from __future__ import annotations import json +from collections.abc import Callable from datetime import datetime -from typing import Callable, Dict, List, Optional, Union import flask from flask import Response, url_for @@ -21,7 +21,7 @@ MISSING_COLLECTION, MISSING_CUSTOM_LIST, ) -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.problem_details import CANNOT_DELETE_SHARED_LIST from core.app_server import load_pagination_from_request from core.feed.acquisition import OPDSAcquisitionFeed @@ -51,16 +51,16 @@ class CustomListSharePostResponse(BaseModel): class CustomListPostRequest(BaseModel): name: str - id: Optional[int] = None - entries: List[dict] = [] - collections: List[int] = [] - deletedEntries: List[dict] = [] + id: int | None = None + entries: list[dict] = [] + collections: list[int] = [] + deletedEntries: list[dict] = [] # For auto updating lists auto_update: bool = False - auto_update_query: Optional[dict] = None - auto_update_facets: Optional[dict] = None + auto_update_query: dict | None = None + auto_update_facets: dict | None = None - def _list_as_json(self, list: CustomList, is_owner=True) -> Dict: + def _list_as_json(self, list: CustomList, is_owner=True) -> dict: """Transform a CustomList object into a response ready dict""" collections = [] for collection in list.collections: @@ -84,7 +84,7 @@ def _list_as_json(self, list: CustomList, is_owner=True) -> Dict: is_shared=len(list.shared_locally_with_libraries) > 0, ) - def custom_lists(self) -> Union[Dict, ProblemDetail, Response, None]: + def custom_lists(self) -> dict | ProblemDetail | Response | None: library: Library = flask.request.library # type: ignore # "Request" has no attribute "library" self.require_librarian(library) @@ -113,7 +113,7 @@ def custom_lists(self) -> Union[Dict, ProblemDetail, Response, None]: return None - def _getJSONFromRequest(self, values: Optional[str]) -> list: + def _getJSONFromRequest(self, values: str | None) -> list: if values: return_values = json.loads(values) else: @@ -121,9 +121,7 @@ def _getJSONFromRequest(self, values: Optional[str]) -> list: return return_values - def _get_work_from_urn( - self, library: Library, urn: Optional[str] - ) -> Optional[Work]: + def _get_work_from_urn(self, library: Library, urn: str | None) -> Work | None: identifier, ignore = Identifier.parse_urn(self._db, urn) if identifier is None: @@ -143,14 +141,14 @@ def _create_or_update_list( self, library: Library, name: str, - entries: List[Dict], - collections: List[int], - deleted_entries: Optional[List[Dict]] = None, - id: Optional[int] = None, - auto_update: Optional[bool] = None, - auto_update_query: Optional[dict[str, str]] = None, - auto_update_facets: Optional[dict[str, str]] = None, - ) -> Union[ProblemDetail, Response]: + entries: list[dict], + collections: list[int], + deleted_entries: list[dict] | None = None, + id: int | None = None, + auto_update: bool | None = None, + auto_update_query: dict[str, str] | None = None, + auto_update_facets: dict[str, str] | None = None, + ) -> ProblemDetail | Response: data_source = 
DataSource.lookup(self._db, DataSource.LIBRARY_STAFF) old_list_with_name = CustomList.find(self._db, name, library=library) @@ -325,9 +323,7 @@ def url_fn(after): return url_fn - def custom_list( - self, list_id: int - ) -> Optional[Union[Response, Dict, ProblemDetail]]: + def custom_list(self, list_id: int) -> Response | dict | ProblemDetail | None: library: Library = flask.request.library # type: ignore self.require_librarian(library) data_source = DataSource.lookup(self._db, DataSource.LIBRARY_STAFF) @@ -410,7 +406,7 @@ def custom_list( def share_locally( self, customlist_id: int - ) -> Union[ProblemDetail, Dict[str, int], Response]: + ) -> ProblemDetail | dict[str, int] | Response: """Share this customlist with all libraries on this local CM""" if not customlist_id: return INVALID_INPUT @@ -431,9 +427,10 @@ def share_locally( def share_locally_POST( self, customlist: CustomList - ) -> Union[ProblemDetail, Dict[str, int]]: + ) -> ProblemDetail | dict[str, int]: successes = [] failures = [] + self.log.info(f"Begin sharing customlist '{customlist.name}'") for library in self._db.query(Library).all(): # Do not share with self if library == customlist.library: @@ -441,6 +438,9 @@ def share_locally_POST( # Do not attempt to re-share if library in customlist.shared_locally_with_libraries: + self.log.info( + f"Customlist '{customlist.name}' is already shared with library '{library.name}'" + ) continue # Attempt to share the list @@ -454,13 +454,12 @@ def share_locally_POST( successes.append(library) self._db.commit() + self.log.info(f"Done sharing customlist {customlist.name}") return self.CustomListSharePostResponse( successes=len(successes), failures=len(failures) ).dict() - def share_locally_DELETE( - self, customlist: CustomList - ) -> Union[ProblemDetail, Response]: + def share_locally_DELETE(self, customlist: CustomList) -> ProblemDetail | Response: """Delete the shared status of a custom list If a customlist is actively in use by another library, then disallow the unshare """ diff --git a/api/admin/controller/dashboard.py b/api/admin/controller/dashboard.py index 5774210f5..33f2beccf 100644 --- a/api/admin/controller/dashboard.py +++ b/api/admin/controller/dashboard.py @@ -1,14 +1,14 @@ from __future__ import annotations +from collections.abc import Callable from datetime import date, datetime, timedelta -from typing import Callable import flask from sqlalchemy import desc, nullslast from sqlalchemy.orm import Session from api.admin.model.dashboard_statistics import StatisticsResponse -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.local_analytics_exporter import LocalAnalyticsExporter from core.feed.annotator.admin import AdminAnnotator from core.model import ( diff --git a/api/admin/controller/discovery_service_library_registrations.py b/api/admin/controller/discovery_service_library_registrations.py index 61ab9047a..083256d90 100644 --- a/api/admin/controller/discovery_service_library_registrations.py +++ b/api/admin/controller/discovery_service_library_registrations.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Any, Dict +from typing import Any import flask from flask import Response, url_for @@ -11,7 +11,7 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.problem_details import MISSING_SERVICE, NO_SUCH_LIBRARY -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager 
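
Nearly every file in this changeset carries the same mechanical typing rewrite: typing.Dict/List/Optional/Union give way to the builtin generics of PEP 585 and the `|` unions of PEP 604, both usable in annotations on Python 3.10+. A minimal sketch of the pattern (the function name and body are invented, purely illustrative):

    from typing import Any

    # Before, as in the removed lines:
    #     from typing import Dict, Optional, Union
    #     def find_service(service_id: Optional[int]) -> Union[Dict[str, Any], None]: ...

    # After, as in the added lines (Python 3.10+):
    def find_service(service_id: int | None) -> dict[str, Any] | None:
        # dict/list/tuple/type are subscripted directly (PEP 585),
        # and "X | None" replaces Optional[X] (PEP 604).
        return {"id": service_id} if service_id is not None else None
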
from api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry from core.integration.goals import Goals @@ -38,7 +38,7 @@ def __init__(self, manager: CirculationManager): def process_discovery_service_library_registrations( self, - ) -> Response | Dict[str, Any] | ProblemDetail: + ) -> Response | dict[str, Any] | ProblemDetail: self.require_system_admin() try: if flask.request.method == "GET": @@ -49,7 +49,7 @@ def process_discovery_service_library_registrations( self._db.rollback() return e.problem_detail - def process_get(self) -> Dict[str, Any]: + def process_get(self) -> dict[str, Any]: """Make a list of all discovery services, each with the list of libraries registered with that service and the status of the registration.""" @@ -94,7 +94,7 @@ def process_get(self) -> Dict[str, Any]: def get_library_info( self, registration: DiscoveryServiceRegistration - ) -> Dict[str, str]: + ) -> dict[str, str]: """Find the relevant information about the library which the user is trying to register""" diff --git a/api/admin/controller/discovery_services.py b/api/admin/controller/discovery_services.py index ad2b9b3ed..ebabbf833 100644 --- a/api/admin/controller/discovery_services.py +++ b/api/admin/controller/discovery_services.py @@ -1,5 +1,3 @@ -from typing import Union - import flask from flask import Response from sqlalchemy import and_, select @@ -7,12 +5,7 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.controller.integration_settings import IntegrationSettingsController from api.admin.form_data import ProcessFormData -from api.admin.problem_details import ( - INCOMPLETE_CONFIGURATION, - INTEGRATION_URL_ALREADY_IN_USE, - NO_PROTOCOL_FOR_NEW_SERVICE, - UNKNOWN_PROTOCOL, -) +from api.admin.problem_details import INTEGRATION_URL_ALREADY_IN_USE from api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry from core.model import ( @@ -30,7 +23,7 @@ class DiscoveryServicesController( def default_registry(self) -> DiscoveryRegistry: return DiscoveryRegistry() - def process_discovery_services(self) -> Union[Response, ProblemDetail]: + def process_discovery_services(self) -> Response | ProblemDetail: self.require_system_admin() if flask.request.method == "GET": return self.process_get() @@ -65,30 +58,10 @@ def set_up_default_registry(self) -> None: ) default_registry.settings_dict = settings.dict() - def process_post(self) -> Union[Response, ProblemDetail]: + def process_post(self) -> Response | ProblemDetail: try: form_data = flask.request.form - protocol = form_data.get("protocol", None, str) - id = form_data.get("id", None, int) - name = form_data.get("name", None, str) - - if protocol is None and id is None: - raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) - - if protocol is None or protocol not in self.registry: - self.log.warning(f"Unknown service protocol: {protocol}") - raise ProblemError(UNKNOWN_PROTOCOL) - - if id is not None: - # Find an existing service to edit - service = self.get_existing_service(id, name, protocol) - response_code = 200 - else: - # Create a new service - if name is None: - raise ProblemError(INCOMPLETE_CONFIGURATION) - service = self.create_new_service(name, protocol) - response_code = 201 + service, protocol, response_code = self.get_service(form_data) impl_cls = self.registry[protocol] settings_class = impl_cls.settings_class() @@ -107,7 +80,7 @@ def process_post(self) -> Union[Response, 
ProblemDetail]: return Response(str(service.id), response_code) - def process_delete(self, service_id: int) -> Union[Response, ProblemDetail]: + def process_delete(self, service_id: int) -> Response | ProblemDetail: self.require_system_admin() try: return self.delete_service(service_id) diff --git a/api/admin/controller/feed.py b/api/admin/controller/feed.py index ea91c10c3..441a28ac1 100644 --- a/api/admin/controller/feed.py +++ b/api/admin/controller/feed.py @@ -4,7 +4,7 @@ from flask import url_for from api.admin.controller.base import AdminPermissionsControllerMixin -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.app_server import load_pagination_from_request from core.classifier import genres from core.feed.admin import AdminFeed diff --git a/api/admin/controller/individual_admin_settings.py b/api/admin/controller/individual_admin_settings.py index 7a3b67ccf..8e4297012 100644 --- a/api/admin/controller/individual_admin_settings.py +++ b/api/admin/controller/individual_admin_settings.py @@ -1,5 +1,4 @@ import json -from typing import Optional import flask from flask import Response @@ -27,8 +26,8 @@ def process_individual_admins(self): else: return self.process_post() - def _highest_authorized_role(self) -> Optional[AdminRole]: - highest_role: Optional[AdminRole] = None + def _highest_authorized_role(self) -> AdminRole | None: + highest_role: AdminRole | None = None has_auth = False admin = getattr(flask.request, "admin", None) @@ -54,7 +53,7 @@ def _highest_authorized_role(self) -> Optional[AdminRole]: return highest_role if has_auth else None def process_get(self): - logged_in_admin: Optional[Admin] = getattr(flask.request, "admin", None) + logged_in_admin: Admin | None = getattr(flask.request, "admin", None) if not logged_in_admin: return ADMIN_AUTH_NOT_CONFIGURED @@ -110,7 +109,7 @@ def process_post_create_first_admin(self, email: str): """Create the first admin in the system.""" # Passwords are always required, so check presence and validity up front. - password: Optional[str] = flask.request.form.get("password") + password: str | None = flask.request.form.get("password") if not self.is_acceptable_password(password): return self.unacceptable_password() @@ -143,7 +142,7 @@ def process_post_create_new_admin(self, email: str): """Create a new admin (not the first admin in the system).""" # Passwords are always required, so check presence and validity up front. - password: Optional[str] = flask.request.form.get("password") + password: str | None = flask.request.form.get("password") if not self.is_acceptable_password(password): return self.unacceptable_password() @@ -173,7 +172,7 @@ def process_post_create_new_admin(self, email: str): def process_post_update_existing_admin(self, admin: Admin): """Update an existing admin.""" - password: Optional[str] = flask.request.form.get("password") + password: str | None = flask.request.form.get("password") success = False try: @@ -244,7 +243,7 @@ def unacceptable_password(): ) @staticmethod - def is_acceptable_password(password: Optional[str]) -> bool: + def is_acceptable_password(password: str | None) -> bool: # Forbid missing passwords. 
if not password: return False diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index b491a508f..c8a93c8df 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -1,18 +1,25 @@ +from __future__ import annotations + import json from abc import ABC, abstractmethod -from typing import Any, Dict, Generic, List, NamedTuple, Optional, Type, TypeVar +from typing import Any, Generic, NamedTuple, TypeVar import flask from flask import Response +from werkzeug.datastructures import ImmutableMultiDict from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, MISSING_SERVICE, + MISSING_SERVICE_NAME, + NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, + UNKNOWN_PROTOCOL, ) -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from core.integration.base import ( + HasChildIntegrationConfiguration, HasIntegrationConfiguration, HasLibraryIntegrationConfiguration, ) @@ -35,20 +42,20 @@ class UpdatedLibrarySettingsTuple(NamedTuple): integration: IntegrationLibraryConfiguration - settings: Dict[str, Any] + settings: dict[str, Any] class ChangedLibrariesTuple(NamedTuple): - new: List[UpdatedLibrarySettingsTuple] - updated: List[UpdatedLibrarySettingsTuple] - removed: List[IntegrationLibraryConfiguration] + new: list[UpdatedLibrarySettingsTuple] + updated: list[UpdatedLibrarySettingsTuple] + removed: list[IntegrationLibraryConfiguration] class IntegrationSettingsController(ABC, Generic[T], LoggerMixin): def __init__( self, manager: CirculationManager, - registry: Optional[IntegrationRegistry[T]] = None, + registry: IntegrationRegistry[T] | None = None, ): self._db = manager._db self.registry = registry or self.default_registry() @@ -61,9 +68,9 @@ def default_registry(self) -> IntegrationRegistry[T]: ... 
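
The `@memoize(ttls=1800)` decorator on the method that follows caches the protocol metadata for half an hour, so the configuration forms are not rebuilt from the registry on every admin request. The project's own decorator comes from core and is not shown in this diff; a minimal sketch of a TTL memoizer along these lines, assuming hashable positional arguments only:

    import time
    from functools import wraps

    def memoize(ttls: int):
        # Illustrative only: cache results per argument tuple and recompute
        # once a cached entry is older than `ttls` seconds.
        def decorator(fn):
            cache: dict = {}

            @wraps(fn)
            def wrapper(*args):
                hit = cache.get(args)
                if hit is not None and time.time() - hit[1] < ttls:
                    return hit[0]
                value = fn(*args)
                cache[args] = (value, time.time())
                return value

            return wrapper

        return decorator

In this sketch, when the decorated function is a method, the bound instance is part of the cache key, so each controller instance keeps its own entry.
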
@memoize(ttls=1800) - def _cached_protocols(self) -> Dict[str, Dict[str, Any]]: + def _cached_protocols(self) -> dict[str, dict[str, Any]]: """Cached result for integration implementations""" - protocols = {} + protocols = [] for name, api in self.registry: protocol = { "name": name, @@ -75,17 +82,39 @@ def _cached_protocols(self) -> Dict[str, Dict[str, Any]]: protocol[ "library_settings" ] = api.library_settings_class().configuration_form(self._db) + if issubclass(api, HasChildIntegrationConfiguration): + protocol[ + "child_settings" + ] = api.child_settings_class().configuration_form(self._db) protocol.update(api.protocol_details(self._db)) - protocols[name] = protocol - return protocols + protocols.append((name, protocol)) + protocols.sort(key=lambda x: x[0]) + return dict(protocols) @property - def protocols(self) -> Dict[str, Dict[str, Any]]: + def protocols(self) -> dict[str, dict[str, Any]]: """Use a property for implementations to allow expiring cached results""" return self._cached_protocols() + def configured_service_info( + self, service: IntegrationConfiguration + ) -> dict[str, Any] | None: + return { + "id": service.id, + "name": service.name, + "protocol": service.protocol, + "settings": service.settings_dict, + } + + def configured_service_library_info( + self, library_configuration: IntegrationLibraryConfiguration + ) -> dict[str, Any] | None: + library_info = {"short_name": library_configuration.library.short_name} + library_info.update(library_configuration.settings_dict) + return library_info + @property - def configured_services(self) -> List[Dict[str, Any]]: + def configured_services(self) -> list[dict[str, Any]]: """Return a list of all currently configured services for the controller's goal.""" configured_services = [] for service in ( @@ -99,27 +128,26 @@ def configured_services(self) -> List[Dict[str, Any]]: ) continue - service_info = { - "id": service.id, - "name": service.name, - "protocol": service.protocol, - "settings": service.settings_dict, - } + service_info = self.configured_service_info(service) + if service_info is None: + continue api = self.registry[service.protocol] if issubclass(api, HasLibraryIntegrationConfiguration): libraries = [] for library_settings in service.library_configurations: - library_info = {"short_name": library_settings.library.short_name} - library_info.update(library_settings.settings_dict) - libraries.append(library_info) + library_info = self.configured_service_library_info( + library_settings + ) + if library_info is not None: + libraries.append(library_info) service_info["libraries"] = libraries configured_services.append(service_info) return configured_services def get_existing_service( - self, service_id: int, name: Optional[str], protocol: str + self, service_id: int, name: str | None, protocol: str ) -> IntegrationConfiguration: """ Query for an existing service to edit. @@ -129,7 +157,7 @@ def get_existing_service( necessary and a ProblemError will be raised if the name is already in use. 
""" - service: Optional[IntegrationConfiguration] = get_one( + service: IntegrationConfiguration | None = get_one( self._db, IntegrationConfiguration, id=service_id, @@ -174,11 +202,42 @@ def create_new_service(self, name: str, protocol: str) -> IntegrationConfigurati ) return new_service + def get_libraries_data(self, form_data: ImmutableMultiDict[str, str]) -> str | None: + libraries_data = form_data.get("libraries", None, str) + return libraries_data + + def get_service( + self, form_data: ImmutableMultiDict[str, str] + ) -> tuple[IntegrationConfiguration, str, int]: + protocol = form_data.get("protocol", None, str) + _id = form_data.get("id", None, int) + name = form_data.get("name", None, str) + + if protocol is None and _id is None: + raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) + + if protocol is None or protocol not in self.registry: + self.log.warning(f"Unknown service protocol: {protocol}") + raise ProblemError(UNKNOWN_PROTOCOL) + + if _id is not None: + # Find an existing service to edit + service = self.get_existing_service(_id, name, protocol) + response_code = 200 + else: + # Create a new service + if name is None: + raise ProblemError(MISSING_SERVICE_NAME) + service = self.create_new_service(name, protocol) + response_code = 201 + + return service, protocol, response_code + def get_library(self, short_name: str) -> Library: """ Get a library by its short name. """ - library: Optional[Library] = get_one(self._db, Library, short_name=short_name) + library: Library | None = get_one(self._db, Library, short_name=short_name) if library is None: raise ProblemError( NO_SUCH_LIBRARY.detailed( @@ -213,13 +272,49 @@ def get_changed_libraries( ) -> ChangedLibrariesTuple: """ Return a tuple of lists of libraries that have had their library settings - added, updated, or removed. + added, updated, or removed. No action is taken to add, update, or remove + the settings, this function just parses the submitted data and returns + the lists of libraries that need to be processed. + + :param service: The IntegrationConfiguration that the library settings should be + associated with. + :param libraries_data: A JSON string containing a list of dictionaries. + Each dictionary has a 'short_name' key that identifies which + library the settings are for, and then the rest of the dictionary is the + settings for that library. + + :return: A named tuple with three lists of libraries: + - new: A list of UpdatedLibrarySettingsTuple named tuples that contains the + IntegrationLibraryConfiguration and settings for each library with newly + added settings. + - updated: A list of UpdatedLibrarySettingsTuple named tuples that contains the + IntegrationLibraryConfiguration and settings for each library that had its + settings updated. + - removed: A list of IntegrationLibraryConfiguration objects for libraries that + had their settings removed. """ libraries = json.loads(libraries_data) existing_library_settings = { c.library.short_name: c for c in service.library_configurations } - submitted_library_settings = {l.get("short_name"): l for l in libraries} + + submitted_library_settings = {} + for library in libraries: + # Each library settings dictionary should have a 'short_name' key that identifies + # which library the settings are for. This key is removed from the dictionary as + # only the settings should be stored in the database. + short_name = library.get("short_name") + if short_name is None: + self.log.error( + f"Library settings missing short_name. Settings: {library}." 
+ ) + raise ProblemError( + INVALID_INPUT.detailed( + "Invalid library settings, missing short_name." + ) + ) + del library["short_name"] + submitted_library_settings[short_name] = library removed = [ existing_library_settings[library] @@ -246,7 +341,7 @@ def get_changed_libraries( return ChangedLibrariesTuple(new=new, updated=updated, removed=removed) def process_deleted_libraries( - self, removed: List[IntegrationLibraryConfiguration] + self, removed: list[IntegrationLibraryConfiguration] ) -> None: """ Delete any IntegrationLibraryConfigurations that were removed. @@ -256,8 +351,8 @@ def process_deleted_libraries( def process_updated_libraries( self, - libraries: List[UpdatedLibrarySettingsTuple], - settings_class: Type[BaseSettings], + libraries: list[UpdatedLibrarySettingsTuple], + settings_class: type[BaseSettings], ) -> None: """ Update the settings for any IntegrationLibraryConfigurations that were updated or added. @@ -270,7 +365,7 @@ def process_libraries( self, service: IntegrationConfiguration, libraries_data: str, - settings_class: Type[BaseSettings], + settings_class: type[BaseSettings], ) -> None: """ Process the library settings for a service. This will create new diff --git a/api/admin/controller/lanes.py b/api/admin/controller/lanes.py index 5aa02093a..ac80b125b 100644 --- a/api/admin/controller/lanes.py +++ b/api/admin/controller/lanes.py @@ -16,7 +16,7 @@ NO_CUSTOM_LISTS_FOR_LANE, NO_DISPLAY_NAME_FOR_LANE, ) -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.lanes import create_default_lanes from core.lane import Lane from core.model import CustomList, Library, create, get_one diff --git a/api/admin/controller/library_settings.py b/api/admin/controller/library_settings.py index 706576d68..7525faa6d 100644 --- a/api/admin/controller/library_settings.py +++ b/api/admin/controller/library_settings.py @@ -4,7 +4,6 @@ import json import uuid from io import BytesIO -from typing import Optional, Tuple import flask from flask import Response @@ -17,8 +16,8 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.form_data import ProcessFormData from api.admin.problem_details import * +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.lanes import create_default_lanes from core.configuration.library import LibrarySettings from core.model import ( @@ -164,7 +163,7 @@ def process_post(self) -> Response: else: return Response(str(library.uuid), 200) - def create_library(self, short_name: str) -> Tuple[Library, bool]: + def create_library(self, short_name: str) -> tuple[Library, bool]: self.require_system_admin() public_key, private_key = Library.generate_keypair() library, is_new = create( @@ -201,7 +200,7 @@ def get_library_from_uuid(self, library_uuid: str) -> Library: ) def check_short_name_unique( - self, library: Optional[Library], short_name: Optional[str] + self, library: Library | None, short_name: str | None ) -> None: if not library or (short_name and short_name != library.short_name): # If you're adding a new short_name, either by editing an @@ -227,7 +226,7 @@ def _process_image(image: Image.Image, _format: str = "PNG") -> bytes: def scale_and_store_logo( cls, library: Library, - image_file: Optional[FileStorage], + image_file: FileStorage | None, max_dimension: int = Configuration.LOGO_MAX_DIMENSION, ) -> None: if not image_file: diff --git 
a/api/admin/controller/patron.py b/api/admin/controller/patron.py index 31204c98c..3e328b68b 100644 --- a/api/admin/controller/patron.py +++ b/api/admin/controller/patron.py @@ -9,7 +9,7 @@ from api.adobe_vendor_id import AuthdataUtility from api.authentication.base import CannotCreateLocalPatron, PatronData from api.authenticator import LibraryAuthenticator -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.util.problem_detail import ProblemDetail diff --git a/api/admin/controller/patron_auth_service_self_tests.py b/api/admin/controller/patron_auth_service_self_tests.py index 6fae2a3d6..476456a59 100644 --- a/api/admin/controller/patron_auth_service_self_tests.py +++ b/api/admin/controller/patron_auth_service_self_tests.py @@ -1,58 +1,45 @@ from __future__ import annotations -from typing import Any, Dict, Optional, Type +from typing import Any -import flask from flask import Response from sqlalchemy.orm import Session -from api.admin.problem_details import * -from api.authentication.base import AuthenticationProvider +from api.admin.controller.self_tests import IntegrationSelfTestsController +from api.admin.problem_details import FAILED_TO_RUN_SELF_TESTS +from api.authentication.base import AuthenticationProviderType from api.integration.registry.patron_auth import PatronAuthRegistry -from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry -from core.model import get_one, json_serializer -from core.model.integration import ( - IntegrationConfiguration, - IntegrationLibraryConfiguration, -) +from core.model.integration import IntegrationConfiguration from core.util.problem_detail import ProblemDetail, ProblemError -class PatronAuthServiceSelfTestsController: +class PatronAuthServiceSelfTestsController( + IntegrationSelfTestsController[AuthenticationProviderType] +): def __init__( self, db: Session, - registry: Optional[IntegrationRegistry[AuthenticationProvider]] = None, + registry: IntegrationRegistry[AuthenticationProviderType] | None = None, ): - self.db = db - self.registry = registry if registry else PatronAuthRegistry() + registry = registry or PatronAuthRegistry() + super().__init__(db, registry) def process_patron_auth_service_self_tests( - self, identifier: Optional[int] + self, identifier: int | None ) -> Response | ProblemDetail: - if not identifier: - return MISSING_IDENTIFIER - try: - if flask.request.method == "GET": - return self.self_tests_process_get(identifier) - else: - return self.self_tests_process_post(identifier) - except ProblemError as e: - return e.problem_detail - - def self_tests_process_get(self, identifier: int) -> Response: - integration = self.look_up_by_id(identifier) - info = self.get_info(integration) - protocol_class = self.get_protocol_class(integration) + return self.process_self_tests(identifier) + def get_prior_test_results( + self, + protocol_class: type[AuthenticationProviderType], + integration: IntegrationConfiguration, + ) -> dict[str, Any]: # Find the first library associated with this service. library_configuration = self.get_library_configuration(integration) - if library_configuration is not None: - self_test_results = protocol_class.load_self_test_results(integration) - else: - self_test_results = dict( + if library_configuration is None: + return dict( exception=( "You must associate this service with at least one library " "before you can run self tests for it." 
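
The `get_prior_test_results` override above reports a disabled placeholder when the auth service has no library attached, and otherwise defers to the generic lookup of stored results. Reduced to a standalone sketch (object shapes simplified from the code above):

    def prior_test_results(integration, protocol_class):
        # No library attached: self tests cannot run, so report a disabled
        # placeholder instead of loading stored results.
        if not integration.library_configurations:
            return {
                "exception": (
                    "You must associate this service with at least one library "
                    "before you can run self tests for it."
                ),
                "disabled": True,
            }
        # Otherwise fall back to the results stored for this integration.
        return protocol_class.load_self_test_results(integration)

    class _FakeIntegration:
        library_configurations: list = []

    assert prior_test_results(_FakeIntegration(), None)["disabled"] is True
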
@@ -60,58 +47,9 @@ def self_tests_process_get(self, identifier: int) -> Response: disabled=True, ) - info["self_test_results"] = ( - self_test_results if self_test_results else "No results yet" - ) - return Response( - json_serializer({"self_test_results": info}), - status=200, - mimetype="application/json", - ) - - def self_tests_process_post(self, identifier: int) -> Response: - integration = self.look_up_by_id(identifier) - self.run_tests(integration) - return Response("Successfully ran new self tests", 200) - - @staticmethod - def get_library_configuration( - integration: IntegrationConfiguration, - ) -> Optional[IntegrationLibraryConfiguration]: - if not integration.library_configurations: - return None - return integration.library_configurations[0] - - def get_protocol_class( - self, integration: IntegrationConfiguration - ) -> Type[AuthenticationProvider]: - if not integration.protocol or integration.protocol not in self.registry: - raise ProblemError(problem_detail=UNKNOWN_PROTOCOL) - return self.registry[integration.protocol] - - def look_up_by_id(self, identifier: int) -> IntegrationConfiguration: - service = get_one( - self.db, - IntegrationConfiguration, - id=identifier, - goal=Goals.PATRON_AUTH_GOAL, - ) - if not service: - raise (ProblemError(problem_detail=MISSING_SERVICE)) - return service - - @staticmethod - def get_info(patron_auth_service: IntegrationConfiguration): - info = dict( - id=patron_auth_service.id, - name=patron_auth_service.name, - protocol=patron_auth_service.protocol, - goal=patron_auth_service.goal, - settings=patron_auth_service.settings_dict, - ) - return info + return super().get_prior_test_results(protocol_class, integration) - def run_tests(self, integration: IntegrationConfiguration) -> Dict[str, Any]: + def run_self_tests(self, integration: IntegrationConfiguration) -> dict[str, Any]: # If the auth service doesn't have at least one library associated with it, # we can't run self tests. 
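
patron_auth_services.py, next, drops its hand-rolled protocol/id/name parsing in favor of the shared `get_service()` and `get_libraries_data()` helpers added to integration_settings.py. The form those helpers expect looks roughly like this (field names are taken from the helpers; every value below is invented):

    import json

    form_data = {
        "id": "42",  # omitted when creating a new service (201 instead of 200)
        "name": "Example auth service",
        "protocol": "<a protocol name registered in PatronAuthRegistry>",
        "libraries": json.dumps(
            [
                # One dict per library: 'short_name' selects the library and is
                # stripped before storage; the remaining keys become that
                # library's settings.
                {"short_name": "main", "example_setting": "value"},
            ]
        ),
    }
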
library_configuration = self.get_library_configuration(integration) diff --git a/api/admin/controller/patron_auth_services.py b/api/admin/controller/patron_auth_services.py index ff3ae352f..0e8dd595f 100644 --- a/api/admin/controller/patron_auth_services.py +++ b/api/admin/controller/patron_auth_services.py @@ -1,5 +1,3 @@ -from typing import List, Set, Type, Union - import flask from flask import Response @@ -32,14 +30,14 @@ def default_registry(self) -> IntegrationRegistry[AuthenticationProviderType]: return PatronAuthRegistry() @property - def basic_auth_protocols(self) -> Set[str]: + def basic_auth_protocols(self) -> set[str]: return { name for name, api in self.registry if issubclass(api, BasicAuthenticationProvider) } - def process_patron_auth_services(self) -> Union[Response, ProblemDetail]: + def process_patron_auth_services(self) -> Response | ProblemDetail: self.require_system_admin() if flask.request.method == "GET": @@ -59,33 +57,11 @@ def process_get(self) -> Response: mimetype="application/json", ) - def process_post(self) -> Union[Response, ProblemDetail]: + def process_post(self) -> Response | ProblemDetail: try: form_data = flask.request.form - protocol = form_data.get("protocol", None, str) - id = form_data.get("id", None, int) - name = form_data.get("name", None, str) - libraries_data = form_data.get("libraries", None, str) - - if protocol is None and id is None: - raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) - - if protocol is None or protocol not in self.registry: - self.log.warning( - f"Unknown patron authentication service protocol: {protocol}" - ) - raise ProblemError(UNKNOWN_PROTOCOL) - - if id is not None: - # Find an existing service to edit - auth_service = self.get_existing_service(id, name, protocol) - response_code = 200 - else: - # Create a new service - if name is None: - raise ProblemError(MISSING_PATRON_AUTH_NAME) - auth_service = self.create_new_service(name, protocol) - response_code = 201 + libraries_data = self.get_libraries_data(form_data) + auth_service, protocol, response_code = self.get_service(form_data) # Update settings impl_cls = self.registry[protocol] @@ -134,14 +110,14 @@ def library_integration_validation( def process_updated_libraries( self, - libraries: List[UpdatedLibrarySettingsTuple], - settings_class: Type[BaseSettings], + libraries: list[UpdatedLibrarySettingsTuple], + settings_class: type[BaseSettings], ) -> None: super().process_updated_libraries(libraries, settings_class) for integration, _ in libraries: self.library_integration_validation(integration) - def process_delete(self, service_id: int) -> Union[Response, ProblemDetail]: + def process_delete(self, service_id: int) -> Response | ProblemDetail: self.require_system_admin() try: return self.delete_service(service_id) diff --git a/api/admin/controller/quicksight.py b/api/admin/controller/quicksight.py index 509098d6f..b10a622f8 100644 --- a/api/admin/controller/quicksight.py +++ b/api/admin/controller/quicksight.py @@ -1,5 +1,4 @@ import logging -from typing import Dict import boto3 import flask @@ -10,7 +9,7 @@ QuicksightGenerateUrlRequest, QuicksightGenerateUrlResponse, ) -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.problem_details import NOT_FOUND_ON_REMOTE from core.config import Configuration from core.model.admin import Admin @@ -20,7 +19,7 @@ class QuickSightController(CirculationManagerController): - def generate_quicksight_url(self, dashboard_name) -> Dict: + def 
generate_quicksight_url(self, dashboard_name) -> dict: log = logging.getLogger(self.__class__.__name__) admin: Admin = getattr(flask.request, "admin") request_data = QuicksightGenerateUrlRequest(**flask.request.args) @@ -56,16 +55,16 @@ def generate_quicksight_url(self, dashboard_name) -> Dict: if admin.is_librarian(library): allowed_libraries.append(library) - if request_data.library_ids: - allowed_library_ids = list( - set(request_data.library_ids).intersection( - {l.id for l in allowed_libraries} + if request_data.library_uuids: + allowed_library_uuids = list( + set(map(str, request_data.library_uuids)).intersection( + {l.uuid for l in allowed_libraries} ) ) else: - allowed_library_ids = [l.id for l in allowed_libraries] + allowed_library_uuids = [l.uuid for l in allowed_libraries] - if not allowed_library_ids: + if not allowed_library_uuids: raise ProblemError( NOT_FOUND_ON_REMOTE.detailed( "No library was found for this Admin that matched the request." @@ -73,13 +72,15 @@ def generate_quicksight_url(self, dashboard_name) -> Dict: ) libraries = self._db.execute( - select(Library.name) - .where(Library.id.in_(allowed_library_ids)) + select(Library.short_name) + .where(Library.uuid.in_(allowed_library_uuids)) .order_by(Library.name) ).all() try: - delimiter = "|" + short_names = [x.short_name for x in libraries] + session_tags = self._build_session_tags_array(short_names) + client = boto3.client("quicksight", region_name=region) response = client.generate_embed_url_for_anonymous_user( AwsAccountId=aws_account_id, @@ -88,32 +89,51 @@ def generate_quicksight_url(self, dashboard_name) -> Dict: ExperienceConfiguration={ "Dashboard": {"InitialDashboardId": dashboard_id} }, - SessionTags=[ - dict( - Key="library_name", - Value=delimiter.join([l.name for l in libraries]), - ) - ], + SessionTags=session_tags, ) except Exception as ex: - log.error(f"Error while fetching the Quisksight Embed url: {ex}") + log.error(f"Error while fetching the Quicksight Embed url: {ex}") raise ProblemError( INTERNAL_SERVER_ERROR.detailed( - "Error while fetching the Quisksight Embed url." + "Error while fetching the Quicksight Embed url." ) ) embed_url = response.get("EmbedUrl") if response.get("Status") // 100 != 2 or embed_url is None: - log.error(f"QuiskSight Embed url error response {response}") + log.error(f"Quicksight Embed url error response {response}") raise ProblemError( INTERNAL_SERVER_ERROR.detailed( - "Error while fetching the Quisksight Embed url." + "Error while fetching the Quicksight Embed url." 
) ) return QuicksightGenerateUrlResponse(embed_url=embed_url).api_dict() + def _build_session_tags_array(self, short_names: list[str]) -> list[dict[str, str]]: + delimiter = "|" # specified by AWS's session tag limit + max_chars_per_tag = 256 + session_tags: list[str] = [] + session_tag = "" + for short_name in short_names: + if len(session_tag + delimiter + short_name) > max_chars_per_tag: + session_tags.append(session_tag) + session_tag = "" + if session_tag: + session_tag += delimiter + short_name + else: + session_tag = short_name + if session_tag: + session_tags.append(session_tag) + + return [ + { + "Key": f"library_short_name_{tag_index}", + "Value": tag_value, + } + for tag_index, tag_value in enumerate(session_tags) + ] + def get_dashboard_names(self): """Get the named dashboard IDs defined in the configuration""" config = Configuration.quicksight_authorized_arns() diff --git a/api/admin/controller/reset_password.py b/api/admin/controller/reset_password.py index 00a6391c9..effc57857 100644 --- a/api/admin/controller/reset_password.py +++ b/api/admin/controller/reset_password.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Optional, Union - import flask from flask import Request, Response, redirect, url_for from flask_babel import lazy_gettext as _ @@ -41,7 +39,7 @@ class ResetPasswordController(AdminController): ) ) - def forgot_password(self) -> Union[ProblemDetail, WerkzeugResponse]: + def forgot_password(self) -> ProblemDetail | WerkzeugResponse: """Shows forgot password page or starts off forgot password workflow""" if not self.admin_auth_providers: @@ -91,7 +89,7 @@ def forgot_password(self) -> Union[ProblemDetail, WerkzeugResponse]: "Sign in", ) - def _extract_admin_from_request(self, request: Request) -> Optional[Admin]: + def _extract_admin_from_request(self, request: Request) -> Admin | None: email = request.form.get("email") admin = get_one(self._db, Admin, email=email) @@ -114,7 +112,7 @@ def _generate_reset_password_url( def reset_password( self, reset_password_token: str, admin_id: int - ) -> Optional[WerkzeugResponse]: + ) -> WerkzeugResponse | None: """Shows reset password page or process the reset password request""" auth = self.admin_auth_provider(PasswordAdminAuthenticationProvider.NAME) if not auth: @@ -195,11 +193,11 @@ def reset_password( def _response_with_message_and_redirect_button( self, - message: Optional[str], + message: str | None, redirect_button_link: str, redirect_button_text: str, is_error: bool = False, - status_code: Optional[int] = 200, + status_code: int | None = 200, ) -> Response: style = error_style if is_error else body_style diff --git a/api/admin/controller/self_tests.py b/api/admin/controller/self_tests.py index 71e6f5836..239ff40ae 100644 --- a/api/admin/controller/self_tests.py +++ b/api/admin/controller/self_tests.py @@ -1,10 +1,31 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Any, Generic, TypeVar + import flask from flask import Response from flask_babel import lazy_gettext as _ +from sqlalchemy.orm import Session from api.admin.controller.settings import SettingsController -from api.admin.problem_details import FAILED_TO_RUN_SELF_TESTS, MISSING_IDENTIFIER -from core.util.problem_detail import ProblemDetail +from api.admin.problem_details import ( + FAILED_TO_RUN_SELF_TESTS, + MISSING_IDENTIFIER, + MISSING_SERVICE, + UNKNOWN_PROTOCOL, +) +from core.integration.base import HasIntegrationConfiguration +from core.integration.registry import 
IntegrationRegistry +from core.integration.settings import BaseSettings +from core.model import ( + IntegrationConfiguration, + IntegrationLibraryConfiguration, + get_one, + json_serializer, +) +from core.selftest import HasSelfTestsIntegrationConfiguration +from core.util.problem_detail import ProblemDetail, ProblemError class SelfTestsController(SettingsController): @@ -71,3 +92,105 @@ def self_tests_process_post(self, identifier): return FAILED_TO_RUN_SELF_TESTS.detailed( _("Failed to run self tests for this %(type)s.", type=self.type) ) + + +T = TypeVar("T", bound=HasIntegrationConfiguration[BaseSettings]) + + +class IntegrationSelfTestsController(Generic[T], ABC): + def __init__( + self, + db: Session, + registry: IntegrationRegistry[T], + ): + self.db = db + self.registry = registry + + @abstractmethod + def run_self_tests( + self, integration: IntegrationConfiguration + ) -> dict[str, Any] | None: + ... + + def get_protocol_class(self, integration: IntegrationConfiguration) -> type[T]: + if not integration.protocol or integration.protocol not in self.registry: + raise ProblemError(problem_detail=UNKNOWN_PROTOCOL) + return self.registry[integration.protocol] + + def look_up_by_id(self, identifier: int) -> IntegrationConfiguration: + service = get_one( + self.db, + IntegrationConfiguration, + id=identifier, + goal=self.registry.goal, + ) + if not service: + raise (ProblemError(problem_detail=MISSING_SERVICE)) + return service + + @staticmethod + def get_info(integration: IntegrationConfiguration) -> dict[str, Any]: + info = dict( + id=integration.id, + name=integration.name, + protocol=integration.protocol, + goal=integration.goal, + settings=integration.settings_dict, + ) + return info + + @staticmethod + def get_library_configuration( + integration: IntegrationConfiguration, + ) -> IntegrationLibraryConfiguration | None: + if not integration.library_configurations: + return None + return integration.library_configurations[0] + + def get_prior_test_results( + self, protocol_class: type[T], integration: IntegrationConfiguration + ) -> dict[str, Any]: + if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration): + self_test_results = protocol_class.load_self_test_results(integration) # type: ignore[unreachable] + else: + self_test_results = dict( + exception=("Self tests are not supported for this integration."), + disabled=True, + ) + + return self_test_results + + def process_self_tests(self, identifier: int | None) -> Response | ProblemDetail: + if not identifier: + return MISSING_IDENTIFIER + try: + if flask.request.method == "GET": + return self.self_tests_process_get(identifier) + else: + return self.self_tests_process_post(identifier) + except ProblemError as e: + return e.problem_detail + + def self_tests_process_get(self, identifier: int) -> Response: + integration = self.look_up_by_id(identifier) + info = self.get_info(integration) + protocol_class = self.get_protocol_class(integration) + + self_test_results = self.get_prior_test_results(protocol_class, integration) + + info["self_test_results"] = ( + self_test_results if self_test_results else "No results yet" + ) + return Response( + json_serializer({"self_test_results": info}), + status=200, + mimetype="application/json", + ) + + def self_tests_process_post(self, identifier: int) -> Response: + integration = self.look_up_by_id(identifier) + results = self.run_self_tests(integration) + if results is not None: + return Response("Successfully ran new self tests", 200) + else: + raise 
ProblemError(problem_detail=FAILED_TO_RUN_SELF_TESTS) diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index 277605c80..c9e6ceaa9 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -2,7 +2,7 @@ import json import logging -from typing import TYPE_CHECKING, Any, Dict, Optional, Type, cast +from typing import TYPE_CHECKING, Any import flask from flask import Response @@ -23,8 +23,7 @@ UNKNOWN_PROTOCOL, ) from api.admin.validator import Validator -from api.controller import CirculationManagerController -from api.integration.registry.license_providers import LicenseProvidersRegistry +from api.controller.circulation_manager import CirculationManagerController from core.external_search import ExternalSearchIndex from core.integration.base import ( HasChildIntegrationConfiguration, @@ -43,8 +42,6 @@ get_one, get_one_or_create, ) -from core.opds_import import OPDSImporter, OPDSImportMonitor -from core.selftest import BaseHasSelfTests from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: @@ -58,7 +55,7 @@ class SettingsController(CirculationManagerController, AdminPermissionsControlle def _get_settings_class( self, registry: IntegrationRegistry, protocol_name: str, is_child=False - ) -> Type[BaseSettings] | ProblemDetail | None: + ) -> type[BaseSettings] | ProblemDetail | None: api_class = registry.get(protocol_name) if not api_class: return None @@ -237,14 +234,14 @@ def _get_menu_values(setting_key, form): return values def _extract_form_setting_value( - self, setting: Dict[str, Any], form_data: ImmutableMultiDict - ) -> Optional[Any]: + self, setting: dict[str, Any], form_data: ImmutableMultiDict + ) -> Any | None: """Extract the value of a setting from form data.""" key = setting.get("key") setting_type = setting.get("type") - value: Optional[Any] + value: Any | None if setting_type == "list" and not setting.get("options"): value = [item for item in form_data.getlist(key) if item] elif setting_type == "menu": @@ -298,7 +295,7 @@ def _set_configuration_library( self, configuration: IntegrationConfiguration, library_info: dict, - protocol_class: Type[HasLibraryIntegrationConfiguration], + protocol_class: type[HasLibraryIntegrationConfiguration], ) -> IntegrationLibraryConfiguration: """Set the library configuration for the integration configuration. 
The data will be validated first.""" @@ -307,14 +304,16 @@ def _set_configuration_library( library = get_one(self._db, Library, short_name=info_copy.pop("short_name")) if not library: raise RuntimeError("Could not find the configuration library") - config = None # Validate first validated_data = protocol_class.library_settings_class()(**info_copy) + # Attach the configuration - config = configuration.for_library(cast(int, library.id), create=True) - config.settings_dict = validated_data.dict() - return config + library_configuration = IntegrationLibraryConfiguration( + library=library, settings_dict=validated_data.dict() + ) + configuration.library_configurations.append(library_configuration) + return library_configuration def _set_integration_library(self, integration, library_info, protocol): library = get_one(self._db, Library, short_name=library_info.get("short_name")) @@ -407,24 +406,7 @@ def _get_prior_test_results(self, item, protocol_class=None, *extra_args): self_test_results = None try: - if self.type == "collection": - if not item.protocol or not len(item.protocol): - return None - - if not protocol_class: - registry = LicenseProvidersRegistry() - protocol_class = registry.get(item.protocol) - - if item.protocol == OPDSImportMonitor.PROTOCOL: - protocol_class = OPDSImportMonitor - extra_args = (OPDSImporter,) - - if issubclass(protocol_class, BaseHasSelfTests): - self_test_results = protocol_class.prior_test_results( - self._db, protocol_class, self._db, item, *extra_args - ) - - elif self.type == "search service": + if self.type == "search service": self_test_results = ExternalSearchIndex.prior_test_results( self._db, None, self._db, item ) @@ -432,20 +414,6 @@ def _get_prior_test_results(self, item, protocol_class=None, *extra_args): self_test_results = protocol_class.prior_test_results( self._db, *extra_args ) - elif self.type == "patron authentication service": - library = None - if len(item.libraries): - library = item.libraries[0] - self_test_results = protocol_class.prior_test_results( - self._db, None, library, item - ) - else: - self_test_results = dict( - exception=_( - "You must associate this service with at least one library before you can run self tests for it." - ), - disabled=True, - ) except Exception as e: # This is bad, but not so bad that we should short-circuit diff --git a/api/admin/controller/sign_in.py b/api/admin/controller/sign_in.py index e61aa2e00..57f2e71b4 100644 --- a/api/admin/controller/sign_in.py +++ b/api/admin/controller/sign_in.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -from typing import Tuple from urllib.parse import urlsplit import flask @@ -147,7 +146,7 @@ class SanitizedRedirections: """Functions to sanitize redirects.""" @staticmethod - def _check_redirect(target: str) -> Tuple[bool, str]: + def _check_redirect(target: str) -> tuple[bool, str]: """Check that a redirect is allowed. 
Because the URL redirect is assumed to be untrusted user input, we extract the URL path and forbid redirecting to external diff --git a/api/admin/controller/timestamps.py b/api/admin/controller/timestamps.py index ba848e5a2..43f0c8fe3 100644 --- a/api/admin/controller/timestamps.py +++ b/api/admin/controller/timestamps.py @@ -1,7 +1,7 @@ from __future__ import annotations from api.admin.controller.base import AdminPermissionsControllerMixin -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.model import Timestamp diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index bc79a1bfe..7363cdc7e 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -6,7 +6,7 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.problem_details import * -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.classifier import NO_NUMBER, NO_VALUE, SimplifiedGenreClassifier, genres from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.admin import AdminAnnotator @@ -339,7 +339,6 @@ def edit(self, identifier_type, identifier): # problem the user is trying to fix. policy = PresentationCalculationPolicy( classify=True, - regenerate_marc_record=True, update_search_index=True, calculate_quality=changed_rating, choose_summary=changed_summary, @@ -621,7 +620,6 @@ def edit_classifications(self, identifier_type, identifier): # Update presentation policy = PresentationCalculationPolicy( classify=True, - regenerate_marc_record=True, update_search_index=True, ) work.calculate_presentation(policy=policy) diff --git a/api/admin/dashboard_stats.py b/api/admin/dashboard_stats.py index 07df5a1c0..5bfe92674 100644 --- a/api/admin/dashboard_stats.py +++ b/api/admin/dashboard_stats.py @@ -1,13 +1,14 @@ from __future__ import annotations -import typing +import dataclasses +from collections.abc import Callable, Iterable from datetime import datetime from functools import partial -from typing import Callable, Iterable +from typing import TYPE_CHECKING from sqlalchemy.orm import Session -from sqlalchemy.sql import func -from sqlalchemy.sql.expression import and_, or_ +from sqlalchemy.sql import func, select +from sqlalchemy.sql.expression import and_, false, or_, true from api.admin.model.dashboard_statistics import ( CollectionInventory, @@ -16,7 +17,25 @@ PatronStatistics, StatisticsResponse, ) -from core.model import Admin, Collection, Hold, Library, LicensePool, Loan, Patron +from core.model import ( + Admin, + Collection, + Edition, + Hold, + Library, + LicensePool, + Loan, + Patron, +) + +if TYPE_CHECKING: + from sqlalchemy.sql.elements import ( + BinaryExpression, + BooleanClauseList, + ClauseElement, + ) + from sqlalchemy.sql.expression import ColumnElement + from sqlalchemy.sql.type_api import TypeEngine def generate_statistics(admin: Admin, db: Session) -> StatisticsResponse: @@ -24,17 +43,17 @@ def generate_statistics(admin: Admin, db: Session) -> StatisticsResponse: class Statistics: - METERED_LICENSE_FILTER = and_( # type: ignore[type-var] + METERED_LICENSE_FILTER = and_( LicensePool.licenses_owned > 0, - LicensePool.unlimited_access == False, - LicensePool.open_access == False, + LicensePool.unlimited_access == false(), + LicensePool.open_access == false(), ) - UNLIMITED_LICENSE_FILTER = and_( # type: 
ignore[type-var] - LicensePool.unlimited_access == True, - LicensePool.open_access == False, + UNLIMITED_LICENSE_FILTER = and_( + LicensePool.unlimited_access == true(), + LicensePool.open_access == false(), ) - OPEN_ACCESS_FILTER = LicensePool.open_access == True - AT_LEAST_ONE_LENDABLE_FILTER = or_( + OPEN_ACCESS_FILTER = LicensePool.open_access == true() + AT_LEAST_ONE_LOANABLE_FILTER = or_( UNLIMITED_LICENSE_FILTER, OPEN_ACCESS_FILTER, and_(METERED_LICENSE_FILTER, LicensePool.licenses_available > 0), @@ -43,6 +62,70 @@ class Statistics: def __init__(self, session: Session): self._db = session + def stats(self, admin: Admin) -> StatisticsResponse: + """Build and return a statistics response for admin user's authorized libraries.""" + + # Determine which libraries and collections are authorized for this user. + authorized_libraries = self._libraries_for_admin(admin) + authorized_collections_by_library = { + lib.short_name: set(lib.all_collections) for lib in authorized_libraries + } + all_authorized_collections: list[Collection] = [ + c for c in self._db.query(Collection) if admin.can_see_collection(c) + ] + + collection_inventories = sorted( + (self._create_collection_inventory(c) for c in all_authorized_collections), + key=lambda c: c.id, + ) + ( + collection_inventory_summary, + collection_inventory_summary_by_medium, + ) = _summarize_collection_inventories( + collection_inventories, all_authorized_collections + ) + + inventories_by_library = { + library_key: _summarize_collection_inventories( + collection_inventories, collections + ) + for library_key, collections in authorized_collections_by_library.items() + } + patron_stats_by_library = { + lib.short_name: self._gather_patron_stats(lib) + for lib in authorized_libraries + } + library_statistics = [ + LibraryStatistics( + key=lib.short_name, + name=lib.name or "(missing library name)", + patron_statistics=patron_stats_by_library[lib.short_name], + inventory_summary=inventories_by_library[lib.short_name][0], + inventory_by_medium=inventories_by_library[lib.short_name][1], + collection_ids=sorted( + [ + c.id + for c in authorized_collections_by_library[lib.short_name] + if c.id is not None + ] + ), + ) + for lib in authorized_libraries + ] + + # Accumulate patron summary statistics from authorized libraries. 
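# --- aside: a minimal sketch of the accumulation used just below ---
# `sum(iterable, start)` works for any type implementing __add__, so the
# per-library PatronStatistics values can be folded together starting from
# a zeroed element. The tiny model here is a hypothetical stand-in for
# PatronStatistics (api/admin/model/dashboard_statistics.py):
from dataclasses import dataclass

@dataclass
class _Stats:
    total: int = 0
    with_active_loans: int = 0

    def __add__(self, other: "_Stats") -> "_Stats":
        return _Stats(
            self.total + other.total,
            self.with_active_loans + other.with_active_loans,
        )

assert sum([_Stats(2, 1), _Stats(3, 0)], _Stats()) == _Stats(5, 1)
# --- end aside ---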
+ patron_summary = sum( + patron_stats_by_library.values(), PatronStatistics.zeroed() + ) + + return StatisticsResponse( + collections=collection_inventories, + libraries=library_statistics, + inventory_summary=collection_inventory_summary, + inventory_by_medium=collection_inventory_summary_by_medium, + patron_summary=patron_summary, + ) + def _libraries_for_admin(self, admin: Admin) -> list[Library]: """Return a list of libraries to which this user has access.""" return [ @@ -51,55 +134,81 @@ def _libraries_for_admin(self, admin: Admin) -> list[Library]: if admin.is_librarian(library) ] - def _collection_count(self, collection_filter, query_filter) -> int: - return ( - self._db.query(LicensePool) - .filter(collection_filter) - .filter(query_filter) - .count() + def _collection_statistics_by_medium_query( + self, + collection_filter: BinaryExpression[TypeEngine[bool]], + query_filter: BooleanClauseList[ClauseElement], + /, + columns: list[ColumnElement[TypeEngine[int]]], + ) -> dict[str, dict[str, int]]: + stats_with_medium = ( + self._db.execute( + select( + Edition.medium, + *columns, + ) + .select_from(LicensePool) + .join(Edition, Edition.id == LicensePool.presentation_edition_id) + .where(collection_filter) + .where(query_filter) + .group_by(Edition.medium) + ) + .mappings() + .all() ) + return { + row["medium"]: {k: v for k, v in row.items() if k != "medium"} + for row in stats_with_medium + } - def _gather_collection_stats(self, collection: Collection) -> CollectionInventory: + def _run_collection_stats_queries( + self, collection: Collection + ) -> _CollectionStatisticsQueryResults: collection_filter = LicensePool.collection_id == collection.id - _count: Callable = partial(self._collection_count, collection_filter) - - metered_license_title_count = _count(self.METERED_LICENSE_FILTER) - unlimited_license_title_count = _count(self.UNLIMITED_LICENSE_FILTER) - open_access_title_count = _count(self.OPEN_ACCESS_FILTER) - # TODO: We no longer support self-hosted books, so this should always be 0. - # this value is still included in the response for backwards compatibility, - # but should be removed in a future release. 
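# --- aside: shape of the grouped query introduced above ---
# _collection_statistics_by_medium_query collapses several separate COUNT
# round trips into one SELECT grouped by Edition.medium. A self-contained
# SQLAlchemy 1.4-style sketch with lightweight table stand-ins:
from sqlalchemy import column, func, select, table

editions = table("editions", column("id"), column("medium"))
pools = table("license_pools", column("id"), column("presentation_edition_id"))

stmt = (
    select(editions.c.medium, func.count().label("count"))
    .select_from(pools)
    .join(editions, editions.c.id == pools.c.presentation_edition_id)
    .group_by(editions.c.medium)
)
# Executing this with .mappings().all() yields rows like
# {"medium": "Book", "count": 3}, which the new helper reshapes into
# {medium: {stat_name: value}} dictionaries.
# --- end aside ---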
- self_hosted_title_count = 0 - at_least_one_loanable_count = _count(self.AT_LEAST_ONE_LENDABLE_FILTER) - - licenses_owned_count, licenses_available_count = map( - lambda x: x if x is not None else 0, - self._db.query( - func.sum(LicensePool.licenses_owned), - func.sum(LicensePool.licenses_available), - ) - .filter(collection_filter) - .filter(self.METERED_LICENSE_FILTER) - .all()[0], + _query_stats_group: Callable[..., dict[str, dict[str, int]]] = partial( + self._collection_statistics_by_medium_query, collection_filter + ) + count = func.count().label("count") + return _CollectionStatisticsQueryResults( + metered_title_counts=_query_stats_group( + self.METERED_LICENSE_FILTER, columns=[count] + ), + unlimited_title_counts=_query_stats_group( + self.UNLIMITED_LICENSE_FILTER, columns=[count] + ), + open_access_title_counts=_query_stats_group( + self.OPEN_ACCESS_FILTER, columns=[count] + ), + loanable_title_counts=_query_stats_group( + self.AT_LEAST_ONE_LOANABLE_FILTER, columns=[count] + ), + metered_license_stats=_query_stats_group( + self.METERED_LICENSE_FILTER, + columns=[ + func.sum(LicensePool.licenses_owned).label("owned"), + func.sum(LicensePool.licenses_available).label("available"), + ], + ), ) + def _create_collection_inventory( + self, collection: Collection + ) -> CollectionInventory: + """Return a CollectionInventory for the given collection.""" + + statistics = self._run_collection_stats_queries(collection) + # Ensure that the key is a string, even if the medium is null. + inventory_by_medium = { + str(m): inv for m, inv in statistics.inventories_by_medium().items() + } + summary_inventory = sum( + inventory_by_medium.values(), InventoryStatistics.zeroed() + ) return CollectionInventory( id=collection.id, name=collection.name, - inventory=InventoryStatistics( - titles=metered_license_title_count - + unlimited_license_title_count - + open_access_title_count, - available_titles=at_least_one_loanable_count, - self_hosted_titles=self_hosted_title_count, - open_access_titles=open_access_title_count, - licensed_titles=metered_license_title_count - + unlimited_license_title_count, - unlimited_license_titles=unlimited_license_title_count, - metered_license_titles=metered_license_title_count, - metered_licenses_owned=licenses_owned_count, - metered_licenses_available=licenses_available_count, - ), + inventory=summary_inventory, + inventory_by_medium=inventory_by_medium, ) def _gather_patron_stats(self, library: Library) -> PatronStatistics: @@ -129,95 +238,94 @@ def _gather_patron_stats(self, library: Library) -> PatronStatistics: holds=hold_count, ) - def _collection_level_statistics( - self, - collections: typing.Collection[Collection], - ) -> tuple[list[CollectionInventory], InventoryStatistics]: - """Return individual and summary statistics for the given collections. - - The list of per-collection statistics is sorted by the collection `id`. 
- """ - collection_stats = [self._gather_collection_stats(c) for c in collections] - summary_stats = sum( - (c.inventory for c in collection_stats), InventoryStatistics.zeroed() - ) - return sorted(collection_stats, key=lambda c: c.id), summary_stats - - @staticmethod - def lookup_stats( - collection_inventories: Iterable[CollectionInventory], - collections: Iterable[Collection], - defaults: Iterable[InventoryStatistics] | None = None, - ) -> Iterable[InventoryStatistics]: - """Return the inventory dictionaries for the specified collections.""" - defaults = defaults if defaults is not None else [InventoryStatistics.zeroed()] - collection_ids = {c.id for c in collections} - return ( - ( - stats.inventory - for stats in collection_inventories - if stats.id in collection_ids - ) - if collection_ids - else defaults - ) - def stats(self, admin: Admin) -> StatisticsResponse: - """Build and return a statistics response for user's authorized libraries.""" +def _summarize_collection_inventories( + collection_inventories: Iterable[CollectionInventory], + collections: Iterable[Collection], +) -> tuple[InventoryStatistics, dict[str, InventoryStatistics]]: + """Summarize the inventories associated with the specified collections. - # Determine which libraries and collections are authorized for this user. - authorized_libraries = self._libraries_for_admin(admin) - authorized_collections_by_library = { - lib.short_name: set(lib.all_collections) for lib in authorized_libraries - } - all_authorized_collections: list[Collection] = [ - c for c in self._db.query(Collection) if admin.can_see_collection(c) - ] + The collections represented by the specified `collection_inventories` + must be a superset of the specified `collections`. - # Gather collection-level statistics for authorized collections. - ( - collection_inventories, - collection_inventory_summary, - ) = self._collection_level_statistics(all_authorized_collections) - - # Gather library-level statistics for the authorized libraries by - # summing up the values of each of libraries associated collections. - inventory_by_library = { - library_key: sum( - self.lookup_stats(collection_inventories, collections), - InventoryStatistics.zeroed(), + :param collections: `collections` for which to summarize inventory information. + :param collection_inventories: `CollectionInventory`s for the collections. + :return: Summary inventory and summary inventory by medium. 
+ """ + included_collection_inventories = ( + inv for inv in collection_inventories if inv.id in {c.id for c in collections} + ) + + summary_inventory = InventoryStatistics.zeroed() + summary_inventory_by_medium: dict[str, InventoryStatistics] = {} + + for ci in included_collection_inventories: + summary_inventory += ci.inventory + inventory_by_medium = ci.inventory_by_medium or {} + for medium, inventory in inventory_by_medium.items(): + summary_inventory_by_medium[medium] = ( + summary_inventory_by_medium.get(medium, InventoryStatistics.zeroed()) + + inventory ) - for library_key, collections in authorized_collections_by_library.items() - } - patron_stats_by_library = { - lib.short_name: self._gather_patron_stats(lib) - for lib in authorized_libraries + return summary_inventory, summary_inventory_by_medium + + +@dataclasses.dataclass(frozen=True) +class _CollectionStatisticsQueryResults: + unlimited_title_counts: dict[str, dict[str, int]] + open_access_title_counts: dict[str, dict[str, int]] + loanable_title_counts: dict[str, dict[str, int]] + metered_title_counts: dict[str, dict[str, int]] + metered_license_stats: dict[str, dict[str, int]] + + def inventories_by_medium(self) -> dict[str, InventoryStatistics]: + """Return a mapping of all mediums present to their associated inventories.""" + return { + medium: self.inventory_for_medium(medium) + for medium in self.mediums_present() } - library_statistics = [ - LibraryStatistics( - key=lib.short_name, - name=lib.name or "(missing library name)", - patron_statistics=patron_stats_by_library[lib.short_name], - inventory_summary=inventory_by_library[lib.short_name], - collection_ids=sorted( - [ - c.id - for c in authorized_collections_by_library[lib.short_name] - if c.id is not None - ] - ), - ) - for lib in authorized_libraries - ] - # Accumulate patron summary statistics from authorized libraries. 
- patron_summary = sum( - patron_stats_by_library.values(), PatronStatistics.zeroed() + def mediums_present(self) -> set[str]: + """Returns a list of the mediums present in these collection statistics.""" + statistics = dataclasses.asdict(self) + return set().union(*(stat.keys() for stat in statistics.values())) + + def inventory_for_medium(self, medium: str) -> InventoryStatistics: + """Return statistics for the specified medium.""" + unlimited_titles = self._lookup_property( + "unlimited_title_counts", medium, "count" + ) + open_access_titles = self._lookup_property( + "open_access_title_counts", medium, "count" + ) + loanable_titles = self._lookup_property( + "loanable_title_counts", medium, "count" + ) + metered_titles = self._lookup_property("metered_title_counts", medium, "count") + metered_owned_licenses = self._lookup_property( + "metered_license_stats", medium, "owned" + ) + metered_available_licenses = self._lookup_property( + "metered_license_stats", medium, "available" ) - return StatisticsResponse( - collections=collection_inventories, - libraries=library_statistics, - inventory_summary=collection_inventory_summary, - patron_summary=patron_summary, + return InventoryStatistics( + titles=metered_titles + unlimited_titles + open_access_titles, + available_titles=loanable_titles, + open_access_titles=open_access_titles, + licensed_titles=metered_titles + unlimited_titles, + unlimited_license_titles=unlimited_titles, + metered_license_titles=metered_titles, + metered_licenses_owned=metered_owned_licenses, + metered_licenses_available=metered_available_licenses, ) + + def _lookup_property( + self, + group: str, + medium: str, + column_name: str, + ) -> int: + """Return value for a statistic, if present; else, return zero.""" + field: dict[str, dict[str, int]] = getattr(self, group, {}) + return field.get(medium, {}).get(column_name, 0) diff --git a/api/admin/form_data.py b/api/admin/form_data.py index 88e9dc4d2..690371129 100644 --- a/api/admin/form_data.py +++ b/api/admin/form_data.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar from werkzeug.datastructures import ImmutableMultiDict @@ -14,38 +14,31 @@ class ProcessFormData: - @classmethod - def _remove_prefix(cls, text: str, prefix: str) -> str: - # TODO: Remove this when we upgrade to Python 3.9 - if text.startswith(prefix): - return text[len(prefix) :] - return text - @classmethod def _process_list( cls, key: str, form_data: ImmutableMultiDict[str, str] - ) -> List[str]: + ) -> list[str]: return [v for v in form_data.getlist(key) if v != ""] @classmethod def _process_menu( cls, key: str, form_data: ImmutableMultiDict[str, str] - ) -> List[str]: + ) -> list[str]: return [ - cls._remove_prefix(v, f"{key}_") + v.removeprefix(f"{key}_") for v in form_data.keys() if v.startswith(key) and v != f"{key}_menu" ] @classmethod def get_settings_dict( - cls, settings_class: Type[BaseSettings], form_data: ImmutableMultiDict[str, str] - ) -> Dict[str, Any]: + cls, settings_class: type[BaseSettings], form_data: ImmutableMultiDict[str, str] + ) -> dict[str, Any]: """ Process the wacky format that form data is sent by the admin interface into a dictionary that we can use to update the settings. 
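# --- aside: str.removeprefix, which replaces the deleted helper above ---
# The hand-rolled _remove_prefix was only needed before Python 3.9;
# str.removeprefix strips at most one exact prefix and leaves non-matching
# strings untouched (the key name here is a hypothetical example):
assert "genres_menu_Fiction".removeprefix("genres_menu_") == "Fiction"
assert "unrelated".removeprefix("genres_menu_") == "unrelated"
# --- end aside ---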
""" - return_data: Dict[str, Any] = {} + return_data: dict[str, Any] = {} for field in settings_class.__fields__.values(): if not isinstance(field.field_info, FormFieldInfo): continue @@ -63,6 +56,6 @@ def get_settings_dict( @classmethod def get_settings( - cls, settings_class: Type[T], form_data: ImmutableMultiDict[str, str] + cls, settings_class: type[T], form_data: ImmutableMultiDict[str, str] ) -> T: return settings_class(**cls.get_settings_dict(settings_class, form_data)) diff --git a/api/admin/model/dashboard_statistics.py b/api/admin/model/dashboard_statistics.py index 91b7a6f6f..c31aef22f 100644 --- a/api/admin/model/dashboard_statistics.py +++ b/api/admin/model/dashboard_statistics.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from typing import Any, List +from typing import Any from pydantic import Field, NonNegativeInt @@ -54,9 +54,6 @@ class InventoryStatistics(StatisticsBaseModel): available_titles: NonNegativeInt = Field( description="Number of books available to lend." ) - self_hosted_titles: NonNegativeInt = Field( - description="Number of books that are self-hosted." - ) open_access_titles: NonNegativeInt = Field( description="Number of books with an Open Access license." ) @@ -90,7 +87,10 @@ class LibraryStatistics(CustomBaseModel): inventory_summary: InventoryStatistics = Field( description="Summary of inventory statistics for this library." ) - collection_ids: List[int] = Field( + inventory_by_medium: dict[str, InventoryStatistics] = Field( + description="Per-medium inventory statistics for this library." + ) + collection_ids: list[int] = Field( description="List of associated collection identifiers." ) @@ -103,20 +103,26 @@ class CollectionInventory(CustomBaseModel): inventory: InventoryStatistics = Field( description="Inventory statistics for this collection." ) + inventory_by_medium: dict[str, InventoryStatistics] = Field( + description="Per-medium inventory statistics for this collection." + ) class StatisticsResponse(CustomBaseModel): """Statistics response for authorized libraries and collections.""" - collections: List[CollectionInventory] = Field( + collections: list[CollectionInventory] = Field( description="List of collection-level statistics (includes collections not associated with a library." ) - libraries: List[LibraryStatistics] = Field( + libraries: list[LibraryStatistics] = Field( description="List of library-level statistics." ) inventory_summary: InventoryStatistics = Field( description="Summary inventory across all included collections." ) + inventory_by_medium: dict[str, InventoryStatistics] = Field( + description="Per-medium summary inventory across all included collections." + ) patron_summary: PatronStatistics = Field( description="Summary patron statistics across all libraries." ) diff --git a/api/admin/model/quicksight.py b/api/admin/model/quicksight.py index 752f889e3..523499f76 100644 --- a/api/admin/model/quicksight.py +++ b/api/admin/model/quicksight.py @@ -1,4 +1,4 @@ -from typing import List +from uuid import UUID from pydantic import Field, validator @@ -6,12 +6,12 @@ class QuicksightGenerateUrlRequest(CustomBaseModel): - library_ids: List[int] = Field( + library_uuids: list[UUID] = Field( description="The list of libraries to include in the dataset, an empty list is equivalent to all the libraries the user is allowed to access." 
) - @validator("library_ids", pre=True) - def parse_library_ids(cls, value): + @validator("library_uuids", pre=True) + def parse_library_uuids(cls, value) -> list[str]: return str_comma_list_validator(value) @@ -20,4 +20,4 @@ class QuicksightGenerateUrlResponse(CustomBaseModel): class QuicksightDashboardNamesResponse(CustomBaseModel): - names: List[str] = Field(description="The named quicksight dashboard ids") + names: list[str] = Field(description="The named quicksight dashboard ids") diff --git a/api/admin/password_admin_authentication_provider.py b/api/admin/password_admin_authentication_provider.py index b6cd88795..549f82af0 100644 --- a/api/admin/password_admin_authentication_provider.py +++ b/api/admin/password_admin_authentication_provider.py @@ -1,5 +1,3 @@ -from typing import Union - from flask import render_template_string, url_for from sqlalchemy.orm.session import Session @@ -110,7 +108,7 @@ def send_reset_password_email(self, admin: Admin, reset_password_url: str) -> No def validate_token_and_extract_admin( self, reset_password_token: str, admin_id: int, _db: Session - ) -> Union[Admin, ProblemDetail]: + ) -> Admin | ProblemDetail: secret_key = ConfigurationSetting.sitewide_secret(_db, Configuration.SECRET_KEY) return Admin.validate_reset_password_token_and_fetch_admin( diff --git a/api/admin/problem_details.py b/api/admin/problem_details.py index 5d4c881a4..cca2bd0ce 100644 --- a/api/admin/problem_details.py +++ b/api/admin/problem_details.py @@ -154,13 +154,6 @@ detail=_("The specified collection does not exist."), ) -MISSING_COLLECTION_NAME = pd( - "http://librarysimplified.org/terms/problem/missing-collection-name", - status_code=400, - title=_("Missing collection name."), - detail=_("You must identify the collection by its name."), -) - MISSING_ANALYTICS_NAME = pd( "http://librarysimplified.org/terms/problem/missing-analytics-name", status_code=400, @@ -168,15 +161,6 @@ detail=_("You must identify the analytics service by its name."), ) -COLLECTION_NAME_ALREADY_IN_USE = pd( - "http://librarysimplified.org/terms/problem/collection-name-already-in-use", - status_code=400, - title=_("Collection name already in use"), - detail=_( - "The collection name must be unique, and there's already a collection with the specified name." 
- ), -) - CANNOT_DELETE_COLLECTION_WITH_CHILDREN = pd( "http://librarysimplified.org/terms/problem/cannot-delete-collection-with-children", status_code=400, @@ -209,11 +193,11 @@ detail=_("A protocol can't be changed once it has been set."), ) -MISSING_PATRON_AUTH_NAME = pd( - "http://librarysimplified.org/terms/problem/missing-patron-auth-name", +MISSING_SERVICE_NAME = pd( + "http://librarysimplified.org/terms/problem/missing-service-name", status_code=400, - title=_("Missing patron auth service name."), - detail=_("You must identify the patron auth service by its name."), + title=_("Missing service name."), + detail=_("You must identify the service by its name."), ) PROTOCOL_DOES_NOT_SUPPORT_PARENTS = pd( diff --git a/api/adobe_vendor_id.py b/api/adobe_vendor_id.py index 9af5bd615..59d43eb46 100644 --- a/api/adobe_vendor_id.py +++ b/api/adobe_vendor_id.py @@ -5,7 +5,7 @@ import logging import sys import uuid -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any import jwt from jwt.algorithms import HMACAlgorithm @@ -99,9 +99,7 @@ def __init__( self.short_token_signing_key = self.short_token_signer.prepare_key(self.secret) @classmethod - def from_config( - cls, library: Library, _db: Optional[Session] = None - ) -> Optional[Self]: + def from_config(cls, library: Library, _db: Session | None = None) -> Self | None: """Initialize an AuthdataUtility from site configuration. The library must be successfully registered with a discovery @@ -173,7 +171,7 @@ def adobe_relevant_credentials(self, patron: Patron) -> Query[Credential]: ) ) - def encode(self, patron_identifier: Optional[str]) -> Tuple[str, bytes]: + def encode(self, patron_identifier: str | None) -> tuple[str, bytes]: """Generate an authdata JWT suitable for putting in an OPDS feed, where it can be picked up by a client and sent to the delegation authority to look up an Adobe ID. @@ -193,12 +191,12 @@ def encode(self, patron_identifier: Optional[str]) -> Tuple[str, bytes]: def _encode( self, iss: str, - sub: Optional[str] = None, - iat: Optional[datetime.datetime] = None, - exp: Optional[datetime.datetime] = None, + sub: str | None = None, + iat: datetime.datetime | None = None, + exp: datetime.datetime | None = None, ) -> bytes: """Helper method split out separately for use in tests.""" - payload: Dict[str, Any] = dict(iss=iss) # Issuer + payload: dict[str, Any] = dict(iss=iss) # Issuer if sub: payload["sub"] = sub # Subject if iat: @@ -213,7 +211,7 @@ def _encode( ) @classmethod - def adobe_base64_encode(cls, str_to_encode: Union[str, bytes]) -> str: + def adobe_base64_encode(cls, str_to_encode: str | bytes) -> str: """A modified base64 encoding that avoids triggering an Adobe bug. The bug seems to happen when the 'password' portion of a @@ -232,7 +230,7 @@ def adobe_base64_decode(cls, str_to_decode: str) -> bytes: encoded = str_to_decode.replace(":", "+").replace(";", "/").replace("@", "=") return base64.decodebytes(encoded.encode("utf-8")) - def decode(self, authdata: bytes) -> Tuple[str, str]: + def decode(self, authdata: bytes) -> tuple[str, str]: """Decode and verify an authdata JWT from one of the libraries managed by `secrets_by_library`. @@ -266,7 +264,7 @@ def decode(self, authdata: bytes) -> Tuple[str, str]: # in the list. raise exceptions[-1] - def _decode(self, authdata: bytes) -> Tuple[str, str]: + def _decode(self, authdata: bytes) -> tuple[str, str]: # First, decode the authdata without checking the signature. 
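# --- aside: round trip of the Adobe-safe base64 variant defined above ---
# adobe_base64_encode swaps the three characters that trigger the Adobe
# bug ("+", "/", "=") for ":", ";", "@"; decoding reverses the swap before
# handing the string to the standard base64 decoder:
import base64

encoded = base64.standard_b64encode(b"\xfb\xff").decode("utf-8")  # "+/8="
adobe_safe = encoded.replace("+", ":").replace("/", ";").replace("=", "@")
restored = adobe_safe.replace(":", "+").replace(";", "/").replace("@", "=")
assert base64.decodebytes(restored.encode("utf-8")) == b"\xfb\xff"
# --- end aside ---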
authdata_str = authdata.decode("utf-8") decoded = jwt.decode( @@ -295,7 +293,7 @@ def _decode(self, authdata: bytes) -> Tuple[str, str]: return library_uri, decoded["sub"] @classmethod - def _adobe_patron_identifier(cls, patron: Patron) -> Optional[str]: + def _adobe_patron_identifier(cls, patron: Patron) -> str | None: """Take patron object and return identifier for Adobe ID purposes""" _db = Session.object_session(patron) internal = DataSource.lookup(_db, DataSource.INTERNAL_PROCESSING) @@ -314,8 +312,8 @@ def refresh(credential: Credential) -> None: return patron_identifier.credential def short_client_token_for_patron( - self, patron_information: Union[Patron, str] - ) -> Tuple[str, str]: + self, patron_information: Patron | str + ) -> tuple[str, str]: """Generate short client token for patron, or for a patron's identifier for Adobe ID purposes""" @@ -333,8 +331,8 @@ def _now(self) -> datetime.datetime: return utc_now() def encode_short_client_token( - self, patron_identifier: Optional[str], expires: Optional[Dict[str, int]] = None - ) -> Tuple[str, str]: + self, patron_identifier: str | None, expires: dict[str, int] | None = None + ) -> tuple[str, str]: """Generate a short client token suitable for putting in an OPDS feed, where it can be picked up by a client and sent to the delegation authority to look up an Adobe ID. @@ -357,7 +355,7 @@ def _encode_short_client_token( self, library_short_name: str, patron_identifier: str, - expires: Union[int, float], + expires: int | float, ) -> str: base = library_short_name + "|" + str(expires) + "|" + patron_identifier signature = self.short_token_signer.sign( @@ -374,7 +372,7 @@ def _encode_short_client_token( ) return base + "|" + signature_encoded - def decode_short_client_token(self, token: str) -> Tuple[str, str]: + def decode_short_client_token(self, token: str) -> tuple[str, str]: """Attempt to interpret a 'username' and 'password' as a short client token identifying a patron of a specific library. @@ -392,7 +390,7 @@ def decode_short_client_token(self, token: str) -> Tuple[str, str]: def decode_two_part_short_client_token( self, username: str, password: str - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Decode a short client token that has already been split into two parts. """ @@ -401,7 +399,7 @@ def decode_two_part_short_client_token( def _decode_short_client_token( self, token: str, supposed_signature: bytes - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Make sure a client token is properly formatted, correctly signed, and not expired. 
""" diff --git a/api/annotations.py b/api/annotations.py index 86d01af32..263ca286a 100644 --- a/api/annotations.py +++ b/api/annotations.py @@ -149,9 +149,6 @@ def detail(cls, annotation, with_context=True): class AnnotationParser: @classmethod def parse(cls, _db, data, patron): - if patron.synchronize_annotations != True: - return PATRON_NOT_OPTED_IN_TO_ANNOTATION_SYNC - try: data = json.loads(data) if "id" in data and data["id"] is None: diff --git a/api/app.py b/api/app.py index 320604f42..9e3daf8a6 100644 --- a/api/app.py +++ b/api/app.py @@ -8,8 +8,8 @@ from flask_pydantic_spec import FlaskPydanticSpec from api.admin.controller import setup_admin_controllers +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.util.flask import PalaceFlask from api.util.profilers import ( PalaceCProfileProfiler, diff --git a/api/authentication/access_token.py b/api/authentication/access_token.py index 142e75f63..ecc29df13 100644 --- a/api/authentication/access_token.py +++ b/api/authentication/access_token.py @@ -4,7 +4,7 @@ import time from abc import ABC, abstractmethod from datetime import timedelta -from typing import TYPE_CHECKING, Type +from typing import TYPE_CHECKING from jwcrypto import jwe, jwk @@ -174,4 +174,4 @@ def is_access_token(cls, token: str | None) -> bool: return True -AccessTokenProvider: Type[PatronAccessTokenProvider] = PatronJWEAccessTokenProvider +AccessTokenProvider: type[PatronAccessTokenProvider] = PatronJWEAccessTokenProvider diff --git a/api/authentication/basic.py b/api/authentication/basic.py index 7e9871aa1..b4075d468 100644 --- a/api/authentication/basic.py +++ b/api/authentication/basic.py @@ -2,8 +2,10 @@ import re from abc import ABC, abstractmethod +from collections.abc import Generator from enum import Enum -from typing import Any, Dict, Generator, List, Optional, Pattern, TypeVar +from re import Pattern +from typing import Any, TypeVar from flask import url_for from pydantic import PositiveInt, validator @@ -68,7 +70,7 @@ class BasicAuthProviderSettings(AuthProviderSettings): # Configuration settings that are common to all Basic Auth-type # authentication techniques. - test_identifier: Optional[str] = FormField( + test_identifier: str | None = FormField( None, form=ConfigurationFormItem( label="Test identifier", @@ -77,7 +79,7 @@ class BasicAuthProviderSettings(AuthProviderSettings): required=True, ), ) - test_password: Optional[str] = FormField( + test_password: str | None = FormField( None, form=ConfigurationFormItem( label="Test password", @@ -115,7 +117,7 @@ class BasicAuthProviderSettings(AuthProviderSettings): ), ) # By default, there are no restrictions on passwords. 
- password_regular_expression: Optional[Pattern] = FormField( + password_regular_expression: Pattern | None = FormField( None, form=ConfigurationFormItem( label="Password Regular Expression", @@ -151,14 +153,14 @@ class BasicAuthProviderSettings(AuthProviderSettings): weight=10, ), ) - identifier_maximum_length: Optional[PositiveInt] = FormField( + identifier_maximum_length: PositiveInt | None = FormField( None, form=ConfigurationFormItem( label="Maximum identifier length", weight=10, ), ) - password_maximum_length: Optional[PositiveInt] = FormField( + password_maximum_length: PositiveInt | None = FormField( None, form=ConfigurationFormItem( label="Maximum password length", @@ -227,7 +229,7 @@ class BasicAuthProviderLibrarySettings(AuthProviderLibrarySettings): # Usually this is a string which is compared against the # patron's identifiers using the comparison method chosen in # identifier_restriction_type. - library_identifier_restriction_criteria: Optional[str] = FormField( + library_identifier_restriction_criteria: str | None = FormField( None, form=ConfigurationFormItem( label="Library Identifier Restriction", @@ -241,8 +243,8 @@ class BasicAuthProviderLibrarySettings(AuthProviderLibrarySettings): @validator("library_identifier_restriction_criteria") def validate_restriction_criteria( - cls, v: Optional[str], values: Dict[str, Any] - ) -> Optional[str]: + cls, v: str | None, values: dict[str, Any] + ) -> str | None: """Validate the library_identifier_restriction_criteria field.""" if not v: return v @@ -311,8 +313,8 @@ def __init__( ) def process_library_identifier_restriction_criteria( - self, criteria: Optional[str] - ) -> str | List[str] | re.Pattern | None: + self, criteria: str | None + ) -> str | list[str] | re.Pattern | None: """Process the library identifier restriction criteria.""" if not criteria: return None @@ -661,13 +663,13 @@ def _authentication_flow_document(self, _db: Session) -> dict[str, Any]: OPDS document. 
""" - login_inputs: Dict[str, Any] = dict(keyboard=self.identifier_keyboard.value) + login_inputs: dict[str, Any] = dict(keyboard=self.identifier_keyboard.value) if self.identifier_maximum_length: login_inputs["maximum_length"] = self.identifier_maximum_length if self.identifier_barcode_format != BarcodeFormats.NONE: login_inputs["barcode_format"] = self.identifier_barcode_format.value - password_inputs: Dict[str, Any] = dict(keyboard=self.password_keyboard.value) + password_inputs: dict[str, Any] = dict(keyboard=self.password_keyboard.value) if self.password_maximum_length: password_inputs["maximum_length"] = self.password_maximum_length @@ -713,7 +715,7 @@ def identifies_individuals(self): def _restriction_matches( cls, field: str | None, - restriction: str | List[str] | re.Pattern | None, + restriction: str | list[str] | re.Pattern | None, match_type: LibraryIdentifierRestriction, ) -> bool: """Does the given patron match the given restriction?""" diff --git a/api/authentication/basic_token.py b/api/authentication/basic_token.py index 5e0407231..9197f078f 100644 --- a/api/authentication/basic_token.py +++ b/api/authentication/basic_token.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Generator, Type, cast +from collections.abc import Generator +from typing import TYPE_CHECKING, cast from flask import url_for from sqlalchemy.orm import Session @@ -31,11 +32,11 @@ class BasicTokenAuthenticationProvider( """ @classmethod - def library_settings_class(cls) -> Type[LibrarySettingsType]: + def library_settings_class(cls) -> type[LibrarySettingsType]: raise NotImplementedError() @classmethod - def settings_class(cls) -> Type[SettingsType]: + def settings_class(cls) -> type[SettingsType]: raise NotImplementedError() FLOW_TYPE = "http://thepalaceproject.org/authtype/basic-token" diff --git a/api/authenticator.py b/api/authenticator.py index 56d749006..1b2faf7ee 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -4,7 +4,8 @@ import logging import sys from abc import ABC -from typing import Dict, Iterable, List, Optional, Tuple, Type, cast +from collections.abc import Iterable +from typing import cast import flask import jwt @@ -159,11 +160,11 @@ class LibraryAuthenticator(LoggerMixin): @classmethod def from_config( - cls: Type[Self], + cls: type[Self], _db: Session, library: Library, - analytics: Optional[Analytics] = None, - custom_catalog_source: Type[CustomPatronCatalog] = CustomPatronCatalog, + analytics: Analytics | None = None, + custom_catalog_source: type[CustomPatronCatalog] = CustomPatronCatalog, ) -> Self: """Initialize an Authenticator for the given Library based on its configured ExternalIntegrations. @@ -181,7 +182,7 @@ def from_config( # Find all of this library's ExternalIntegrations set up with # the goal of authenticating patrons. 
- integrations: List[ + integrations: list[ IntegrationLibraryConfiguration ] = IntegrationLibraryConfiguration.for_library_and_goal( _db, library, Goals.PATRON_AUTH_GOAL @@ -223,14 +224,13 @@ def __init__( self, _db: Session, library: Library, - basic_auth_provider: Optional[BasicAuthenticationProvider] = None, - saml_providers: Optional[List[BaseSAMLAuthenticationProvider]] = None, - ekirjasto_provider: Optional[EkirjastoAuthenticationAPI] = None, # Finland - bearer_token_signing_secret: Optional[str] = None, - authentication_document_annotator: Optional[CustomPatronCatalog] = None, - integration_registry: Optional[ - IntegrationRegistry[AuthenticationProvider] - ] = None, + basic_auth_provider: BasicAuthenticationProvider | None = None, + saml_providers: list[BaseSAMLAuthenticationProvider] | None = None, + ekirjasto_provider: EkirjastoAuthenticationAPI | None = None, # Finland + bearer_token_signing_secret: str | None = None, + authentication_document_annotator: CustomPatronCatalog | None = None, + integration_registry: None + | (IntegrationRegistry[AuthenticationProvider]) = None, ): """Initialize a LibraryAuthenticator from a list of AuthenticationProviders. @@ -265,8 +265,8 @@ def __init__( self.saml_providers_by_name = {} self.ekirjasto_provider = ekirjasto_provider # Finland self.bearer_token_signing_secret = bearer_token_signing_secret - self.initialization_exceptions: Dict[ - Tuple[int | None, int | None], Exception + self.initialization_exceptions: dict[ + tuple[int | None, int | None], Exception ] = {} self.basic_auth_provider: BasicAuthenticationProvider | None = None @@ -482,7 +482,7 @@ def authenticated_patron( ProblemDetail if an error occurs. """ provider: AuthenticationProvider | None = None - provider_token: Dict[str, str | None] | str | None = None + provider_token: dict[str, str | None] | str | None = None if self.basic_auth_provider and auth.type.lower() == "basic": # The patron wants to authenticate with the # BasicAuthenticationProvider. @@ -598,7 +598,7 @@ def create_bearer_token( payload, cast(str, self.bearer_token_signing_secret), algorithm="HS256" ) - def decode_bearer_token(self, token: str) -> Tuple[str, str]: + def decode_bearer_token(self, token: str) -> tuple[str, str]: """Extract auth provider name and access token from JSON web token.""" decoded = jwt.decode( token, cast(str, self.bearer_token_signing_secret), algorithms=["HS256"] @@ -621,7 +621,7 @@ def create_authentication_document(self) -> str: """Create the Authentication For OPDS document to be used when a request comes in with no authentication. """ - links: List[Dict[str, Optional[str]]] = [] + links: list[dict[str, str | None]] = [] if self.library is None: raise ValueError("No library specified!") @@ -799,8 +799,8 @@ def create_authentication_document(self) -> str: # Add feature flags to signal to clients what features they should # offer. 
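# --- aside: the bearer-token round trip in create/decode_bearer_token ---
# The authenticator signs a small JWT with HS256 and later decodes it to
# recover the provider name and access token. Sketch with PyJWT; the
# secret and claim names here are illustrative stand-ins:
import jwt

secret = "not-a-real-secret"
token = jwt.encode({"iss": "Basic Auth", "token": "abc123"}, secret, algorithm="HS256")
decoded = jwt.decode(token, secret, algorithms=["HS256"])
assert (decoded["iss"], decoded["token"]) == ("Basic Auth", "abc123")
# --- end aside ---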
- enabled: List[str] = [] - disabled: List[str] = [] + enabled: list[str] = [] + disabled: list[str] = [] if self.library and self.library.settings.allow_holds: bucket = enabled else: diff --git a/api/axis.py b/api/axis.py index 04bd50f3e..2b712a814 100644 --- a/api/axis.py +++ b/api/axis.py @@ -5,27 +5,12 @@ import html import json import re -import socket import ssl import urllib from abc import ABC, abstractmethod +from collections.abc import Callable, Generator, Mapping, Sequence from datetime import timedelta -from typing import ( - Any, - Callable, - Dict, - Generator, - Generic, - List, - Literal, - Mapping, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, -) +from typing import Any, Generic, Literal, Optional, TypeVar, Union, cast from urllib.parse import urlparse import certifi @@ -40,6 +25,7 @@ from api.circulation import ( APIAwareFulfillmentInfo, BaseCirculationAPI, + BaseCirculationApiSettings, BaseCirculationLoanSettings, CirculationInternalFormatsMixin, FulfillmentInfo, @@ -54,7 +40,6 @@ from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider, CoverageFailure from core.integration.settings import ( - BaseSettings, ConfigurationFormItem, ConfigurationFormItemType, FormField, @@ -109,14 +94,14 @@ class Axis360APIConstants: } -class Axis360Settings(BaseSettings): +class Axis360Settings(BaseCirculationApiSettings): username: str = FormField( form=ConfigurationFormItem(label=_("Username"), required=True) ) password: str = FormField( form=ConfigurationFormItem(label=_("Password"), required=True) ) - external_account_id: Optional[str] = FormField( + external_account_id: str = FormField( form=ConfigurationFormItem( label=_("Library ID"), required=True, @@ -129,7 +114,7 @@ class Axis360Settings(BaseSettings): required=True, ), ) - verify_certificate: Optional[bool] = FormField( + verify_certificate: bool | None = FormField( default=True, form=ConfigurationFormItem( label=_("Verify SSL Certificate"), @@ -198,11 +183,11 @@ class Axis360API( } @classmethod - def settings_class(cls) -> Type[Axis360Settings]: + def settings_class(cls) -> type[Axis360Settings]: return Axis360Settings @classmethod - def library_settings_class(cls) -> Type[Axis360LibrarySettings]: + def library_settings_class(cls) -> type[Axis360LibrarySettings]: return Axis360LibrarySettings @classmethod @@ -221,8 +206,8 @@ def __init__(self, _db: Session, collection: Collection) -> None: ) super().__init__(_db, collection) - self.library_id = collection.external_account_id or "" settings = self.settings + self.library_id = settings.external_account_id self.username = settings.username self.password = settings.password @@ -237,7 +222,7 @@ def __init__(self, _db: Session, collection: Collection) -> None: if not self.library_id or not self.username or not self.password: raise CannotLoadConfiguration("Axis 360 configuration is incomplete.") - self.token: Optional[str] = None + self.token: str | None = None self.verify_certificate: bool = ( settings.verify_certificate if settings.verify_certificate is not None @@ -245,11 +230,11 @@ def __init__(self, _db: Session, collection: Collection) -> None: ) @property - def source(self) -> Optional[DataSource]: + def source(self) -> DataSource | None: return DataSource.lookup(self._db, DataSource.AXIS_360) # type: ignore[no-any-return] @property - def authorization_headers(self) -> Dict[str, str]: + def authorization_headers(self) -> dict[str, str]: authorization = ":".join([self.username, self.password, self.library_id]) 
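# --- aside: a standalone sketch of the encoding performed just below ---
# Axis 360 expects the "username:password:library_id" triple encoded as
# UTF-16-LE and then base64'd (dummy credentials here):
import base64

raw = ":".join(["user", "secret", "LIBRARYID"]).encode("utf_16_le")
token = base64.standard_b64encode(raw).decode("utf-8")
# the property then places this value in the request's Authorization header
# --- end aside ---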
authorization_encoded = authorization.encode("utf_16_le") authorization_b64 = base64.standard_b64encode(authorization_encoded).decode( @@ -309,9 +294,9 @@ def request( self, url: str, method: str = "get", - extra_headers: Optional[Dict[str, str]] = None, - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + extra_headers: dict[str, str] | None = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, exception_on_401: bool = False, **kwargs: Any, ) -> RequestsResponse: @@ -358,9 +343,9 @@ def request( def availability( self, - patron_id: Optional[str] = None, - since: Optional[datetime.datetime] = None, - title_ids: Optional[List[str]] = None, + patron_id: str | None = None, + since: datetime.datetime | None = None, + title_ids: list[str] | None = None, ) -> RequestsResponse: url = self.base_url + self.availability_endpoint args = dict() @@ -407,9 +392,7 @@ def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: except etree.XMLSyntaxError as e: raise RemoteInitiatedServerError(response.content, self.label()) - def _checkin( - self, title_id: Optional[str], patron_id: Optional[str] - ) -> RequestsResponse: + def _checkin(self, title_id: str | None, patron_id: str | None) -> RequestsResponse: """Make a request to the EarlyCheckInTitle endpoint.""" if title_id is None: self.log.warning( @@ -452,7 +435,7 @@ def checkout( raise RemoteInitiatedServerError(response.content, self.label()) def _checkout( - self, title_id: Optional[str], patron_id: Optional[str], internal_format: str + self, title_id: str | None, patron_id: str | None, internal_format: str ) -> RequestsResponse: url = self.base_url + "checkout/v2" args = dict(titleId=title_id, patronId=patron_id, format=internal_format) @@ -498,7 +481,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - hold_notification_email: Optional[str], + hold_notification_email: str | None, ) -> HoldInfo: if not hold_notification_email: hold_notification_email = self.default_notification_email_address( @@ -546,10 +529,10 @@ def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> No def patron_activity( self, patron: Patron, - pin: Optional[str], - identifier: Optional[Identifier] = None, - internal_format: Optional[str] = None, - ) -> List[LoanInfo | HoldInfo]: + pin: str | None, + identifier: Identifier | None = None, + internal_format: str | None = None, + ) -> list[LoanInfo | HoldInfo]: if identifier: assert identifier.identifier is not None title_ids = [identifier.identifier] @@ -572,7 +555,7 @@ def update_availability(self, licensepool: LicensePool) -> None: self.update_licensepools_for_identifiers([licensepool.identifier]) def update_licensepools_for_identifiers( - self, identifiers: List[Identifier] + self, identifiers: list[Identifier] ) -> None: """Update availability and bibliographic information for a list of books. @@ -604,7 +587,7 @@ def update_book( bibliographic: Metadata, availability: CirculationData, analytics: Analytics = Provide[Services.analytics.analytics], - ) -> Tuple[Edition, bool, LicensePool, bool]: + ) -> tuple[Edition, bool, LicensePool, bool]: """Create or update a single book based on bibliographic and availability data from the Axis 360 API. 
@@ -635,8 +618,8 @@ def update_book( return edition, new_edition, license_pool, new_license_pool def _fetch_remote_availability( - self, identifiers: List[Identifier] - ) -> Generator[Tuple[Metadata, CirculationData], None, None]: + self, identifiers: list[Identifier] + ) -> Generator[tuple[Metadata, CirculationData], None, None]: """Retrieve availability information for the specified identifiers. :yield: A stream of (Metadata, CirculationData) 2-tuples. @@ -678,7 +661,7 @@ def _reap(self, identifier: Identifier) -> None: def recent_activity( self, since: datetime.datetime - ) -> Generator[Tuple[Metadata, CirculationData], None, None]: + ) -> Generator[tuple[Metadata, CirculationData], None, None]: """Find books that have had recent activity. :yield: A sequence of (Metadata, CirculationData) 2-tuples @@ -690,7 +673,7 @@ def recent_activity( @classmethod def create_identifier_strings( cls, identifiers: Sequence[Identifier | str] - ) -> List[str]: + ) -> list[str]: identifier_strings = [] for i in identifiers: if isinstance(i, Identifier): @@ -712,8 +695,8 @@ def _make_request( url: str, method: str, headers: Mapping[str, str], - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, **kwargs: Any, ) -> RequestsResponse: """Actually make an HTTP request.""" @@ -738,9 +721,9 @@ def __init__( self, _db: Session, collection: Collection, - api_class: Union[ - Axis360API, Callable[[Session, Collection], Axis360API] - ] = Axis360API, + api_class: ( + Axis360API | Callable[[Session, Collection], Axis360API] + ) = Axis360API, ): super().__init__(_db, collection) if isinstance(api_class, Axis360API): @@ -758,7 +741,7 @@ def __init__( def catch_up_from( self, start: datetime.datetime, - cutoff: Optional[datetime.datetime], + cutoff: datetime.datetime | None, progress: TimestampData, ) -> None: """Find Axis 360 books that changed recently. @@ -776,7 +759,7 @@ def catch_up_from( def process_book( self, bibliographic: Metadata, circulation: CirculationData - ) -> Tuple[Edition, LicensePool]: + ) -> tuple[Edition, LicensePool]: edition, new_edition, license_pool, new_license_pool = self.api.update_book( bibliographic, circulation ) @@ -809,9 +792,9 @@ class Axis360BibliographicCoverageProvider(BibliographicCoverageProvider): def __init__( self, collection: Collection, - api_class: Union[ - Axis360API, Callable[[Session, Collection], Axis360API] - ] = Axis360API, + api_class: ( + Axis360API | Callable[[Session, Collection], Axis360API] + ) = Axis360API, **kwargs: Any, ) -> None: """Constructor. 
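These hunks make the companion change from PEP 585: `List`, `Tuple`, and `Dict` give way to the subscriptable builtins, and `Generator`, `Mapping`, and `Sequence` come from `collections.abc` rather than `typing`. A short sketch of the resulting style, with hypothetical names standing in for the `(Metadata, CirculationData)` streams above:

```python
# Sketch only: builtin generics plus collections.abc after the rewrite.
from collections.abc import Generator


def recent_activity(titles: list[str]) -> Generator[tuple[int, str], None, None]:
    # Yield (position, title) 2-tuples, standing in for the patch's
    # (Metadata, CirculationData) pairs.
    for position, title in enumerate(titles):
        yield position, title


print(list(recent_activity(["Title A", "Title B"])))  # [(0, 'Title A'), (1, 'Title B')]
```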
@@ -833,8 +816,8 @@ def __init__( self.parser = BibliographicParser() def process_batch( - self, identifiers: List[Identifier] - ) -> List[CoverageFailure | Identifier]: + self, identifiers: list[Identifier] + ) -> list[CoverageFailure | Identifier]: identifier_strings = self.api.create_identifier_strings(identifiers) response = self.api.availability(title_ids=identifier_strings) seen_identifiers = set() @@ -886,9 +869,9 @@ def __init__( self, _db: Session, collection: Collection, - api_class: Union[ - Axis360API, Callable[[Session, Collection], Axis360API] - ] = Axis360API, + api_class: ( + Axis360API | Callable[[Session, Collection], Axis360API] + ) = Axis360API, ) -> None: super().__init__(_db, collection) if isinstance(api_class, Axis360API): @@ -898,7 +881,7 @@ def __init__( else: self.api = api_class(_db, collection) - def process_items(self, identifiers: List[Identifier]) -> None: + def process_items(self, identifiers: list[Identifier]) -> None: self.api.update_licensepools_for_identifiers(identifiers) @@ -912,7 +895,7 @@ class Axis360Parser(XMLProcessor[T], ABC): NAMESPACES = {"axis": "http://axis360api.baker-taylor.com/vendorAPI"} - def _pd(self, date: Optional[str]) -> Optional[datetime.datetime]: + def _pd(self, date: str | None) -> datetime.datetime | None: """Stupid function to parse a date.""" if date is None: return date @@ -926,7 +909,7 @@ def _xpath1_boolean( self, e: _Element, target: str, - ns: Optional[Dict[str, str]], + ns: dict[str, str] | None, default: bool = False, ) -> bool: text = self.text_of_optional_subtag(e, target, ns) @@ -938,13 +921,13 @@ def _xpath1_boolean( return False def _xpath1_date( - self, e: _Element, target: str, ns: Optional[Dict[str, str]] - ) -> Optional[datetime.datetime]: + self, e: _Element, target: str, ns: dict[str, str] | None + ) -> datetime.datetime | None: value = self.text_of_optional_subtag(e, target, ns) return self._pd(value) -class BibliographicParser(Axis360Parser[Tuple[Metadata, CirculationData]], LoggerMixin): +class BibliographicParser(Axis360Parser[tuple[Metadata, CirculationData]], LoggerMixin): DELIVERY_DATA_FOR_AXIS_FORMAT = { "Blio": None, # Legacy format, handled the same way as AxisNow "Acoustik": (None, DeliveryMechanism.FINDAWAY_DRM), # Audiobooks @@ -954,7 +937,7 @@ class BibliographicParser(Axis360Parser[Tuple[Metadata, CirculationData]], Logge } @classmethod - def parse_list(cls, l: str) -> List[str]: + def parse_list(cls, l: str) -> list[str]: """Turn strings like this into lists: FICTION / Thrillers; FICTION / Suspense; FICTION / General @@ -968,9 +951,9 @@ def xpath_expression(self) -> str: def extract_availability( self, - circulation_data: Optional[CirculationData], + circulation_data: CirculationData | None, element: _Element, - ns: Optional[Dict[str, str]], + ns: dict[str, str] | None, ) -> CirculationData: identifier = self.text_of_subtag(element, "axis:titleId", ns) primary_identifier = IdentifierData(Identifier.AXIS_360_ID, identifier) @@ -1012,7 +995,7 @@ def parse_contributor( cls, author: str, primary_author_found: bool = False, - force_role: Optional[str] = None, + force_role: str | None = None, ) -> ContributorData: """Parse an Axis 360 contributor string. 
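`parse_list` is specified above only by its docstring example. A guess at the behavior, inferred from that docstring alone (the real method may differ):

```python
# Hypothetical reimplementation, inferred from the docstring example alone:
# split a semicolon-delimited subject string into trimmed entries.
def parse_list(value: str) -> list[str]:
    return [part.strip() for part in value.split(";") if part.strip()]


print(parse_list("FICTION / Thrillers; FICTION / Suspense; FICTION / General"))
# ['FICTION / Thrillers', 'FICTION / Suspense', 'FICTION / General']
```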
@@ -1055,7 +1038,7 @@ def parse_contributor( return ContributorData(sort_name=author, roles=[role]) def extract_bibliographic( - self, element: _Element, ns: Optional[Dict[str, str]] + self, element: _Element, ns: dict[str, str] | None ) -> Metadata: """Turn bibliographic metadata into a Metadata and a CirculationData objects, and return them as a tuple.""" @@ -1261,8 +1244,8 @@ def extract_bibliographic( return metadata def process_one( - self, element: _Element, ns: Optional[Dict[str, str]] - ) -> Tuple[Metadata, CirculationData]: + self, element: _Element, ns: dict[str, str] | None + ) -> tuple[Metadata, CirculationData]: bibliographic = self.extract_bibliographic(element, ns) passed_availability = None @@ -1282,7 +1265,7 @@ class ResponseParser: SERVICE_NAME = "Axis 360" # Map Axis 360 error codes to our circulation exceptions. - code_to_exception: Mapping[int | Tuple[int, str], Type[IntegrationException]] = { + code_to_exception: Mapping[int | tuple[int, str], type[IntegrationException]] = { 315: InvalidInputException, # Bad password 316: InvalidInputException, # DRM account already exists 1000: PatronAuthorizationFailedException, @@ -1342,11 +1325,10 @@ def _raise_exception_on_error( cls, code: str | int, message: str, - custom_error_classes: Optional[ - Mapping[int | Tuple[int, str], Type[IntegrationException]] - ] = None, - ignore_error_codes: Optional[List[int]] = None, - ) -> Tuple[int, str]: + custom_error_classes: None + | (Mapping[int | tuple[int, str], type[IntegrationException]]) = None, + ignore_error_codes: list[int] | None = None, + ) -> tuple[int, str]: try: code = int(code) except ValueError: @@ -1387,12 +1369,11 @@ def __init__(self, collection: Collection): def raise_exception_on_error( self, e: _Element, - ns: Optional[Dict[str, str]], - custom_error_classes: Optional[ - Mapping[int | Tuple[int, str], Type[IntegrationException]] - ] = None, - ignore_error_codes: Optional[List[int]] = None, - ) -> Tuple[int, str]: + ns: dict[str, str] | None, + custom_error_classes: None + | (Mapping[int | tuple[int, str], type[IntegrationException]]) = None, + ignore_error_codes: list[int] | None = None, + ) -> tuple[int, str]: """Raise an error if the given lxml node represents an Axis 360 error condition. @@ -1423,7 +1404,7 @@ def xpath_expression(self) -> str: return "//axis:EarlyCheckinRestResult" def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] + self, e: _Element, namespaces: dict[str, str] | None ) -> Literal[True]: """Either raise an appropriate exception, or do nothing.""" self.raise_exception_on_error(e, namespaces, ignore_error_codes=[4058]) @@ -1435,9 +1416,7 @@ class CheckoutResponseParser(XMLResponseParser[LoanInfo]): def xpath_expression(self) -> str: return "//axis:checkoutResult" - def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] - ) -> LoanInfo: + def process_one(self, e: _Element, namespaces: dict[str, str] | None) -> LoanInfo: """Either turn the given document into a LoanInfo object, or raise an appropriate exception. """ @@ -1470,9 +1449,7 @@ class HoldResponseParser(XMLResponseParser[HoldInfo]): def xpath_expression(self) -> str: return "//axis:addtoholdResult" - def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] - ) -> HoldInfo: + def process_one(self, e: _Element, namespaces: dict[str, str] | None) -> HoldInfo: """Either turn the given document into a HoldInfo object, or raise an appropriate exception. 
""" @@ -1510,7 +1487,7 @@ def xpath_expression(self) -> str: return "//axis:removeholdResult" def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] + self, e: _Element, namespaces: dict[str, str] | None ) -> Literal[True]: # There's no data to gather here. Either there was an error # or we were successful. @@ -1519,7 +1496,7 @@ def process_one( class AvailabilityResponseParser(XMLResponseParser[Union[LoanInfo, HoldInfo]]): - def __init__(self, api: Axis360API, internal_format: Optional[str] = None) -> None: + def __init__(self, api: Axis360API, internal_format: str | None = None) -> None: """Constructor. :param api: An Axis360API instance, in case the parsing of an @@ -1543,8 +1520,8 @@ def xpath_expression(self) -> str: return "//axis:title" def process_one( - self, e: _Element, ns: Optional[Dict[str, str]] - ) -> Optional[Union[LoanInfo, HoldInfo]]: + self, e: _Element, ns: dict[str, str] | None + ) -> LoanInfo | HoldInfo | None: # Figure out which book we're talking about. axis_identifier = self.text_of_subtag(e, "axis:titleId", ns) availability = self._xpath1(e, "axis:availability", ns) @@ -1554,7 +1531,7 @@ def process_one( checked_out = self._xpath1_boolean(availability, "axis:isCheckedout", ns) on_hold = self._xpath1_boolean(availability, "axis:isInHoldQueue", ns) - info: Optional[Union[LoanInfo, HoldInfo]] = None + info: LoanInfo | HoldInfo | None = None if checked_out: start_date = self._xpath1_date(availability, "axis:checkoutStartDate", ns) end_date = self._xpath1_date(availability, "axis:checkoutEndDate", ns) @@ -1574,7 +1551,7 @@ def process_one( identifier=axis_identifier, ) - fulfillment: Optional[FulfillmentInfo] + fulfillment: FulfillmentInfo | None if download_url and self.internal_format != self.api.AXISNOW: # The patron wants a direct link to the book, which we can deliver # immediately, without making any more API requests. @@ -1653,7 +1630,7 @@ class JSONResponseParser(Generic[T], ResponseParser, ABC): """ @classmethod - def _required_key(cls, key: str, json_obj: Optional[Mapping[str, Any]]) -> Any: + def _required_key(cls, key: str, json_obj: Mapping[str, Any] | None) -> Any: """Raise an exception if the given key is not present in the given object. """ @@ -1669,7 +1646,7 @@ def _required_key(cls, key: str, json_obj: Optional[Mapping[str, Any]]) -> Any: return json_obj[key] @classmethod - def verify_status_code(cls, parsed: Optional[Mapping[str, Any]]) -> None: + def verify_status_code(cls, parsed: Mapping[str, Any] | None) -> None: """Assert that the incoming JSON document represents a successful response. """ @@ -1682,7 +1659,7 @@ def verify_status_code(cls, parsed: Optional[Mapping[str, Any]]) -> None: # an appropriate exception immediately. cls._raise_exception_on_error(code, message) - def parse(self, data: Union[Dict[str, Any], bytes, str], **kwargs: Any) -> T: + def parse(self, data: dict[str, Any] | bytes | str, **kwargs: Any) -> T: """Parse a JSON document.""" if isinstance(data, dict): parsed = data # already parsed @@ -1702,7 +1679,7 @@ def parse(self, data: Union[Dict[str, Any], bytes, str], **kwargs: Any) -> T: return self._parse(parsed, **kwargs) @abstractmethod - def _parse(self, parsed: Dict[str, Any], **kwargs: Any) -> T: + def _parse(self, parsed: dict[str, Any], **kwargs: Any) -> T: """Parse a document we know to represent success on the API level. Called by parse() once the high-level details have been worked out. 
@@ -1712,7 +1689,7 @@ def _parse(self, parsed: Dict[str, Any], **kwargs: Any) -> T: class Axis360FulfillmentInfoResponseParser( JSONResponseParser[ - Tuple[Union[FindawayManifest, "AxisNowManifest"], datetime.datetime] + tuple[Union[FindawayManifest, "AxisNowManifest"], datetime.datetime] ] ): """Parse JSON documents into Findaway audiobook manifests or AxisNow manifests.""" @@ -1727,10 +1704,10 @@ def __init__(self, api: Axis360API): def _parse( self, - parsed: Dict[str, Any], - license_pool: Optional[LicensePool] = None, + parsed: dict[str, Any], + license_pool: LicensePool | None = None, **kwargs: Any, - ) -> Tuple[Union[FindawayManifest, AxisNowManifest], datetime.datetime]: + ) -> tuple[FindawayManifest | AxisNowManifest, datetime.datetime]: """Extract all useful information from a parsed FulfillmentInfo response. @@ -1749,7 +1726,7 @@ def _parse( expiration_date = self._required_key("ExpirationDate", parsed) expiration_date = self.parse_date(expiration_date) - manifest: Union[FindawayManifest, AxisNowManifest] + manifest: FindawayManifest | AxisNowManifest if "FNDTransactionID" in parsed: manifest = self.parse_findaway(parsed, license_pool) else: @@ -1773,7 +1750,7 @@ def parse_date(self, date: str) -> datetime.datetime: return date_parsed def parse_findaway( - self, parsed: Dict[str, Any], license_pool: LicensePool + self, parsed: dict[str, Any], license_pool: LicensePool ) -> FindawayManifest: k = self._required_key fulfillmentId = k("FNDContentID", parsed) @@ -1796,7 +1773,7 @@ def parse_findaway( spine_items=spine_items, ) - def parse_axisnow(self, parsed: Dict[str, Any]) -> AxisNowManifest: + def parse_axisnow(self, parsed: dict[str, Any]) -> AxisNowManifest: k = self._required_key isbn = k("ISBN", parsed) book_vault_uuid = k("BookVaultUUID", parsed) @@ -1804,14 +1781,14 @@ def parse_axisnow(self, parsed: Dict[str, Any]) -> AxisNowManifest: class AudiobookMetadataParser( - JSONResponseParser[Tuple[Optional[str], List[SpineItem]]] + JSONResponseParser[tuple[Optional[str], list[SpineItem]]] ): """Parse the results of Axis 360's audiobook metadata API call.""" @classmethod def _parse( - cls, parsed: Dict[str, Any], **kwargs: Any - ) -> Tuple[Optional[str], List[SpineItem]]: + cls, parsed: dict[str, Any], **kwargs: Any + ) -> tuple[str | None, list[SpineItem]]: spine_items = [] accountId = parsed.get("fndaccountid", None) for item in parsed.get("readingOrder", []): @@ -1821,7 +1798,7 @@ def _parse( return accountId, spine_items @classmethod - def _extract_spine_item(cls, part: Dict[str, str | int | float]) -> SpineItem: + def _extract_spine_item(cls, part: dict[str, str | int | float]) -> SpineItem: """Convert an element of the 'readingOrder' list to a SpineItem.""" title = part.get("title") # Incoming duration is measured in seconds. @@ -1925,7 +1902,7 @@ def problem_detail_document(self, error_details: str) -> ProblemDetail: ) @property - def as_response(self) -> Union[Response, ProblemDetail]: + def as_response(self) -> Response | ProblemDetail: service_name = urlparse(str(self.content_link)).netloc try: if self.verify: @@ -1957,7 +1934,7 @@ def as_response(self) -> Union[Response, ProblemDetail]: e.code, service_name ) ) - except socket.timeout: + except TimeoutError: return self.problem_detail_document( f"Error connecting to {service_name}. Timeout occurred." 
) diff --git a/api/bibliotheca.py b/api/bibliotheca.py index fc11c706b..b30a11087 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -10,9 +10,10 @@ import time import urllib.parse from abc import ABC +from collections.abc import Generator from datetime import datetime, timedelta from io import BytesIO -from typing import Dict, Generator, List, Tuple, Type, TypeVar, Union +from typing import Optional, TypeVar import dateutil.parser from dependency_injector.wiring import Provide, inject @@ -22,6 +23,7 @@ from api.circulation import ( BaseCirculationAPI, + BaseCirculationApiSettings, BaseCirculationLoanSettings, FulfillmentInfo, HoldInfo, @@ -35,7 +37,6 @@ from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider from core.integration.settings import ( - BaseSettings, ConfigurationFormItem, ConfigurationFormItemType, FormField, @@ -82,7 +83,7 @@ from core.util.xmlparser import XMLParser, XMLProcessor -class BibliothecaSettings(BaseSettings): +class BibliothecaSettings(BaseCirculationApiSettings): username: str = FormField( form=ConfigurationFormItem( label=_("Account ID"), @@ -95,7 +96,7 @@ class BibliothecaSettings(BaseSettings): required=True, ) ) - external_account_id: Optional[str] = FormField( + external_account_id: str = FormField( form=ConfigurationFormItem( label=_("Library ID"), required=True, @@ -104,7 +105,7 @@ class BibliothecaSettings(BaseSettings): class BibliothecaLibrarySettings(BaseCirculationLoanSettings): - dont_display_reserves: Optional[str] = FormField( + dont_display_reserves: str | None = FormField( form=ConfigurationFormItem( label=_("Show/Hide Titles with No Available Loans"), required=False, @@ -171,7 +172,7 @@ def __init__(self, _db, collection): self.version = self.DEFAULT_VERSION self.account_id = settings.username self.account_key = settings.password - self.library_id = collection.external_account_id + self.library_id = settings.external_account_id self.base_url = self.DEFAULT_BASE_URL if not self.account_id or not self.account_key or not self.library_id: @@ -339,9 +340,6 @@ def _simple_http_get(self, url, headers, *args, **kwargs): """This will be overridden in MockBibliothecaAPI.""" return Representation.simple_http_get(url, headers, *args, **kwargs) - def external_integration(self, _db): - return self.collection.external_integration - def _run_self_tests(self, _db): def _count_events(): now = utc_now() @@ -361,7 +359,7 @@ def _count_events(): def _count_activity(): result = self.patron_activity(patron, pin) - return "Found %d loans/holds" % len(result) + return "Found %d loans/holds" % len(list(result)) yield self.run_test( "Checking activity for test patron for library %s" % library.name, @@ -669,9 +667,9 @@ def xpath_expression(self) -> str: @classmethod def contributors_from_string( - cls, string: Optional[str], role: str = Contributor.AUTHOR_ROLE - ) -> List[ContributorData]: - contributors: List[ContributorData] = [] + cls, string: str | None, role: str = Contributor.AUTHOR_ROLE + ) -> list[ContributorData]: + contributors: list[ContributorData] = [] if not string: return contributors @@ -688,8 +686,8 @@ def contributors_from_string( return contributors @classmethod - def parse_genre_string(self, s: Optional[str]) -> List[SubjectData]: - genres: List[SubjectData] = [] + def parse_genre_string(self, s: str | None) -> list[SubjectData]: + genres: list[SubjectData] = [] if not s: return genres for i in s.split(","): @@ -711,9 +709,7 @@ def parse_genre_string(self, s: Optional[str]) -> 
List[SubjectData]: ) return genres - def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Metadata: + def process_one(self, tag: _Element, namespaces: dict[str, str] | None) -> Metadata: """Turn an tag into a Metadata and an encompassed CirculationData objects, and return the Metadata.""" @@ -959,7 +955,7 @@ def process_first(self, string: str | bytes) -> Exception: return return_val def process_one( - self, error_tag: _Element, namespaces: Optional[Dict[str, str]] + self, error_tag: _Element, namespaces: dict[str, str] | None ) -> Exception: message = self.text_of_optional_subtag(error_tag, "Message") if not message: @@ -1034,9 +1030,7 @@ class PatronCirculationParser(XMLParser): def __init__(self, collection: Collection) -> None: self.collection = collection - def process_all( - self, string: bytes | str - ) -> itertools.chain[Union[LoanInfo, HoldInfo]]: + def process_all(self, string: bytes | str) -> itertools.chain[LoanInfo | HoldInfo]: xml = self._load_xml(string) loans = self._process_all( xml, "//Checkouts/Item", namespaces={}, handler=self.process_one_loan @@ -1050,26 +1044,26 @@ def process_all( return itertools.chain(loans, holds, reserves) def process_one_loan( - self, tag: _Element, namespaces: Dict[str, str] - ) -> Optional[LoanInfo]: + self, tag: _Element, namespaces: dict[str, str] + ) -> LoanInfo | None: return self.process_one(tag, namespaces, LoanInfo) def process_one_hold( - self, tag: _Element, namespaces: Dict[str, str] - ) -> Optional[HoldInfo]: + self, tag: _Element, namespaces: dict[str, str] + ) -> HoldInfo | None: return self.process_one(tag, namespaces, HoldInfo) def process_one_reserve( - self, tag: _Element, namespaces: Dict[str, str] - ) -> Optional[HoldInfo]: + self, tag: _Element, namespaces: dict[str, str] + ) -> HoldInfo | None: hold_info = self.process_one(tag, namespaces, HoldInfo) if hold_info is not None: hold_info.hold_position = 0 return hold_info def process_one( - self, tag: _Element, namespaces: Dict[str, str], source_class: Type[T] - ) -> Optional[T]: + self, tag: _Element, namespaces: dict[str, str], source_class: type[T] + ) -> T | None: if not tag.xpath("ItemId"): # This happens for events associated with books # no longer in our collection. 
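`PatronCirculationParser.process_one` receives the class to instantiate as an argument, now typed `type[T]`, so a single method serves both loans and holds. A toy illustration of the dispatch, with stand-in classes:

```python
# Toy illustration of the type[T] dispatch; LoanInfo/HoldInfo are stand-ins.
import itertools
from typing import TypeVar

T = TypeVar("T")


class LoanInfo: ...


class HoldInfo: ...


def process_one(source_class: type[T]) -> T:
    # The real method also inspects an lxml tag and namespaces; omitted here.
    return source_class()


loans = [process_one(LoanInfo)]
holds = [process_one(HoldInfo)]
print([type(x).__name__ for x in itertools.chain(loans, holds)])  # ['LoanInfo', 'HoldInfo']
```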
@@ -1102,16 +1096,16 @@ def datevalue(key): class DateResponseParser(BibliothecaParser[Optional[datetime]], ABC): """Extract a date from a response.""" - RESULT_TAG_NAME: Optional[str] = None - DATE_TAG_NAME: Optional[str] = None + RESULT_TAG_NAME: str | None = None + DATE_TAG_NAME: str | None = None @property def xpath_expression(self) -> str: return f"/{self.RESULT_TAG_NAME}/{self.DATE_TAG_NAME}" def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Optional[datetime]: + self, tag: _Element, namespaces: dict[str, str] | None + ) -> datetime | None: due_date = tag.text if not due_date: return None @@ -1161,7 +1155,7 @@ def xpath_expression(self) -> str: def process_all( self, string: bytes | str, no_events_error=False ) -> Generator[ - Tuple[str, str, Optional[str], datetime, Optional[datetime], str], None, None + tuple[str, str, str | None, datetime, datetime | None, str], None, None ]: has_events = False for i in super().process_all(string): @@ -1184,8 +1178,8 @@ def process_all( ) def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Tuple[str, str, Optional[str], datetime, Optional[datetime], str]: + self, tag: _Element, namespaces: dict[str, str] | None + ) -> tuple[str, str, str | None, datetime, datetime | None, str]: isbn = self.text_of_subtag(tag, "ISBN") bibliotheca_id = self.text_of_subtag(tag, "ItemId") patron_id = self.text_of_optional_subtag(tag, "PatronId") diff --git a/api/circulation.py b/api/circulation.py index 692bdeeb8..51a0168ff 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -5,9 +5,10 @@ import sys import time from abc import ABC, abstractmethod +from collections.abc import Iterable from threading import Thread from types import TracebackType -from typing import Any, Dict, Iterable, List, Literal, Tuple, Type, TypeVar +from typing import Any, Literal, TypeVar import flask from flask import Response @@ -54,9 +55,9 @@ class CirculationInfo: def __init__( self, collection: Collection | int | None, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, ) -> None: """A loan, hold, or whatever. @@ -74,7 +75,7 @@ def __init__( :param identifier: The string identifying the LicensePool. 
""" - self.collection_id: Optional[int] + self.collection_id: int | None if isinstance(collection, int): self.collection_id = collection elif isinstance(collection, Collection) and collection.id is not None: @@ -86,7 +87,7 @@ def __init__( self.identifier_type = identifier_type self.identifier = identifier - def collection(self, _db: Session) -> Optional[Collection]: + def collection(self, _db: Session) -> Collection | None: """Find the Collection to which this object belongs.""" if self.collection_id is None: return None @@ -104,7 +105,7 @@ def license_pool(self, _db: Session) -> LicensePool: ) return pool - def fd(self, d: Optional[datetime.datetime]) -> Optional[str]: + def fd(self, d: datetime.datetime | None) -> str | None: # Stupid method to format a date if not d: return None @@ -129,10 +130,10 @@ class DeliveryMechanismInfo(CirculationInfo): def __init__( self, - content_type: Optional[str], - drm_scheme: Optional[str], - rights_uri: Optional[str] = RightsStatus.IN_COPYRIGHT, - resource: Optional[Resource] = None, + content_type: str | None, + drm_scheme: str | None, + rights_uri: str | None = RightsStatus.IN_COPYRIGHT, + resource: Resource | None = None, ) -> None: """Constructor. @@ -153,7 +154,7 @@ def __init__( def apply( self, loan: Loan, autocommit: bool = True - ) -> Optional[LicensePoolDeliveryMechanism]: + ) -> LicensePoolDeliveryMechanism | None: """Set an appropriate LicensePoolDeliveryMechanism on the given `Loan`, creating a DeliveryMechanism if necessary. @@ -211,13 +212,13 @@ class FulfillmentInfo(CirculationInfo): def __init__( self, collection: Collection | int | None, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], - content_link: Optional[str], - content_type: Optional[str], - content: Optional[str], - content_expires: Optional[datetime.datetime], + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, + content_link: str | None, + content_type: str | None, + content: str | None, + content_expires: datetime.datetime | None, content_link_redirect: bool = False, ) -> None: """Constructor. 
@@ -281,35 +282,35 @@ def as_response(self) -> Response | ProblemDetail | None: return None @property - def content_link(self) -> Optional[str]: + def content_link(self) -> str | None: return self._content_link @content_link.setter - def content_link(self, value: Optional[str]) -> None: + def content_link(self, value: str | None) -> None: self._content_link = value @property - def content_type(self) -> Optional[str]: + def content_type(self) -> str | None: return self._content_type @content_type.setter - def content_type(self, value: Optional[str]) -> None: + def content_type(self, value: str | None) -> None: self._content_type = value @property - def content(self) -> Optional[str]: + def content(self) -> str | None: return self._content @content.setter - def content(self, value: Optional[str]) -> None: + def content(self, value: str | None) -> None: self._content = value @property - def content_expires(self) -> Optional[datetime.datetime]: + def content_expires(self) -> datetime.datetime | None: return self._content_expires @content_expires.setter - def content_expires(self, value: Optional[datetime.datetime]) -> None: + def content_expires(self, value: datetime.datetime | None) -> None: self._content_expires = value @@ -326,10 +327,10 @@ class APIAwareFulfillmentInfo(FulfillmentInfo, ABC): def __init__( self, - api: BaseCirculationAPI[BaseSettings, BaseSettings], - data_source_name: Optional[str], - identifier_type: Optional[str], - identifier: Optional[str], + api: CirculationApiType, + data_source_name: str | None, + identifier_type: str | None, + identifier: str | None, key: Any, ) -> None: """Constructor. @@ -376,39 +377,39 @@ def do_fetch(self) -> None: ... @property - def content_link(self) -> Optional[str]: + def content_link(self) -> str | None: self.fetch() return self._content_link @content_link.setter - def content_link(self, value: Optional[str]) -> None: + def content_link(self, value: str | None) -> None: raise NotImplementedError() @property - def content_type(self) -> Optional[str]: + def content_type(self) -> str | None: self.fetch() return self._content_type @content_type.setter - def content_type(self, value: Optional[str]) -> None: + def content_type(self, value: str | None) -> None: raise NotImplementedError() @property - def content(self) -> Optional[str]: + def content(self) -> str | None: self.fetch() return self._content @content.setter - def content(self, value: Optional[str]) -> None: + def content(self, value: str | None) -> None: raise NotImplementedError() @property - def content_expires(self) -> Optional[datetime.datetime]: + def content_expires(self) -> datetime.datetime | None: self.fetch() return self._content_expires @content_expires.setter - def content_expires(self, value: Optional[datetime.datetime]) -> None: + def content_expires(self, value: datetime.datetime | None) -> None: raise NotImplementedError() @@ -418,14 +419,14 @@ class LoanInfo(CirculationInfo): def __init__( self, collection: Collection | int, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], - start_date: Optional[datetime.datetime], - end_date: Optional[datetime.datetime], - fulfillment_info: Optional[FulfillmentInfo] = None, - external_identifier: Optional[str] = None, - locked_to: Optional[DeliveryMechanismInfo] = None, + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, + start_date: datetime.datetime | None, + end_date: datetime.datetime | None, + fulfillment_info: 
FulfillmentInfo | None = None, + external_identifier: str | None = None, + locked_to: DeliveryMechanismInfo | None = None, ): """Constructor. @@ -473,13 +474,13 @@ class HoldInfo(CirculationInfo): def __init__( self, collection: Collection | int, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], - start_date: Optional[datetime.datetime], - end_date: Optional[datetime.datetime], - hold_position: Optional[int], - external_identifier: Optional[str] = None, + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, + start_date: datetime.datetime | None, + end_date: datetime.datetime | None, + hold_position: int | None, + external_identifier: str | None = None, ): super().__init__(collection, data_source_name, identifier_type, identifier) self.start_date = start_date @@ -500,7 +501,7 @@ def __repr__(self) -> str: class BaseCirculationEbookLoanSettings(BaseSettings): """A mixin for settings that apply to ebook loans.""" - ebook_loan_duration: Optional[PositiveInt] = FormField( + ebook_loan_duration: PositiveInt | None = FormField( default=Collection.STANDARD_DEFAULT_LOAN_PERIOD, form=ConfigurationFormItem( label=_("Ebook Loan Duration (in Days)"), @@ -515,7 +516,7 @@ class BaseCirculationEbookLoanSettings(BaseSettings): class BaseCirculationLoanSettings(BaseSettings): """A mixin for settings that apply to loans.""" - default_loan_duration: Optional[PositiveInt] = FormField( + default_loan_duration: PositiveInt | None = FormField( default=Collection.STANDARD_DEFAULT_LOAN_PERIOD, form=ConfigurationFormItem( label=_("Default Loan Period (in Days)"), @@ -537,9 +538,7 @@ class CirculationInternalFormatsMixin: # For instance, the combination ("application/epub+zip", # "vnd.adobe/adept+xml") is called "ePub" in Axis 360 and 3M, but # is called "ebook-epub-adobe" in Overdrive. - delivery_mechanism_to_internal_format: Dict[ - Tuple[Optional[str], Optional[str]], str - ] = {} + delivery_mechanism_to_internal_format: dict[tuple[str | None, str | None], str] = {} def internal_format(self, delivery_mechanism: LicensePoolDeliveryMechanism) -> str: """Look up the internal format for this delivery mechanism or @@ -560,7 +559,21 @@ def internal_format(self, delivery_mechanism: LicensePoolDeliveryMechanism) -> s return internal_format -SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) +class BaseCirculationApiSettings(BaseSettings): + _additional_form_fields = { + "export_marc_records": ConfigurationFormItem( + label="Generate MARC Records", + type=ConfigurationFormItemType.SELECT, + description="Generate MARC Records for this collection. This setting only applies if a MARC Exporter is configured.", + options={ + "false": "Do not generate MARC records", + "true": "Generate MARC records", + }, + ) + } + + +SettingsType = TypeVar("SettingsType", bound=BaseCirculationApiSettings, covariant=True) LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) @@ -583,7 +596,7 @@ class BaseCirculationAPI( # wait til the point of fulfillment to set a delivery mechanism # (Overdrive), set this to FULFILL_STEP. If there is no choice of # delivery mechanisms (3M), set this to None. 
- SET_DELIVERY_MECHANISM_AT: Optional[str] = FULFILL_STEP + SET_DELIVERY_MECHANISM_AT: str | None = FULFILL_STEP def __init__(self, _db: Session, collection: Collection): self._db = _db @@ -626,10 +639,7 @@ def settings(self) -> SettingsType: return self.settings_load(self.integration_configuration()) def library_settings(self, library: Library | int) -> LibrarySettingsType | None: - library_id = library.id if isinstance(library, Library) else library - if library_id is None: - return None - libconfig = self.integration_configuration().for_library(library_id=library_id) + libconfig = self.integration_configuration().for_library(library) if libconfig is None: return None config = self.library_settings_load(libconfig) @@ -666,7 +676,7 @@ def checkout( def can_fulfill_without_loan( self, - patron: Optional[Patron], + patron: Patron | None, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism, ) -> bool: @@ -690,7 +700,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: """Place a book on hold. @@ -714,6 +724,9 @@ def update_availability(self, licensepool: LicensePool) -> None: ... +CirculationApiType = BaseCirculationAPI[BaseCirculationApiSettings, BaseSettings] + + class PatronActivityCirculationAPI( BaseCirculationAPI[SettingsType, LibrarySettingsType], ABC ): @@ -740,10 +753,8 @@ def __init__( self, db: Session, library: Library, - analytics: Optional[Analytics] = None, - registry: Optional[ - IntegrationRegistry[BaseCirculationAPI[BaseSettings, BaseSettings]] - ] = None, + analytics: Analytics | None = None, + registry: IntegrationRegistry[CirculationApiType] | None = None, ): """Constructor. @@ -800,14 +811,14 @@ def __init__( self.collection_ids_for_sync.append(collection.id) @property - def library(self) -> Optional[Library]: + def library(self) -> Library | None: if self.library_id is None: return None return Library.by_id(self._db, self.library_id) def api_for_license_pool( self, licensepool: LicensePool - ) -> Optional[BaseCirculationAPI[BaseSettings, BaseSettings]]: + ) -> CirculationApiType | None: """Find the API to use for the given license pool.""" return self.api_for_collection.get(licensepool.collection.id) @@ -825,8 +836,8 @@ def can_revoke_hold(self, licensepool: LicensePool, hold: Hold) -> bool: def _collect_event( self, - patron: Optional[Patron], - licensepool: Optional[LicensePool], + patron: Patron | None, + licensepool: LicensePool | None, name: str, include_neighborhood: bool = False, ) -> None: @@ -909,8 +920,8 @@ def borrow( pin: str, licensepool: LicensePool, delivery_mechanism: LicensePoolDeliveryMechanism, - hold_notification_email: Optional[str] = None, - ) -> Tuple[Optional[Loan], Optional[Hold], bool]: + hold_notification_email: str | None = None, + ) -> tuple[Loan | None, Hold | None, bool]: """Either borrow a book or put it on hold. Don't worry about fulfilling the loan yet. 
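The new `BaseCirculationApiSettings` base class carries the shared MARC-export form field, `SettingsType` is re-bound to it covariantly, and `CirculationApiType` abbreviates the fully parameterized base API. A toy model of that typing arrangement (class bodies are stand-ins):

```python
# Toy model of the SettingsType / CirculationApiType arrangement.
from typing import Generic, TypeVar


class BaseCirculationApiSettings: ...


class Axis360Settings(BaseCirculationApiSettings): ...


SettingsType = TypeVar("SettingsType", bound=BaseCirculationApiSettings, covariant=True)


class BaseCirculationAPI(Generic[SettingsType]): ...


class Axis360API(BaseCirculationAPI[Axis360Settings]): ...


# Covariance is what lets one alias cover every concrete API subclass.
CirculationApiType = BaseCirculationAPI[BaseCirculationApiSettings]
api: CirculationApiType = Axis360API()
print(type(api).__name__)  # Axis360API
```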
@@ -1250,9 +1261,9 @@ def patron_at_hold_limit(self, patron: Patron) -> bool: def can_fulfill_without_loan( self, - patron: Optional[Patron], - pool: Optional[LicensePool], - lpdm: Optional[LicensePoolDeliveryMechanism], + patron: Patron | None, + pool: LicensePool | None, + lpdm: LicensePoolDeliveryMechanism | None, ) -> bool: """Can we deliver the given book in the given format to the given patron, even though the patron has no active loan for that @@ -1448,7 +1459,7 @@ def release_hold( def patron_activity( self, patron: Patron, pin: str - ) -> Tuple[List[LoanInfo], List[HoldInfo], bool]: + ) -> tuple[list[LoanInfo], list[HoldInfo], bool]: """Return a record of the patron's current activity vis-a-vis all relevant external loan sources. @@ -1462,18 +1473,20 @@ def patron_activity( class PatronActivityThread(Thread): def __init__( self, - api: PatronActivityCirculationAPI[BaseSettings, BaseSettings], + api: PatronActivityCirculationAPI[ + BaseCirculationApiSettings, BaseSettings + ], patron: Patron, pin: str, ) -> None: self.api = api self.patron = patron self.pin = pin - self.activity: Optional[Iterable[LoanInfo | HoldInfo]] = None - self.exception: Optional[Exception] = None - self.trace: Tuple[ - Type[BaseException], BaseException, TracebackType - ] | Tuple[None, None, None] | None = None + self.activity: Iterable[LoanInfo | HoldInfo] | None = None + self.exception: Exception | None = None + self.trace: tuple[ + type[BaseException], BaseException, TracebackType + ] | tuple[None, None, None] | None = None super().__init__() def run(self) -> None: @@ -1506,8 +1519,8 @@ def run(self) -> None: thread.start() for thread in threads: thread.join() - loans: List[LoanInfo] = [] - holds: List[HoldInfo] = [] + loans: list[LoanInfo] = [] + holds: list[HoldInfo] = [] complete = True for thread in threads: if thread.exception: @@ -1556,7 +1569,7 @@ def local_holds(self, patron: Patron) -> Query[Hold]: def sync_bookshelf( self, patron: Patron, pin: str, force: bool = False - ) -> Tuple[List[Loan] | Query[Loan], List[Hold] | Query[Hold]]: + ) -> tuple[list[Loan] | Query[Loan], list[Hold] | Query[Hold]]: """Sync our internal model of a patron's bookshelf with any external vendors that provide books to the patron's library. @@ -1579,7 +1592,7 @@ def sync_bookshelf( # Assuming everything goes well, we will set # Patron.last_loan_activity_sync to this value -- the moment # just before we started contacting the vendor APIs. - last_loan_activity_sync: Optional[datetime.datetime] = utc_now() + last_loan_activity_sync: datetime.datetime | None = utc_now() # Update the external view of the patron's current state. remote_loans, remote_holds, complete = self.patron_activity(patron, pin) @@ -1624,8 +1637,8 @@ def sync_bookshelf( active_loans = [] active_holds = [] - start: Optional[datetime.datetime] - end: Optional[datetime.datetime] + start: datetime.datetime | None + end: datetime.datetime | None for loan in remote_loans: # This is a remote loan. Find or create the corresponding # local loan. 
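`PatronActivityThread` records each vendor API's results, or the exception it raised, on the thread object itself; the caller starts every thread, joins them all, then merges loans and holds. A condensed sketch of that gather pattern, with fake fetchers:

```python
# Condensed sketch of the thread-per-API gather pattern; fetchers are fakes.
from collections.abc import Callable
from threading import Thread


class ActivityThread(Thread):
    def __init__(self, fetch: Callable[[], list[str]]) -> None:
        super().__init__()
        self.fetch = fetch
        self.activity: list[str] | None = None
        self.exception: Exception | None = None

    def run(self) -> None:
        try:
            self.activity = self.fetch()
        except Exception as e:  # mirror the per-thread catch-all above
            self.exception = e


threads = [ActivityThread(lambda: ["loan-1"]), ActivityThread(lambda: ["hold-9"])]
for t in threads:
    t.start()
for t in threads:
    t.join()
print([t.activity for t in threads if t.exception is None])  # [['loan-1'], ['hold-9']]
```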
diff --git a/api/circulation_exceptions.py b/api/circulation_exceptions.py index 0b6220611..d4b187a6a 100644 --- a/api/circulation_exceptions.py +++ b/api/circulation_exceptions.py @@ -1,5 +1,3 @@ -from typing import Optional - from flask_babel import lazy_gettext as _ from api.problem_details import * @@ -143,7 +141,7 @@ class LimitReached(CirculationException): """ status_code = 403 - BASE_DOC: Optional[ProblemDetail] = None + BASE_DOC: ProblemDetail | None = None MESSAGE_WITH_LIMIT = None def __init__(self, message=None, debug_info=None, limit=None): diff --git a/api/circulation_manager.py b/api/circulation_manager.py new file mode 100644 index 000000000..15ce86370 --- /dev/null +++ b/api/circulation_manager.py @@ -0,0 +1,466 @@ +from __future__ import annotations + +import urllib.parse +from typing import TYPE_CHECKING + +import flask +from dependency_injector.wiring import Provide, inject +from expiringdict import ExpiringDict +from flask_babel import lazy_gettext as _ +from sqlalchemy import select + +from api.authenticator import Authenticator +from api.circulation import CirculationAPI +from api.config import Configuration +from api.controller.analytics import AnalyticsController +from api.controller.annotation import AnnotationController +from api.controller.catalog_descriptions import CatalogDescriptionsController # Finland +from api.controller.device_tokens import DeviceTokensController +from api.controller.index import IndexController +from api.controller.loan import LoanController +from api.controller.marc import MARCRecordController +from api.controller.odl_notification import ODLNotificationController +from api.controller.opds_feed import OPDSFeedController +from api.controller.patron_auth_token import PatronAuthTokenController +from api.controller.playtime_entries import PlaytimeEntriesController +from api.controller.profile import ProfileController +from api.controller.static_file import StaticFileController +from api.controller.urn_lookup import URNLookupController +from api.controller.work import WorkController +from api.custom_index import CustomIndexView +from api.ekirjasto_controller import EkirjastoController # Finland +from api.lanes import load_lanes +from api.opensearch_analytics_search import OpenSearchAnalyticsSearch # Finland +from api.problem_details import * +from api.saml.controller import SAMLController +from core.app_server import ApplicationVersionController, load_facets_from_request +from core.external_search import ExternalSearchIndex +from core.feed.annotator.circulation import ( + CirculationManagerAnnotator, + LibraryAnnotator, +) +from core.lane import Lane, WorkList +from core.model import ConfigurationSetting, Library +from core.model.discovery_service_registration import DiscoveryServiceRegistration +from core.service.container import Services +from core.service.logging.configuration import LogLevel +from core.util.log import LoggerMixin, elapsed_time_logging, log_elapsed_time + +if TYPE_CHECKING: + from api.admin.controller.admin_search import AdminSearchController + from api.admin.controller.announcement_service import AnnouncementSettings + from api.admin.controller.catalog_services import CatalogServicesController + from api.admin.controller.collection_self_tests import CollectionSelfTestsController + from api.admin.controller.collection_settings import CollectionSettingsController + from api.admin.controller.custom_lists import CustomListsController + from api.admin.controller.dashboard import DashboardController + from 
api.admin.controller.discovery_service_library_registrations import ( + DiscoveryServiceLibraryRegistrationsController, + ) + from api.admin.controller.discovery_services import DiscoveryServicesController + from api.admin.controller.feed import FeedController + from api.admin.controller.individual_admin_settings import ( + IndividualAdminSettingsController, + ) + from api.admin.controller.lanes import LanesController + from api.admin.controller.library_settings import LibrarySettingsController + from api.admin.controller.metadata_service_self_tests import ( + MetadataServiceSelfTestsController, + ) + from api.admin.controller.metadata_services import MetadataServicesController + from api.admin.controller.patron import PatronController + from api.admin.controller.patron_auth_service_self_tests import ( + PatronAuthServiceSelfTestsController, + ) + from api.admin.controller.patron_auth_services import PatronAuthServicesController + from api.admin.controller.quicksight import QuickSightController + from api.admin.controller.reset_password import ResetPasswordController + from api.admin.controller.search_service_self_tests import ( + SearchServiceSelfTestsController, + ) + from api.admin.controller.self_tests import SelfTestsController + from api.admin.controller.settings import SettingsController + from api.admin.controller.sign_in import SignInController + from api.admin.controller.sitewide_services import ( + SearchServicesController, + SitewideServicesController, + ) + from api.admin.controller.sitewide_settings import ( + SitewideConfigurationSettingsController, + ) + from api.admin.controller.timestamps import TimestampsController + from api.admin.controller.view import ViewController + from api.admin.controller.work_editor import WorkController as AdminWorkController + + +class CirculationManager(LoggerMixin): + # API Controllers + index_controller: IndexController + opds_feeds: OPDSFeedController + marc_records: MARCRecordController + loans: LoanController + annotations: AnnotationController + urn_lookup: URNLookupController + work_controller: WorkController + analytics_controller: AnalyticsController + profiles: ProfileController + patron_devices: DeviceTokensController + version: ApplicationVersionController + odl_notification_controller: ODLNotificationController + static_files: StaticFileController + playtime_entries: PlaytimeEntriesController + + # Admin controllers + admin_sign_in_controller: SignInController + admin_reset_password_controller: ResetPasswordController + timestamps_controller: TimestampsController + admin_work_controller: AdminWorkController + admin_feed_controller: FeedController + admin_custom_lists_controller: CustomListsController + admin_lanes_controller: LanesController + admin_dashboard_controller: DashboardController + admin_settings_controller: SettingsController + admin_patron_controller: PatronController + admin_self_tests_controller: SelfTestsController + admin_discovery_services_controller: DiscoveryServicesController + admin_discovery_service_library_registrations_controller: DiscoveryServiceLibraryRegistrationsController + admin_metadata_services_controller: MetadataServicesController + admin_metadata_service_self_tests_controller: MetadataServiceSelfTestsController + admin_patron_auth_services_controller: PatronAuthServicesController + admin_patron_auth_service_self_tests_controller: PatronAuthServiceSelfTestsController + admin_collection_settings_controller: CollectionSettingsController + admin_collection_self_tests_controller: 
CollectionSelfTestsController + admin_sitewide_configuration_settings_controller: SitewideConfigurationSettingsController + admin_library_settings_controller: LibrarySettingsController + admin_individual_admin_settings_controller: IndividualAdminSettingsController + admin_sitewide_services_controller: SitewideServicesController + admin_search_service_self_tests_controller: SearchServiceSelfTestsController + admin_search_services_controller: SearchServicesController + admin_catalog_services_controller: CatalogServicesController + admin_announcement_service: AnnouncementSettings + admin_search_controller: AdminSearchController + admin_view_controller: ViewController + admin_quicksight_controller: QuickSightController + + @inject + def __init__( + self, + _db, + services: Services = Provide[Services], + ): + self._db = _db + self.services = services + self.analytics = services.analytics.analytics() + self.site_configuration_last_update = ( + Configuration.site_configuration_last_update(self._db, timeout=0) + ) + self.setup_one_time_controllers() + self.load_settings() + + def load_facets_from_request(self, *args, **kwargs): + """Load a faceting object from the incoming request, but also apply some + application-specific access restrictions: + + * You can't use nonstandard caching rules unless you're an authenticated administrator. + * You can't access a WorkList that's not accessible to you. + """ + + facets = load_facets_from_request(*args, **kwargs) + + worklist = kwargs.get("worklist") + if worklist is not None: + # Try to get the index controller. If it's not initialized + # for any reason, don't run this check -- we have bigger + # problems. + index_controller = getattr(self, "index_controller", None) + if index_controller and not worklist.accessible_to( + index_controller.request_patron + ): + return NO_SUCH_LANE.detailed(_("Lane does not exist")) + + return facets + + def reload_settings_if_changed(self): + """If the site configuration has been updated, reload the + CirculationManager's configuration from the database. + """ + last_update = Configuration.site_configuration_last_update(self._db) + if last_update > self.site_configuration_last_update: + self.load_settings() + self.site_configuration_last_update = last_update + + @log_elapsed_time(log_level=LogLevel.info, message_prefix="load_settings") + def load_settings(self): + """Load all necessary configuration settings and external + integrations from the database. + + This is called once when the CirculationManager is + initialized. It may also be called later to reload the site + configuration after changes are made in the administrative + interface. + """ + with elapsed_time_logging( + log_method=self.log.debug, + skip_start=True, + message_prefix="load_settings - load libraries", + ): + libraries = self._db.query(Library).all() + + with elapsed_time_logging( + log_method=self.log.debug, + skip_start=True, + message_prefix="load_settings - populate caches", + ): + # Populate caches + Library.cache_warm(self._db, lambda: libraries) + ConfigurationSetting.cache_warm(self._db) + + self.auth = Authenticator(self._db, libraries, self.analytics) + + self.setup_external_search() + + # Finland + self.setup_opensearch_analytics_search() + + # Track the Lane configuration for each library by mapping its + # short name to the top-level lane. + new_top_level_lanes = {} + # Create a CirculationAPI for each library. 
+ new_circulation_apis = {} + # Potentially load a CustomIndexView for each library + new_custom_index_views = {} + + with elapsed_time_logging( + log_method=self.log.debug, + message_prefix="load_settings - per-library lanes, custom indexes, api", + ): + for library in libraries: + new_top_level_lanes[library.id] = load_lanes(self._db, library) + new_custom_index_views[library.id] = CustomIndexView.for_library( + library + ) + new_circulation_apis[library.id] = self.setup_circulation( + library, self.analytics + ) + + self.top_level_lanes = new_top_level_lanes + self.circulation_apis = new_circulation_apis + self.custom_index_views = new_custom_index_views + + # Assemble the list of patron web client domains from individual + # library registration settings as well as a sitewide setting. + patron_web_domains = set() + + def get_domain(url): + url = url.strip() + if url == "*": + return url + ( + scheme, + netloc, + path, + parameters, + query, + fragment, + ) = urllib.parse.urlparse(url) + if scheme and netloc: + return scheme + "://" + netloc + else: + return None + + sitewide_patron_web_client_urls = ConfigurationSetting.sitewide( + self._db, Configuration.PATRON_WEB_HOSTNAMES + ).value + if sitewide_patron_web_client_urls: + for url in sitewide_patron_web_client_urls.split("|"): + domain = get_domain(url) + if domain: + patron_web_domains.add(domain) + + domains = self._db.execute( + select(DiscoveryServiceRegistration.web_client).where( + DiscoveryServiceRegistration.web_client != None + ) + ).all() + for row in domains: + patron_web_domains.add(get_domain(row.web_client)) + + self.patron_web_domains = patron_web_domains + self.setup_configuration_dependent_controllers() + authentication_document_cache_time = int( + ConfigurationSetting.sitewide( + self._db, Configuration.AUTHENTICATION_DOCUMENT_CACHE_TIME + ).value_or_default(3600) + ) + self.authentication_for_opds_documents = ExpiringDict( + max_len=1000, max_age_seconds=authentication_document_cache_time + ) + + # Finland + @property + def opensearch_analytics_search(self): + """Retrieve or create a connection to the OpenSearch + analytics interface. + + This is created lazily so that a failure to connect only + affects feeds that depend on the search engine, not the whole + circulation manager. + """ + if not self._opensearch_analytics_search: + self.setup_opensearch_analytics_search() + return self._opensearch_analytics_search + + # Finland + def setup_opensearch_analytics_search(self): + try: + self._opensearch_analytics_search = OpenSearchAnalyticsSearch() + self.opensearch_analytics_search_initialization_exception = None + except Exception as e: + self.log.error("Exception initializing search engine: %s", e) + self._opensearch_analytics_search = None + self.opensearch_analytics_search_initialization_exception = e + return self._opensearch_analytics_search + + @property + def external_search(self): + """Retrieve or create a connection to the search interface. + + This is created lazily so that a failure to connect only + affects feeds that depend on the search engine, not the whole + circulation manager. 
+ """ + if not self._external_search: + self.setup_external_search() + return self._external_search + + def setup_external_search(self): + try: + self._external_search = self.setup_search() + self.external_search_initialization_exception = None + except Exception as e: + self.log.error("Exception initializing search engine: %s", e) + self._external_search = None + self.external_search_initialization_exception = e + return self._external_search + + def log_lanes(self, lanelist=None, level=0): + """Output information about the lane layout.""" + lanelist = lanelist or self.top_level_lane.sublanes + for lane in lanelist: + self.log.debug("%s%r", "-" * level, lane) + if lane.sublanes: + self.log_lanes(lane.sublanes, level + 1) + + def setup_search(self): + """Set up a search client.""" + search = ExternalSearchIndex(self._db) + if not search: + self.log.warn("No external search server configured.") + return None + return search + + def setup_circulation(self, library, analytics): + """Set up the Circulation object.""" + return CirculationAPI(self._db, library, analytics=analytics) + + def setup_one_time_controllers(self): + """Set up all the controllers that will be used by the web app. + + This method will be called only once, no matter how many times the + site configuration changes. + """ + self.index_controller = IndexController(self) + self.opds_feeds = OPDSFeedController(self) + self.marc_records = MARCRecordController(self.services.storage.public()) + self.loans = LoanController(self) + self.annotations = AnnotationController(self) + self.urn_lookup = URNLookupController(self) + self.work_controller = WorkController(self) + self.analytics_controller = AnalyticsController(self) + self.profiles = ProfileController(self) + self.patron_devices = DeviceTokensController(self) + self.version = ApplicationVersionController() + self.odl_notification_controller = ODLNotificationController(self) + self.static_files = StaticFileController(self) + self.patron_auth_token = PatronAuthTokenController(self) + self.catalog_descriptions = CatalogDescriptionsController(self) + self.playtime_entries = PlaytimeEntriesController(self) + + def setup_configuration_dependent_controllers(self): + """Set up all the controllers that depend on the + current site configuration. + + This method will be called fresh every time the site + configuration changes. + """ + self.saml_controller = SAMLController(self, self.auth) + + # Finland + self.ekirjasto_controller = EkirjastoController(self, self.auth) + + def annotator(self, lane, facets=None, *args, **kwargs): + """Create an appropriate OPDS annotator for the given lane. + + :param lane: A Lane or WorkList. + :param facets: A faceting object. + :param annotator_class: Instantiate this annotator class if possible. + Intended for use in unit tests. + """ + library = None + if lane and isinstance(lane, Lane): + library = lane.library + elif lane and isinstance(lane, WorkList): + library = lane.get_library(self._db) + if not library and hasattr(flask.request, "library"): + library = flask.request.library + + # If no library is provided, the best we can do is a generic + # annotator for this application. + if not library: + return CirculationManagerAnnotator(lane) + + # At this point we know the request is in a library context, so we + # can create a LibraryAnnotator customized for that library. + + # Some features are only available if a patron authentication + # mechanism is set up for this library. 
+ authenticator = self.auth.library_authenticators.get(library.short_name) + library_identifies_patrons = ( + authenticator is not None and authenticator.identifies_individuals + ) + annotator_class = kwargs.pop("annotator_class", LibraryAnnotator) + return annotator_class( + self.circulation_apis[library.id], + lane, + library, + top_level_title="All Books", + library_identifies_patrons=library_identifies_patrons, + facets=facets, + *args, + **kwargs, + ) + + @property + def authentication_for_opds_document(self): + """Make sure the current request's library has an Authentication For + OPDS document in the cache, then return the cached version. + + If the cache is disabled, a fresh document is created every time. + + If the query argument `debug` is provided and the + WSGI_DEBUG_KEY site-wide setting is set to True, the + authentication document is annotated with a '_debug' section + describing the current WSGI environment. Since this can reveal + internal details of deployment, it should only be enabled when + diagnosing deployment problems. + """ + name = flask.request.library.short_name + value = self.authentication_for_opds_documents.get(name, None) + if value is None: + # The document was not in the cache, either because it's + # expired or because the cache itself has been disabled. + # Create a new one and stick it in the cache for next + # time. + value = self.auth.create_authentication_document() + self.authentication_for_opds_documents[name] = value + return value diff --git a/api/config.py b/api/config.py index b8e43a916..14db63dbe 100644 --- a/api/config.py +++ b/api/config.py @@ -1,4 +1,4 @@ -from typing import Iterable, List, Optional, Tuple +from collections.abc import Iterable from Crypto.Cipher import PKCS1_OAEP from Crypto.Cipher.PKCS1_OAEP import PKCS1OAEP_Cipher @@ -152,28 +152,28 @@ def estimate_language_collections_when_unset(cls, library: Library) -> None: cls.estimate_language_collections_for_library(library) @classmethod - def large_collection_languages(cls, library: Library) -> List[str]: + def large_collection_languages(cls, library: Library) -> list[str]: cls.estimate_language_collections_when_unset(library) if library.settings.large_collection_languages is None: return [] return library.settings.large_collection_languages @classmethod - def small_collection_languages(cls, library: Library) -> List[str]: + def small_collection_languages(cls, library: Library) -> list[str]: cls.estimate_language_collections_when_unset(library) if library.settings.small_collection_languages is None: return [] return library.settings.small_collection_languages @classmethod - def tiny_collection_languages(cls, library: Library) -> List[str]: + def tiny_collection_languages(cls, library: Library) -> list[str]: cls.estimate_language_collections_when_unset(library) if library.settings.tiny_collection_languages is None: return [] return library.settings.tiny_collection_languages @classmethod - def max_outstanding_fines(cls, library: Library) -> Optional[Money]: + def max_outstanding_fines(cls, library: Library) -> Money | None: if library.settings.max_outstanding_fines is None: return None return MoneyUtility.parse(library.settings.max_outstanding_fines) @@ -245,7 +245,7 @@ def _as_mailto(cls, value): return "mailto:%s" % value @classmethod - def help_uris(cls, library: Library) -> Iterable[Tuple[Optional[str], str]]: + def help_uris(cls, library: Library) -> Iterable[tuple[str | None, str]]: """Find all the URIs that might help patrons get help from this library. 
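`help_uris` yields `(media type, uri)` pairs; given `_as_mailto` above, a plain email address presumably surfaces as a `mailto:` URI with no media type, though that detail is an assumption here. A rough stand-alone sketch, with plain arguments in place of the `Library` model:

```python
# Rough sketch of the help_uris idea; the mailto handling is an assumption.
from collections.abc import Iterable


def help_uris(
    help_email: str | None, help_web: str | None
) -> Iterable[tuple[str | None, str]]:
    if help_email:
        yield None, f"mailto:{help_email}"
    if help_web:
        yield "text/html", help_web


print(list(help_uris("help@example.com", "https://example.com/help")))
```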
@@ -257,7 +257,7 @@ def help_uris(cls, library: Library) -> Iterable[Tuple[Optional[str], str]]: yield "text/html", library.settings.help_web @classmethod - def copyright_designated_agent_uri(cls, library: Library) -> Optional[str]: + def copyright_designated_agent_uri(cls, library: Library) -> str | None: if library.settings.copyright_designated_agent_email_address: email = library.settings.copyright_designated_agent_email_address elif library.settings.help_email: @@ -268,7 +268,7 @@ def copyright_designated_agent_uri(cls, library: Library) -> Optional[str]: return cls._as_mailto(email) @classmethod - def configuration_contact_uri(cls, library: Library) -> Optional[str]: + def configuration_contact_uri(cls, library: Library) -> str | None: if library.settings.configuration_contact_email_address: email = library.settings.configuration_contact_email_address elif library.settings.help_email: diff --git a/api/controller.py b/api/controller.py deleted file mode 100644 index 4bd6c8f14..000000000 --- a/api/controller.py +++ /dev/null @@ -1,2546 +0,0 @@ -from __future__ import annotations - -import email -import json -import logging -import os -import urllib.parse -from collections import defaultdict -from time import mktime -from typing import TYPE_CHECKING, Any -from wsgiref.handlers import format_date_time - -import flask -import pytz -from attr import define -from dependency_injector.wiring import Provide, inject -from expiringdict import ExpiringDict -from flask import Response, make_response, redirect -from flask_babel import lazy_gettext as _ -from lxml import etree -from pydantic import ValidationError -from sqlalchemy import select -from sqlalchemy.orm import eagerload -from sqlalchemy.orm.exc import NoResultFound - -from api.annotations import AnnotationParser, AnnotationWriter -from api.authentication.access_token import AccessTokenProvider -from api.authenticator import Authenticator, CirculationPatronProfileStorage -from api.base_controller import BaseCirculationManagerController -from api.circulation import CirculationAPI -from api.circulation_exceptions import * -from api.config import CannotLoadConfiguration, Configuration -from api.custom_index import CustomIndexView -from api.ekirjasto_controller import EkirjastoController # Finland -from api.lanes import ( - ContributorFacets, - ContributorLane, - CrawlableCollectionBasedLane, - CrawlableCustomListBasedLane, - CrawlableFacets, - HasSeriesFacets, - JackpotFacets, - JackpotWorkList, - RecommendationLane, - RelatedBooksLane, - SeriesFacets, - SeriesLane, - load_lanes, -) -from api.model.patron_auth import PatronAuthAccessToken -from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse -from api.odl import ODLAPI -from api.odl2 import ODL2API -from api.opensearch_analytics_search import OpenSearchAnalyticsSearch # Finland -from api.problem_details import * -from api.saml.controller import SAMLController -from core.analytics import Analytics -from core.app_server import ApplicationVersionController -from core.app_server import URNLookupController as CoreURNLookupController -from core.app_server import ( - load_facets_from_request, - load_pagination_from_request, - url_for, -) -from core.entrypoint import EverythingEntryPoint -from core.external_search import ExternalSearchIndex, SortKeyPagination -from core.feed.acquisition import OPDSAcquisitionFeed -from core.feed.annotator.circulation import ( - CirculationManagerAnnotator, - LibraryAnnotator, -) -from core.feed.navigation import NavigationFeed -from 
core.feed.opds import NavigationFacets -from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList -from core.marc import MARCExporter -from core.metadata_layer import ContributorData -from core.model import ( - Annotation, - CirculationEvent, - Collection, - ConfigurationSetting, - CustomList, - DataSource, - DeliveryMechanism, - Hold, - Identifier, - Library, - LicensePool, - LicensePoolDeliveryMechanism, - Loan, - Patron, - Representation, - Session, - get_one, - json_serializer, -) -from core.model.devicetokens import ( - DeviceToken, - DuplicateDeviceTokenError, - InvalidTokenTypeError, -) -from core.model.discovery_service_registration import DiscoveryServiceRegistration -from core.opensearch import OpenSearchDocument -from core.query.playtime_entries import PlaytimeEntries -from core.service.container import Services -from core.user_profile import ProfileController as CoreProfileController -from core.util.authentication_for_opds import AuthenticationForOPDSDocument -from core.util.datetime_helpers import utc_now -from core.util.http import RemoteIntegrationException -from core.util.log import elapsed_time_logging, log_elapsed_time -from core.util.opds_writer import OPDSFeed -from core.util.problem_detail import ProblemError - -if TYPE_CHECKING: - from werkzeug import Response as wkResponse - - from api.admin.controller.admin_search import AdminSearchController - from api.admin.controller.announcement_service import AnnouncementSettings - from api.admin.controller.catalog_services import CatalogServicesController - from api.admin.controller.collection_self_tests import CollectionSelfTestsController - from api.admin.controller.collection_settings import CollectionSettingsController - from api.admin.controller.custom_lists import CustomListsController - from api.admin.controller.dashboard import DashboardController - from api.admin.controller.discovery_service_library_registrations import ( - DiscoveryServiceLibraryRegistrationsController, - ) - from api.admin.controller.discovery_services import DiscoveryServicesController - from api.admin.controller.feed import FeedController - from api.admin.controller.individual_admin_settings import ( - IndividualAdminSettingsController, - ) - from api.admin.controller.lanes import LanesController - from api.admin.controller.library_settings import LibrarySettingsController - from api.admin.controller.metadata_service_self_tests import ( - MetadataServiceSelfTestsController, - ) - from api.admin.controller.metadata_services import MetadataServicesController - from api.admin.controller.patron import PatronController - from api.admin.controller.patron_auth_service_self_tests import ( - PatronAuthServiceSelfTestsController, - ) - from api.admin.controller.patron_auth_services import PatronAuthServicesController - from api.admin.controller.quicksight import QuickSightController - from api.admin.controller.reset_password import ResetPasswordController - from api.admin.controller.search_service_self_tests import ( - SearchServiceSelfTestsController, - ) - from api.admin.controller.self_tests import SelfTestsController - from api.admin.controller.settings import SettingsController - from api.admin.controller.sign_in import SignInController - from api.admin.controller.sitewide_services import ( - SearchServicesController, - SitewideServicesController, - ) - from api.admin.controller.sitewide_settings import ( - SitewideConfigurationSettingsController, - ) - from api.admin.controller.timestamps import TimestampsController - from 
api.admin.controller.view import ViewController - from api.admin.controller.work_editor import WorkController as AdminWorkController - - -class CirculationManager: - log = logging.getLogger("api.controller.CirculationManager") - - # API Controllers - index_controller: IndexController - opds_feeds: OPDSFeedController - marc_records: MARCRecordController - loans: LoanController - annotations: AnnotationController - urn_lookup: URNLookupController - work_controller: WorkController - analytics_controller: AnalyticsController - profiles: ProfileController - patron_devices: DeviceTokensController - version: ApplicationVersionController - odl_notification_controller: ODLNotificationController - static_files: StaticFileController - playtime_entries: PlaytimeEntriesController - - # Admin controllers - admin_sign_in_controller: SignInController - admin_reset_password_controller: ResetPasswordController - timestamps_controller: TimestampsController - admin_work_controller: AdminWorkController - admin_feed_controller: FeedController - admin_custom_lists_controller: CustomListsController - admin_lanes_controller: LanesController - admin_dashboard_controller: DashboardController - admin_settings_controller: SettingsController - admin_patron_controller: PatronController - admin_self_tests_controller: SelfTestsController - admin_discovery_services_controller: DiscoveryServicesController - admin_discovery_service_library_registrations_controller: DiscoveryServiceLibraryRegistrationsController - admin_metadata_services_controller: MetadataServicesController - admin_metadata_service_self_tests_controller: MetadataServiceSelfTestsController - admin_patron_auth_services_controller: PatronAuthServicesController - admin_patron_auth_service_self_tests_controller: PatronAuthServiceSelfTestsController - admin_collection_settings_controller: CollectionSettingsController - admin_collection_self_tests_controller: CollectionSelfTestsController - admin_sitewide_configuration_settings_controller: SitewideConfigurationSettingsController - admin_library_settings_controller: LibrarySettingsController - admin_individual_admin_settings_controller: IndividualAdminSettingsController - admin_sitewide_services_controller: SitewideServicesController - admin_search_service_self_tests_controller: SearchServiceSelfTestsController - admin_search_services_controller: SearchServicesController - admin_catalog_services_controller: CatalogServicesController - admin_announcement_service: AnnouncementSettings - admin_search_controller: AdminSearchController - admin_view_controller: ViewController - admin_quicksight_controller: QuickSightController - - @inject - def __init__( - self, - _db, - analytics: Analytics = Provide[Services.analytics.analytics], - ): - self._db = _db - self.analytics = analytics - self.site_configuration_last_update = ( - Configuration.site_configuration_last_update(self._db, timeout=0) - ) - self.setup_one_time_controllers() - self.load_settings() - - def load_facets_from_request(self, *args, **kwargs): - """Load a faceting object from the incoming request, but also apply some - application-specific access restrictions: - - * You can't use nonstandard caching rules unless you're an authenticated administrator. - * You can't access a WorkList that's not accessible to you. - """ - - facets = load_facets_from_request(*args, **kwargs) - - worklist = kwargs.get("worklist") - if worklist is not None: - # Try to get the index controller. 
If it's not initialized - # for any reason, don't run this check -- we have bigger - # problems. - index_controller = getattr(self, "index_controller", None) - if index_controller and not worklist.accessible_to( - index_controller.request_patron - ): - return NO_SUCH_LANE.detailed(_("Lane does not exist")) - - return facets - - def reload_settings_if_changed(self): - """If the site configuration has been updated, reload the - CirculationManager's configuration from the database. - """ - last_update = Configuration.site_configuration_last_update(self._db) - if last_update > self.site_configuration_last_update: - self.load_settings() - self.site_configuration_last_update = last_update - - @log_elapsed_time(log_method=log.info, message_prefix="load_settings") - def load_settings(self): - """Load all necessary configuration settings and external - integrations from the database. - - This is called once when the CirculationManager is - initialized. It may also be called later to reload the site - configuration after changes are made in the administrative - interface. - """ - with elapsed_time_logging( - log_method=self.log.debug, - skip_start=True, - message_prefix="load_settings - load libraries", - ): - libraries = self._db.query(Library).all() - - with elapsed_time_logging( - log_method=self.log.debug, - skip_start=True, - message_prefix="load_settings - populate caches", - ): - # Populate caches - Library.cache_warm(self._db, lambda: libraries) - ConfigurationSetting.cache_warm(self._db) - - self.auth = Authenticator(self._db, libraries, self.analytics) - - self.setup_external_search() - - # Finland - self.setup_opensearch_analytics_search() - - # Track the Lane configuration for each library by mapping its - # short name to the top-level lane. - new_top_level_lanes = {} - # Create a CirculationAPI for each library. - new_circulation_apis = {} - # Potentially load a CustomIndexView for each library - new_custom_index_views = {} - - with elapsed_time_logging( - log_method=self.log.debug, - message_prefix="load_settings - per-library lanes, custom indexes, api", - ): - for library in libraries: - new_top_level_lanes[library.id] = load_lanes(self._db, library) - new_custom_index_views[library.id] = CustomIndexView.for_library( - library - ) - new_circulation_apis[library.id] = self.setup_circulation( - library, self.analytics - ) - - self.top_level_lanes = new_top_level_lanes - self.circulation_apis = new_circulation_apis - self.custom_index_views = new_custom_index_views - - # Assemble the list of patron web client domains from individual - # library registration settings as well as a sitewide setting. 
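The `get_domain` helper defined just below reduces each configured web-client URL to a bare origin (scheme://host), passing the `"*"` wildcard through untouched. A self-contained sketch of that rule, with illustrative assertions that are not part of the patch:

```python
from urllib.parse import urlparse

def get_domain(url: str) -> str | None:
    url = url.strip()
    if url == "*":
        # Wildcard: any patron web client origin is acceptable.
        return url
    parsed = urlparse(url)
    if parsed.scheme and parsed.netloc:
        # Keep only the origin; drop path, query and fragment.
        return f"{parsed.scheme}://{parsed.netloc}"
    # Entries that don't parse to an origin are skipped by the caller.
    return None

assert get_domain("https://reader.example.org/catalog?x=1") == "https://reader.example.org"
assert get_domain("*") == "*"
assert get_domain("not-a-url") is None
```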
- patron_web_domains = set() - - def get_domain(url): - url = url.strip() - if url == "*": - return url - scheme, netloc, path, parameters, query, fragment = urllib.parse.urlparse( - url - ) - if scheme and netloc: - return scheme + "://" + netloc - else: - return None - - sitewide_patron_web_client_urls = ConfigurationSetting.sitewide( - self._db, Configuration.PATRON_WEB_HOSTNAMES - ).value - if sitewide_patron_web_client_urls: - for url in sitewide_patron_web_client_urls.split("|"): - domain = get_domain(url) - if domain: - patron_web_domains.add(domain) - - domains = self._db.execute( - select(DiscoveryServiceRegistration.web_client).where( - DiscoveryServiceRegistration.web_client != None - ) - ).all() - for row in domains: - patron_web_domains.add(get_domain(row.web_client)) - - self.patron_web_domains = patron_web_domains - self.setup_configuration_dependent_controllers() - authentication_document_cache_time = int( - ConfigurationSetting.sitewide( - self._db, Configuration.AUTHENTICATION_DOCUMENT_CACHE_TIME - ).value_or_default(3600) - ) - self.authentication_for_opds_documents = ExpiringDict( - max_len=1000, max_age_seconds=authentication_document_cache_time - ) - - # Finland - @property - def opensearch_analytics_search(self): - """Retrieve or create a connection to the OpenSearch - analytics interface. - - This is created lazily so that a failure to connect only - affects feeds that depend on the search engine, not the whole - circulation manager. - """ - if not self._opensearch_analytics_search: - self.setup_opensearch_analytics_search() - return self._opensearch_analytics_search - - # Finland - def setup_opensearch_analytics_search(self): - try: - self._opensearch_analytics_search = OpenSearchAnalyticsSearch() - self.opensearch_analytics_search_initialization_exception = None - except Exception as e: - self.log.error("Exception initializing search engine: %s", e) - self._opensearch_analytics_search = None - self.opensearch_analytics_search_initialization_exception = e - return self._opensearch_analytics_search - - @property - def external_search(self): - """Retrieve or create a connection to the search interface. - - This is created lazily so that a failure to connect only - affects feeds that depend on the search engine, not the whole - circulation manager. - """ - if not self._external_search: - self.setup_external_search() - return self._external_search - - def setup_external_search(self): - try: - self._external_search = self.setup_search() - self.external_search_initialization_exception = None - except Exception as e: - self.log.error("Exception initializing search engine: %s", e) - self._external_search = None - self.external_search_initialization_exception = e - return self._external_search - - def log_lanes(self, lanelist=None, level=0): - """Output information about the lane layout.""" - lanelist = lanelist or self.top_level_lane.sublanes - for lane in lanelist: - self.log.debug("%s%r", "-" * level, lane) - if lane.sublanes: - self.log_lanes(lane.sublanes, level + 1) - - def setup_search(self): - """Set up a search client.""" - search = ExternalSearchIndex(self._db) - if not search: - self.log.warn("No external search server configured.") - return None - return search - - def setup_circulation(self, library, analytics): - """Set up the Circulation object.""" - return CirculationAPI(self._db, library, analytics=analytics) - - def setup_one_time_controllers(self): - """Set up all the controllers that will be used by the web app. 
- - This method will be called only once, no matter how many times the - site configuration changes. - """ - self.index_controller = IndexController(self) - self.opds_feeds = OPDSFeedController(self) - self.marc_records = MARCRecordController(self) - self.loans = LoanController(self) - self.annotations = AnnotationController(self) - self.urn_lookup = URNLookupController(self) - self.work_controller = WorkController(self) - self.analytics_controller = AnalyticsController(self) - self.profiles = ProfileController(self) - self.patron_devices = DeviceTokensController(self) - self.version = ApplicationVersionController() - self.odl_notification_controller = ODLNotificationController(self) - self.static_files = StaticFileController(self) - self.patron_auth_token = PatronAuthTokenController(self) - self.playtime_entries = PlaytimeEntriesController(self) - - def setup_configuration_dependent_controllers(self): - """Set up all the controllers that depend on the - current site configuration. - - This method will be called fresh every time the site - configuration changes. - """ - self.saml_controller = SAMLController(self, self.auth) - # Finland - self.ekirjasto_controller = EkirjastoController(self, self.auth) - - def annotator(self, lane, facets=None, *args, **kwargs): - """Create an appropriate OPDS annotator for the given lane. - - :param lane: A Lane or WorkList. - :param facets: A faceting object. - :param annotator_class: Instantiate this annotator class if possible. - Intended for use in unit tests. - """ - library = None - if lane and isinstance(lane, Lane): - library = lane.library - elif lane and isinstance(lane, WorkList): - library = lane.get_library(self._db) - if not library and hasattr(flask.request, "library"): - library = flask.request.library - - # If no library is provided, the best we can do is a generic - # annotator for this application. - if not library: - return CirculationManagerAnnotator(lane) - - # At this point we know the request is in a library context, so we - # can create a LibraryAnnotator customized for that library. - - # Some features are only available if a patron authentication - # mechanism is set up for this library. - authenticator = self.auth.library_authenticators.get(library.short_name) - library_identifies_patrons = ( - authenticator is not None and authenticator.identifies_individuals - ) - annotator_class = kwargs.pop("annotator_class", LibraryAnnotator) - return annotator_class( - self.circulation_apis[library.id], - lane, - library, - top_level_title="All Books", - library_identifies_patrons=library_identifies_patrons, - facets=facets, - *args, - **kwargs, - ) - - @property - def authentication_for_opds_document(self): - """Make sure the current request's library has an Authentication For - OPDS document in the cache, then return the cached version. - - If the cache is disabled, a fresh document is created every time. - - If the query argument `debug` is provided and the - WSGI_DEBUG_KEY site-wide setting is set to True, the - authentication document is annotated with a '_debug' section - describing the current WSGI environment. Since this can reveal - internal details of deployment, it should only be enabled when - diagnosing deployment problems. - """ - name = flask.request.library.short_name - value = self.authentication_for_opds_documents.get(name, None) - if value is None: - # The document was not in the cache, either because it's - # expired or because the cache itself has been disabled. 
- # Create a new one and stick it in the cache for next - # time. - value = self.auth.create_authentication_document() - self.authentication_for_opds_documents[name] = value - return value - - -class CirculationManagerController(BaseCirculationManagerController): - def get_patron_circ_objects(self, object_class, patron, license_pools): - if not patron: - return [] - pool_ids = [pool.id for pool in license_pools] - - return ( - self._db.query(object_class) - .filter( - object_class.patron_id == patron.id, - object_class.license_pool_id.in_(pool_ids), - ) - .options(eagerload(object_class.license_pool)) - .all() - ) - - def get_patron_loan(self, patron, license_pools): - loans = self.get_patron_circ_objects(Loan, patron, license_pools) - if loans: - loan = loans[0] - return loan, loan.license_pool - return None, None - - def get_patron_hold(self, patron, license_pools): - holds = self.get_patron_circ_objects(Hold, patron, license_pools) - if holds: - hold = holds[0] - return hold, hold.license_pool - return None, None - - @property - def circulation(self): - """Return the appropriate CirculationAPI for the request Library.""" - library_id = flask.request.library.id - return self.manager.circulation_apis[library_id] - - @property - def search_engine(self): - """Return the configured external search engine, or a - ProblemDetail if none is configured. - """ - search_engine = self.manager.external_search - if not search_engine: - return REMOTE_INTEGRATION_FAILED.detailed( - _("The search index for this site is not properly configured.") - ) - return search_engine - - def handle_conditional_request(self, last_modified=None): - """Handle a conditional HTTP request. - - :param last_modified: A datetime representing the time this - resource was last modified. - - :return: a Response, if the incoming request can be handled - conditionally. Otherwise, None. - """ - if not last_modified: - return None - - # If-Modified-Since values have resolution of one second. If - # last_modified has millisecond resolution, change its - # resolution to one second. - if last_modified.microsecond: - last_modified = last_modified.replace(microsecond=0) - - if_modified_since = flask.request.headers.get("If-Modified-Since") - if not if_modified_since: - return None - - try: - parsed_if_modified_since = email.utils.parsedate_to_datetime( - if_modified_since - ) - except TypeError: - # Parse error <= Python 3.9 - return None - except ValueError: - # Parse error >= Python 3.10 - return None - if not parsed_if_modified_since: - return None - - # "[I]f the date is conforming to the RFCs it will represent a - # time in UTC but with no indication of the actual source - # timezone of the message the date comes from." - if parsed_if_modified_since.tzinfo is None: - parsed_if_modified_since = parsed_if_modified_since.replace(tzinfo=pytz.UTC) - - if parsed_if_modified_since >= last_modified: - return Response(status=304) - return None - - def load_lane(self, lane_identifier): - """Turn user input into a Lane object.""" - library_id = flask.request.library.id - - lane = None - if lane_identifier is None: - # Return the top-level lane. 
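The `handle_conditional_request` helper above is a plain implementation of HTTP revalidation: one-second timestamp resolution, RFC dates assumed to be UTC, and a `304 Not Modified` when the client's copy is still fresh. A minimal standalone sketch of the same comparison; the function name is hypothetical:

```python
from datetime import datetime, timezone
from email.utils import parsedate_to_datetime

def not_modified(if_modified_since: str | None, last_modified: datetime | None) -> bool:
    """True when a 304 Not Modified response is appropriate."""
    if not if_modified_since or not last_modified:
        return False
    # If-Modified-Since values have one-second resolution.
    last_modified = last_modified.replace(microsecond=0)
    try:
        parsed = parsedate_to_datetime(if_modified_since)
    except (TypeError, ValueError):
        # Parse errors: TypeError on Python <= 3.9, ValueError on >= 3.10.
        return False
    if parsed.tzinfo is None:
        # A conforming RFC date is UTC even without an explicit zone.
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed >= last_modified

assert not_modified(
    "Wed, 01 May 2024 12:00:00 GMT",
    datetime(2024, 5, 1, 12, 0, 0, tzinfo=timezone.utc),
)
```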
- lane = self.manager.top_level_lanes[library_id] - if isinstance(lane, Lane): - lane = self._db.merge(lane) - elif isinstance(lane, WorkList): - lane.children = [self._db.merge(child) for child in lane.children] - else: - try: - lane_identifier = int(lane_identifier) - except ValueError as e: - pass - - if isinstance(lane_identifier, int): - lane = get_one( - self._db, Lane, id=lane_identifier, library_id=library_id - ) - - if lane and not lane.accessible_to(self.request_patron): - # The authenticated patron cannot access the lane they - # requested. Act like the lane does not exist. - lane = None - - if not lane: - return NO_SUCH_LANE.detailed( - _( - "Lane %(lane_identifier)s does not exist or is not associated with library %(library_id)s", - lane_identifier=lane_identifier, - library_id=library_id, - ) - ) - - return lane - - def load_work(self, library, identifier_type, identifier): - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - - # We know there is at least one LicensePool, and all LicensePools - # for an Identifier have the same Work. - work = pools[0].work - - if work and not work.age_appropriate_for_patron(self.request_patron): - # This work is not age-appropriate for the authenticated - # patron. Don't show it. - work = NOT_AGE_APPROPRIATE - return work - - def load_licensepools(self, library, identifier_type, identifier): - """Turn user input into one or more LicensePool objects. - - :param library: The LicensePools must be associated with one of this - Library's Collections. - :param identifier_type: A type of identifier, e.g. "ISBN" - :param identifier: An identifier string, used with `identifier_type` - to look up an Identifier. - """ - _db = Session.object_session(library) - pools = ( - _db.query(LicensePool) - .join(LicensePool.collection) - .join(LicensePool.identifier) - .join(Collection.libraries) - .filter(Identifier.type == identifier_type) - .filter(Identifier.identifier == identifier) - .filter(Library.id == library.id) - .all() - ) - if not pools: - return NO_LICENSES.detailed( - _("The item you're asking about (%s/%s) isn't in this collection.") - % (identifier_type, identifier) - ) - return pools - - def load_licensepool(self, license_pool_id): - """Turns user input into a LicensePool""" - license_pool = get_one(self._db, LicensePool, id=license_pool_id) - if not license_pool: - return INVALID_INPUT.detailed( - _("License Pool #%s does not exist.") % license_pool_id - ) - - return license_pool - - def load_licensepooldelivery(self, pool, mechanism_id): - """Turn user input into a LicensePoolDeliveryMechanism object.""" - mechanism = get_one( - self._db, - LicensePoolDeliveryMechanism, - data_source=pool.data_source, - identifier=pool.identifier, - delivery_mechanism_id=mechanism_id, - on_multiple="interchangeable", - ) - return mechanism or BAD_DELIVERY_MECHANISM - - def apply_borrowing_policy(self, patron, license_pool): - """Apply the borrowing policy of the patron's library to the - book they're trying to check out. - - This prevents a patron from borrowing an age-inappropriate book - or from placing a hold in a library that prohibits holds. - - Generally speaking, both of these operations should be - prevented before they get to this point; this is an extra - layer of protection. - - :param patron: A `Patron`. It's okay if this turns out to be a - `ProblemDetail` or `None` due to a problem earlier in the - process. 
- :param license_pool`: The `LicensePool` the patron is trying to act on. - """ - if patron is None or isinstance(patron, ProblemDetail): - # An earlier stage in the process failed to authenticate - # the patron. - return patron - - work = license_pool.work - if work is not None and not work.age_appropriate_for_patron(patron): - return NOT_AGE_APPROPRIATE - - if ( - not patron.library.settings.allow_holds - and license_pool.licenses_available == 0 - and not license_pool.open_access - and not license_pool.unlimited_access - ): - return FORBIDDEN_BY_POLICY.detailed( - _("Library policy prohibits the placement of holds."), status_code=403 - ) - return None - - -class IndexController(CirculationManagerController): - """Redirect the patron to the appropriate feed.""" - - def __call__(self): - # If this library provides a custom index view, use that. - library = flask.request.library - custom = self.manager.custom_index_views.get(library.id) - if custom is not None: - annotator = self.manager.annotator(None) - return custom(library, annotator) - - # The simple case: the app is equally open to all clients. - library_short_name = flask.request.library.short_name - if not self.has_root_lanes(): - return redirect( - url_for( - "acquisition_groups", - library_short_name=library_short_name, - _external=True, - ) - ) - - # The more complex case. We must authorize the patron, check - # their type, and redirect them to an appropriate feed. - return self.appropriate_index_for_patron_type() - - def authentication_document(self): - """Serve this library's Authentication For OPDS document.""" - return Response( - self.manager.authentication_for_opds_document, - 200, - {"Content-Type": AuthenticationForOPDSDocument.MEDIA_TYPE}, - ) - - def has_root_lanes(self): - """Does the active library feature root lanes for patrons of - certain types? - - :return: A boolean - """ - return flask.request.library.has_root_lanes - - def authenticated_patron_root_lane(self): - patron = self.authenticated_patron_from_request() - if isinstance(patron, ProblemDetail): - return patron - if isinstance(patron, Response): - return patron - return patron.root_lane - - def appropriate_index_for_patron_type(self): - library_short_name = flask.request.library.short_name - root_lane = self.authenticated_patron_root_lane() - if isinstance(root_lane, ProblemDetail): - return root_lane - if isinstance(root_lane, Response): - return root_lane - if root_lane is None: - return redirect( - url_for( - "acquisition_groups", - library_short_name=library_short_name, - _external=True, - ) - ) - - return redirect( - url_for( - "acquisition_groups", - library_short_name=library_short_name, - lane_identifier=root_lane.id, - _external=True, - ) - ) - - -class OPDSFeedController(CirculationManagerController): - def groups(self, lane_identifier, feed_class=OPDSAcquisitionFeed): - """Build or retrieve a grouped acquisition feed. - - :param lane_identifier: An identifier that uniquely identifiers - the WorkList whose feed we want. - :param feed_class: A replacement for AcquisitionFeed, for use in - tests. - """ - library = flask.request.library - - # Special case: a patron with a root lane who attempts to access - # the library's top-level WorkList is redirected to their root - # lane (as though they had accessed the index controller) - # rather than being denied access. 
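Both the index controller above and the `groups` special case that follows funnel patrons to the same place: the grouped feed for their root lane if they have one, otherwise the library-wide grouped feed. A sketch of that routing decision; the paths shown are illustrative, not the real `url_for` routes:

```python
def index_redirect_target(library_short_name: str, root_lane_id: int | None) -> str:
    if root_lane_id is None:
        # No restricted root lane: send the patron to the
        # library-wide grouped feed.
        return f"/{library_short_name}/groups"
    # Patrons with a root lane only ever see that subtree.
    return f"/{library_short_name}/groups/{root_lane_id}"

assert index_redirect_target("ekirjasto", None) == "/ekirjasto/groups"
assert index_redirect_target("ekirjasto", 42) == "/ekirjasto/groups/42"
```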
- if lane_identifier is None: - patron = self.request_patron - if patron is not None and patron.root_lane: - return redirect( - url_for( - "acquisition_groups", - library_short_name=library.short_name, - lane_identifier=patron.root_lane.id, - _external=True, - ) - ) - - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - - if not lane.children: - # This lane has no children. Although we can technically - # create a grouped feed, it would be an unsatisfying - # gateway to a paginated feed. We should just serve the - # paginated feed. - return self.feed(lane_identifier, feed_class) - - facet_class_kwargs = dict( - minimum_featured_quality=library.settings.minimum_featured_quality, - ) - facets = self.manager.load_facets_from_request( - worklist=lane, - base_class=FeaturedFacets, - base_class_constructor_kwargs=facet_class_kwargs, - ) - if isinstance(facets, ProblemDetail): - return facets - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - url = url_for( - "acquisition_groups", - lane_identifier=lane_identifier, - library_short_name=library.short_name, - _external=True, - ) - - annotator = self.manager.annotator(lane, facets) - return feed_class.groups( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - annotator=annotator, - facets=facets, - search_engine=search_engine, - ).as_response(mime_types=flask.request.accept_mimetypes) - - def feed(self, lane_identifier, feed_class=OPDSAcquisitionFeed): - """Build or retrieve a paginated acquisition feed. - - :param lane_identifier: An identifier that uniquely identifiers - the WorkList whose feed we want. - :param feed_class: A replacement for AcquisitionFeed, for use in - tests. - """ - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - facets = self.manager.load_facets_from_request(worklist=lane) - if isinstance(facets, ProblemDetail): - return facets - pagination = load_pagination_from_request(SortKeyPagination) - if isinstance(pagination, ProblemDetail): - return pagination - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - library_short_name = flask.request.library.short_name - url = url_for( - "feed", - lane_identifier=lane_identifier, - library_short_name=library_short_name, - _external=True, - ) - - annotator = self.manager.annotator(lane, facets=facets) - max_age = flask.request.args.get("max_age") - feed = feed_class.page( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - annotator=annotator, - facets=facets, - pagination=pagination, - search_engine=search_engine, - ) - return feed.as_response( - max_age=int(max_age) if max_age else lane.max_cache_age(), - mime_types=flask.request.accept_mimetypes, - ) - - def navigation(self, lane_identifier): - """Build or retrieve a navigation feed, for clients that do not support groups.""" - - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - library = flask.request.library - library_short_name = library.short_name - url = url_for( - "navigation_feed", - lane_identifier=lane_identifier, - library_short_name=library_short_name, - _external=True, - ) - - title = lane.display_name - facet_class_kwargs = dict( - minimum_featured_quality=library.settings.minimum_featured_quality, - ) - facets = self.manager.load_facets_from_request( - worklist=lane, - base_class=NavigationFacets, - 
base_class_constructor_kwargs=facet_class_kwargs, - ) - annotator = self.manager.annotator(lane, facets) - return NavigationFeed.navigation( - _db=self._db, - title=title, - url=url, - worklist=lane, - annotator=annotator, - facets=facets, - ).as_response(max_age=lane.max_cache_age()) - - def crawlable_library_feed(self): - """Build or retrieve a crawlable acquisition feed for the - request library. - """ - library = flask.request.library - url = url_for( - "crawlable_library_feed", - library_short_name=library.short_name, - _external=True, - ) - title = library.name - lane = CrawlableCollectionBasedLane() - lane.initialize(library) - return self._crawlable_feed(title=title, url=url, worklist=lane) - - def crawlable_collection_feed(self, collection_name): - """Build or retrieve a crawlable acquisition feed for the - requested collection. - """ - collection = get_one(self._db, Collection, name=collection_name) - if not collection: - return NO_SUCH_COLLECTION - title = collection.name - url = url_for( - "crawlable_collection_feed", collection_name=collection.name, _external=True - ) - lane = CrawlableCollectionBasedLane() - lane.initialize([collection]) - return self._crawlable_feed(title=title, url=url, worklist=lane) - - def crawlable_list_feed(self, list_name): - """Build or retrieve a crawlable, paginated acquisition feed for the - named CustomList, sorted by update date. - """ - # TODO: A library is not strictly required here, since some - # CustomLists aren't associated with a library, but this isn't - # a use case we need to support now. - library = flask.request.library - list = CustomList.find(self._db, list_name, library=library) - if not list: - return NO_SUCH_LIST - library_short_name = library.short_name - title = list.name - url = url_for( - "crawlable_list_feed", - list_name=list.name, - library_short_name=library_short_name, - _external=True, - ) - lane = CrawlableCustomListBasedLane() - lane.initialize(library, list) - return self._crawlable_feed(title=title, url=url, worklist=lane) - - def _crawlable_feed( - self, title, url, worklist, annotator=None, feed_class=OPDSAcquisitionFeed - ): - """Helper method to create a crawlable feed. - - :param title: The title to use for the feed. - :param url: The URL from which the feed will be served. - :param worklist: A crawlable Lane which controls which works show up - in the feed. - :param annotator: A custom Annotator to use when generating the feed. - :param feed_class: A drop-in replacement for OPDSAcquisitionFeed - for use in tests. - """ - pagination = load_pagination_from_request( - SortKeyPagination, default_size=Pagination.DEFAULT_CRAWLABLE_SIZE - ) - if isinstance(pagination, ProblemDetail): - return pagination - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - annotator = annotator or self.manager.annotator(worklist) - - # A crawlable feed has only one possible set of Facets, - # so library settings are irrelevant. - facets = CrawlableFacets.default(None) - - return feed_class.page( - _db=self._db, - title=title, - url=url, - worklist=worklist, - annotator=annotator, - facets=facets, - pagination=pagination, - search_engine=search_engine, - ).as_response( - mime_types=flask.request.accept_mimetypes, max_age=worklist.max_cache_age() - ) - - def _load_search_facets(self, lane): - entrypoints = list(flask.request.library.entrypoints) - if len(entrypoints) > 1: - # There is more than one enabled EntryPoint. - # By default, search them all. 
- default_entrypoint = EverythingEntryPoint - else: - # There is only one enabled EntryPoint, - # and no need for a special default. - default_entrypoint = None - return self.manager.load_facets_from_request( - worklist=lane, - base_class=SearchFacets, - default_entrypoint=default_entrypoint, - ) - - def search(self, lane_identifier, feed_class=OPDSAcquisitionFeed): - """Search for books.""" - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - - # Although the search query goes against Opensearch, we must - # use normal pagination because the results are sorted by - # match quality, not bibliographic information. - pagination = load_pagination_from_request( - Pagination, default_size=Pagination.DEFAULT_SEARCH_SIZE - ) - if isinstance(pagination, ProblemDetail): - return pagination - - facets = self._load_search_facets(lane) - if isinstance(facets, ProblemDetail): - return facets - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - # Check whether there is a query string -- if not, we want to - # send an OpenSearch document explaining how to search. - query = flask.request.args.get("q") - library_short_name = flask.request.library.short_name - - # Create a function that, when called, generates a URL to the - # search controller. - # - # We'll call this one way if there is no query string in the - # request arguments, and another way if there is a query - # string. - make_url_kwargs = dict(list(facets.items())) - make_url = lambda: url_for( - "lane_search", - lane_identifier=lane_identifier, - library_short_name=library_short_name, - _external=True, - **make_url_kwargs, - ) - if not query: - # Send the search form - open_search_doc = OpenSearchDocument.for_lane(lane, make_url()) - headers = {"Content-Type": "application/opensearchdescription+xml"} - return Response(open_search_doc, 200, headers) - - # We have a query -- add it to the keyword arguments used when - # generating a URL. - make_url_kwargs["q"] = query.encode("utf8") - - # Run a search. - annotator = self.manager.annotator(lane, facets) - info = OpenSearchDocument.search_info(lane) - response = feed_class.search( - _db=self._db, - title=info["name"], - url=make_url(), - lane=lane, - search_engine=search_engine, - query=query, - annotator=annotator, - pagination=pagination, - facets=facets, - ) - if isinstance(response, ProblemDetail): - return response - return response.as_response( - mime_types=flask.request.accept_mimetypes, max_age=lane.max_cache_age() - ) - - def _qa_feed( - self, feed_factory, feed_title, controller_name, facet_class, worklist_factory - ): - """Create some kind of OPDS feed designed for consumption by an - automated QA process. - - :param feed_factory: This function will be called to create the feed. - It must either be AcquisitionFeed.groups or Acquisition.page, - or it must take the same arguments as those methods. - :param feed_title: String title of the feed. - :param controller_name: Controller name to use when generating - the URL to the feed. - :param facet_class: Faceting class to load (through - load_facets_from_request). - :param worklist_factory: Function that takes (Library, Facets) - and returns a Worklist configured to generate the feed. - :return: A ProblemDetail if there's a problem loading the faceting - object; otherwise the return value of `feed_factory`. 
- """ - library = flask.request.library - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - url = url_for( - controller_name, library_short_name=library.short_name, _external=True - ) - - facets = load_facets_from_request( - base_class=facet_class, default_entrypoint=EverythingEntryPoint - ) - if isinstance(facets, ProblemDetail): - return facets - - worklist = worklist_factory(library, facets) - annotator = self.manager.annotator(worklist) - - # Since this feed will be consumed by an automated client, and - # we're choosing titles for specific purposes, there's no - # reason to put more than a single item in each group. - pagination = Pagination(size=1) - return feed_factory( - _db=self._db, - title=feed_title, - url=url, - pagination=pagination, - worklist=worklist, - annotator=annotator, - search_engine=search_engine, - facets=facets, - max_age=0, - ) - - def qa_feed(self, feed_class=OPDSAcquisitionFeed): - """Create an OPDS feed containing the information necessary to - run a full set of integration tests against this server and - the vendors it relies on. - - :param feed_class: Class to substitute for AcquisitionFeed during - tests. - """ - - def factory(library, facets): - return JackpotWorkList(library, facets) - - return self._qa_feed( - feed_factory=feed_class.groups, - feed_title="QA test feed", - controller_name="qa_feed", - facet_class=JackpotFacets, - worklist_factory=factory, - ) - - def qa_series_feed(self, feed_class=OPDSAcquisitionFeed): - """Create an OPDS feed containing books that belong to _some_ - series, without regard to _which_ series. - - :param feed_class: Class to substitute for AcquisitionFeed during - tests. - """ - - def factory(library, facets): - wl = WorkList() - wl.initialize(library) - return wl - - return self._qa_feed( - feed_factory=feed_class.page, - feed_title="QA series test feed", - controller_name="qa_series_feed", - facet_class=HasSeriesFacets, - worklist_factory=factory, - ) - - -@define -class FeedRequestParameters: - """Frequently used request parameters for feed requests""" - - library: Library | None = None - pagination: Pagination | None = None - facets: Facets | None = None - problem: ProblemDetail | None = None - - -class MARCRecordController(CirculationManagerController): - DOWNLOAD_TEMPLATE = """ - - - -%(body)s - -""" - - def download_page(self): - library = flask.request.library - body = "

Download MARC files for %s

" % library.name - time_format = "%B %-d, %Y" - - # Check if a MARC exporter is configured so we can show a - # message if it's not. - exporter = None - try: - exporter = MARCExporter.from_config(library) - except CannotLoadConfiguration as e: - body += ( - "

" - + _("No MARC exporter is currently configured for this library.") - + "

" - ) - - if len(library.cachedmarcfiles) < 1 and exporter: - body += "

" + _("MARC files aren't ready to download yet.") + "

" - - files_by_lane = defaultdict(dict) - for file in library.cachedmarcfiles: - if file.start_time == None: - files_by_lane[file.lane]["full"] = file - else: - if not files_by_lane[file.lane].get("updates"): - files_by_lane[file.lane]["updates"] = [] - files_by_lane[file.lane]["updates"].append(file) - - # TODO: By default the MARC script only caches one level of lanes, - # so sorting by priority is good enough. - lanes = sorted( - list(files_by_lane.keys()), key=lambda x: x.priority if x else -1 - ) - - for lane in lanes: - files = files_by_lane[lane] - body += "
" - body += "

%s

" % (lane.display_name if lane else _("All Books")) - if files.get("full"): - file = files.get("full") - full_url = file.representation.mirror_url - full_label = _( - "Full file - last updated %(update_time)s", - update_time=file.end_time.strftime(time_format), - ) - body += '{}'.format( - files.get("full").representation.mirror_url, - full_label, - ) - - if files.get("updates"): - body += "

%s

" % _("Update-only files") - body += "
    " - files.get("updates").sort(key=lambda x: x.end_time) - for update in files.get("updates"): - update_url = update.representation.mirror_url - update_label = _( - "Updates from %(start_time)s to %(end_time)s", - start_time=update.start_time.strftime(time_format), - end_time=update.end_time.strftime(time_format), - ) - body += '
  • {}
  • '.format( - update_url, - update_label, - ) - body += "
" - - body += "
" - body += "
" - - html = self.DOWNLOAD_TEMPLATE % dict(body=body) - headers = dict() - headers["Content-Type"] = "text/html" - return Response(html, 200, headers) - - -class LoanController(CirculationManagerController): - def sync(self): - """Sync the authenticated patron's loans and holds with all third-party - providers. - - :return: A Response containing an OPDS feed with up-to-date information. - """ - patron = flask.request.patron - - # Save some time if we don't believe the patron's loans or holds have - # changed since the last time the client requested this feed. - response = self.handle_conditional_request(patron.last_loan_activity_sync) - if isinstance(response, Response): - return response - - # TODO: SimplyE used to make a HEAD request to the bookshelf feed - # as a quick way of checking authentication. Does this still happen? - # It shouldn't -- the patron profile feed should be used instead. - # If it's not used, we can take this out. - if flask.request.method == "HEAD": - return Response() - - # First synchronize our local list of loans and holds with all - # third-party loan providers. - if patron.authorization_identifier: - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - try: - self.circulation.sync_bookshelf(patron, credential) - except Exception as e: - # If anything goes wrong, omit the sync step and just - # display the current active loans, as we understand them. - self.manager.log.error( - "ERROR DURING SYNC for %s: %r", patron.id, e, exc_info=e - ) - - # Then make the feed. - feed = OPDSAcquisitionFeed.active_loans_for(self.circulation, patron) - response = feed.as_response( - max_age=0, - private=True, - mime_types=flask.request.accept_mimetypes, - ) - - last_modified = patron.last_loan_activity_sync - if last_modified: - response.last_modified = last_modified - return response - - def borrow(self, identifier_type, identifier, mechanism_id=None): - """Create a new loan or hold for a book. - - :return: A Response containing an OPDS entry that includes a link of rel - "http://opds-spec.org/acquisition", which can be used to fetch the - book or the license file. - """ - patron = flask.request.patron - library = flask.request.library - - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - - result = self.best_lendable_pool( - library, patron, identifier_type, identifier, mechanism_id - ) - if not result: - # No LicensePools were found and no ProblemDetail - # was returned. Send a generic ProblemDetail. - return NO_LICENSES.detailed(_("I've never heard of this work.")) - if isinstance(result, ProblemDetail): - # There was a problem determining the appropriate - # LicensePool to use. - return result - - if isinstance(result, Loan): - # We already have a Loan, so there's no need to go to the API. - loan_or_hold = result - is_new = False - else: - # We need to actually go out to the API - # and try to take out a loan. - pool, mechanism = result - loan_or_hold, is_new = self._borrow(patron, credential, pool, mechanism) - - if isinstance(loan_or_hold, ProblemDetail): - return loan_or_hold - - # At this point we have either a loan or a hold. If a loan, serve - # a feed that tells the patron how to fulfill the loan. If a hold, - # serve a feed that talks about the hold. 
- response_kwargs = {} - if is_new: - response_kwargs["status"] = 201 - else: - response_kwargs["status"] = 200 - return OPDSAcquisitionFeed.single_entry_loans_feed( - self.circulation, loan_or_hold, **response_kwargs - ) - - def _borrow(self, patron, credential, pool, mechanism): - """Go out to the API, try to take out a loan, and handle errors as - problem detail documents. - - :param patron: The Patron who's trying to take out the loan - :param credential: A Credential to use when authenticating - as this Patron with the external API. - :param pool: The LicensePool for the book the Patron wants. - :mechanism: The DeliveryMechanism to request when asking for - a loan. - :return: a 2-tuple (result, is_new) `result` is a Loan (if one - could be created or found), a Hold (if a Loan could not be - created but a Hold could be), or a ProblemDetail (if the - entire operation failed). - """ - result = None - is_new = False - try: - loan, hold, is_new = self.circulation.borrow( - patron, credential, pool, mechanism - ) - result = loan or hold - except NoOpenAccessDownload as e: - result = NO_LICENSES.detailed( - _("Couldn't find an open-access download link for this book."), - status_code=404, - ) - except PatronAuthorizationFailedException as e: - result = INVALID_CREDENTIALS - except (PatronLoanLimitReached, PatronHoldLimitReached) as e: - result = e.as_problem_detail_document().with_debug(str(e)) - except DeliveryMechanismError as e: - result = BAD_DELIVERY_MECHANISM.with_debug( - str(e), status_code=e.status_code - ) - except OutstandingFines as e: - result = OUTSTANDING_FINES.detailed( - _( - "You must pay your $%(fine_amount).2f outstanding fines before you can borrow more books.", - fine_amount=patron.fines, - ) - ) - except AuthorizationExpired as e: - result = e.as_problem_detail_document(debug=False) - except AuthorizationBlocked as e: - result = e.as_problem_detail_document(debug=False) - except CannotLoan as e: - result = CHECKOUT_FAILED.with_debug(str(e)) - except CannotHold as e: - result = HOLD_FAILED.with_debug(str(e)) - except CannotRenew as e: - result = RENEW_FAILED.with_debug(str(e)) - except NotFoundOnRemote as e: - result = NOT_FOUND_ON_REMOTE - except CirculationException as e: - # Generic circulation error. - result = CHECKOUT_FAILED.with_debug(str(e)) - - if result is None: - # This shouldn't happen, but if it does, it means no exception - # was raised but we just didn't get a loan or hold. Return a - # generic circulation error. - result = HOLD_FAILED - return result, is_new - - def best_lendable_pool( - self, library, patron, identifier_type, identifier, mechanism_id - ): - """ - Of the available LicensePools for the given Identifier, return the - one that's the best candidate for loaning out right now. - - :return: A Loan if this patron already has an active loan, otherwise a LicensePool. - """ - # Turn source + identifier into a set of LicensePools - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - # Something went wrong. - return pools - - best = None - mechanism = None - problem_doc = None - - existing_loans = ( - self._db.query(Loan) - .filter( - Loan.license_pool_id.in_([lp.id for lp in pools]), Loan.patron == patron - ) - .all() - ) - if existing_loans: - # The patron already has at least one loan on this book already. - # To make the "borrow" operation idempotent, return one of - # those loans instead of an error. - return existing_loans[0] - - # We found a number of LicensePools. 
Try to locate one that - # we can actually loan to the patron. - for pool in pools: - problem_doc = self.apply_borrowing_policy(patron, pool) - if problem_doc: - # As a matter of policy, the patron is not allowed to borrow - # this book. - continue - - # Beyond this point we know that site policy does not prohibit - # us from lending this pool to this patron. - - if mechanism_id: - # But the patron has requested a license pool that - # supports a specific delivery mechanism. This pool - # must offer that mechanism. - mechanism = self.load_licensepooldelivery(pool, mechanism_id) - if isinstance(mechanism, ProblemDetail): - problem_doc = mechanism - continue - - # Beyond this point we have a license pool that we can - # actually loan or put on hold. - - # But there might be many such LicensePools, and we want - # to pick the one that will get the book to the patron - # with the shortest wait. - if ( - not best - or pool.licenses_available > best.licenses_available - or pool.patrons_in_hold_queue < best.patrons_in_hold_queue - ): - best = pool - - if not best: - # We were unable to find any LicensePool that fit the - # criteria. - return problem_doc - return best, mechanism - - def fulfill( - self, - license_pool_id: int, - mechanism_id: int | None = None, - do_get: Any | None = None, - ) -> wkResponse | ProblemDetail: - """Fulfill a book that has already been checked out, - or which can be fulfilled with no active loan. - - If successful, this will serve the patron a downloadable copy - of the book, a key (such as a DRM license file or bearer - token) which can be used to get the book, or an OPDS entry - containing a link to the book. - - :param license_pool_id: Database ID of a LicensePool. - :param mechanism_id: Database ID of a DeliveryMechanism. - """ - do_get = do_get or Representation.simple_http_get - - # Unlike most controller methods, this one has different - # behavior whether or not the patron is authenticated. This is - # why we're about to do something we don't usually do--call - # authenticated_patron_from_request from within a controller - # method. - authentication_response = self.authenticated_patron_from_request() - if isinstance(authentication_response, Patron): - # The patron is authenticated. - patron = authentication_response - else: - # The patron is not authenticated, either due to bad credentials - # (in which case authentication_response is a Response) - # or due to an integration error with the auth provider (in - # which case it is a ProblemDetail). - # - # There's still a chance this request can succeed, but if not, - # we'll be sending out authentication_response. - patron = None - library = flask.request.library # type: ignore - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - - # Turn source + identifier into a LicensePool. - pool = self.load_licensepool(license_pool_id) - if isinstance(pool, ProblemDetail): - return pool - - loan, loan_license_pool = self.get_patron_loan(patron, [pool]) - - requested_license_pool = loan_license_pool or pool - - # Find the LicensePoolDeliveryMechanism they asked for. 
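The selection loop in `best_lendable_pool` above is greedy: starting from the first eligible pool, a candidate displaces the current best if it has more available licenses or a shorter hold queue. Note that the `or` makes the outcome order-dependent: a later pool with a shorter queue can displace an earlier one that actually has copies on shelf. Distilled below, with a hypothetical stand-in type:

```python
from dataclasses import dataclass

@dataclass
class PoolStats:
    licenses_available: int
    patrons_in_hold_queue: int

def best_pool(pools: list[PoolStats]) -> PoolStats | None:
    best = None
    for pool in pools:
        if (
            best is None
            or pool.licenses_available > best.licenses_available
            or pool.patrons_in_hold_queue < best.patrons_in_hold_queue
        ):
            best = pool
    return best

# Two copies on shelf wins over empty pools with shorter hold queues.
assert best_pool([PoolStats(0, 12), PoolStats(2, 3), PoolStats(0, 5)]) == PoolStats(2, 3)
```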
- mechanism = None - if mechanism_id: - mechanism = self.load_licensepooldelivery( - requested_license_pool, mechanism_id - ) - if isinstance(mechanism, ProblemDetail): - return mechanism - - if (not loan or not loan_license_pool) and not ( - self.can_fulfill_without_loan( - library, patron, requested_license_pool, mechanism - ) - ): - if patron: - # Since a patron was identified, the problem is they have - # no active loan. - return NO_ACTIVE_LOAN.detailed( - _("You have no active loan for this title.") - ) - else: - # Since no patron was identified, the problem is - # whatever problem was revealed by the earlier - # authenticated_patron_from_request() call -- either the - # patron didn't authenticate or there's a problem - # integrating with the auth provider. - return authentication_response - - if not mechanism: - # See if the loan already has a mechanism set. We can use that. - if loan and loan.fulfillment: - mechanism = loan.fulfillment - else: - return BAD_DELIVERY_MECHANISM.detailed( - _("You must specify a delivery mechanism to fulfill this loan.") - ) - - try: - fulfillment = self.circulation.fulfill( - patron, - credential, - requested_license_pool, - mechanism, - ) - except DeliveryMechanismConflict as e: - return DELIVERY_CONFLICT.detailed(str(e)) - except NoActiveLoan as e: - return NO_ACTIVE_LOAN.detailed( - _("Can't fulfill loan because you have no active loan for this book."), - status_code=e.status_code, - ) - except FormatNotAvailable as e: - return NO_ACCEPTABLE_FORMAT.with_debug(str(e), status_code=e.status_code) - except CannotFulfill as e: - return CANNOT_FULFILL.with_debug(str(e), status_code=e.status_code) - except DeliveryMechanismError as e: - return BAD_DELIVERY_MECHANISM.with_debug(str(e), status_code=e.status_code) - - # A subclass of FulfillmentInfo may want to bypass the whole - # response creation process. - response = fulfillment.as_response - if response is not None: - return response - - headers = dict() - encoding_header = dict() - if ( - fulfillment.data_source_name == DataSource.ENKI - and mechanism.delivery_mechanism.drm_scheme_media_type - == DeliveryMechanism.NO_DRM - ): - encoding_header["Accept-Encoding"] = "deflate" - - if mechanism.delivery_mechanism.is_streaming: - # If this is a streaming delivery mechanism, create an OPDS entry - # with a fulfillment link to the streaming reader url. - feed = OPDSAcquisitionFeed.single_entry_loans_feed( - self.circulation, loan, fulfillment=fulfillment - ) - if isinstance(feed, ProblemDetail): - # This should typically never happen, since we've gone through the entire fulfill workflow - # But for the sake of return-type completeness we are adding this here - return feed - if isinstance(feed, Response): - return feed - else: - content = etree.tostring(feed) - status_code = 200 - headers["Content-Type"] = OPDSFeed.ACQUISITION_FEED_TYPE - elif fulfillment.content_link_redirect is True: - # The fulfillment API has asked us to not be a proxy and instead redirect the client directly - return redirect(fulfillment.content_link) - else: - content = fulfillment.content - if fulfillment.content_link: - # If we have a link to the content on a remote server, web clients may not - # be able to access it if the remote server does not support CORS requests. - - # If the pool is open access though, the web client can link directly to the - # file to download it, so it's safe to redirect. 
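The branch that resumes below chooses between redirecting the client and proxying the bytes. A sketch of the same decision as a free function; the `requests` dependency and the helper name are assumptions for illustration, not part of the patch:

```python
import requests
from flask import Response, redirect

def deliver_content(content_link: str, open_access: bool) -> Response:
    if open_access:
        # Open-access files can be fetched directly by the web client,
        # so a plain redirect is safe.
        return redirect(content_link)
    # Licensed content may sit behind an indirect acquisition link or a
    # server without CORS support, so fetch it here and relay the bytes.
    upstream = requests.get(content_link, timeout=30)
    content_type = upstream.headers.get("Content-Type", "application/octet-stream")
    return Response(
        upstream.content,
        status=upstream.status_code,
        headers={"Content-Type": content_type},
    )
```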
- if requested_license_pool.open_access: - return redirect(fulfillment.content_link) - - # Otherwise, we need to fetch the content and return it instead - # of redirecting to it, since it may be downloaded through an - # indirect acquisition link. - try: - status_code, headers, content = do_get( - fulfillment.content_link, headers=encoding_header - ) - headers = dict(headers) - except RemoteIntegrationException as e: - return e.as_problem_detail_document(debug=False) - else: - status_code = 200 - if fulfillment.content_type: - headers["Content-Type"] = fulfillment.content_type - - return Response(response=content, status=status_code, headers=headers) - - def can_fulfill_without_loan(self, library, patron, pool, lpdm): - """Is it acceptable to fulfill the given LicensePoolDeliveryMechanism - for the given Patron without creating a Loan first? - - This question is usually asked because no Patron has been - authenticated, and thus no Loan can be created, but somebody - wants a book anyway. - - :param library: A Library. - :param patron: A Patron, probably None. - :param lpdm: A LicensePoolDeliveryMechanism. - """ - authenticator = self.manager.auth.library_authenticators.get(library.short_name) - if authenticator and authenticator.identifies_individuals: - # This library identifies individual patrons, so there is - # no reason to fulfill books without a loan. Even if the - # books are free and the 'loans' are nominal, having a - # Loan object makes it possible for a patron to sync their - # collection across devices, so that's the way we do it. - return False - - # If the library doesn't require that individual patrons - # identify themselves, it's up to the CirculationAPI object. - # Most of them will say no. (This would indicate that the - # collection is improperly associated with a library that - # doesn't identify its patrons.) - return self.circulation.can_fulfill_without_loan(patron, pool, lpdm) - - def revoke(self, license_pool_id): - patron = flask.request.patron - pool = self.load_licensepool(license_pool_id) - if isinstance(pool, ProblemDetail): - return pool - - loan, _ignore = self.get_patron_loan(patron, [pool]) - - if loan: - hold = None - else: - hold, _ignore = self.get_patron_hold(patron, [pool]) - - if not loan and not hold: - if not pool.work: - title = "this book" - else: - title = '"%s"' % pool.work.title - return NO_ACTIVE_LOAN_OR_HOLD.detailed( - _( - 'Can\'t revoke because you have no active loan or hold for "%(title)s".', - title=title, - ), - status_code=404, - ) - - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - if loan: - try: - self.circulation.revoke_loan(patron, credential, pool) - except RemoteRefusedReturn as e: - title = _( - "Loan deleted locally but remote refused. Loan is likely to show up again on next sync." 
-                )
-                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, status_code=503)
-            except CannotReturn as e:
-                title = _("Loan deleted locally but remote failed.")
-                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, 503).with_debug(
-                    str(e)
-                )
-        elif hold:
-            if not self.circulation.can_revoke_hold(pool, hold):
-                title = _("Cannot release a hold once it enters reserved state.")
-                return CANNOT_RELEASE_HOLD.detailed(title, 400)
-            try:
-                self.circulation.release_hold(patron, credential, pool)
-            except CannotReleaseHold as e:
-                title = _("Hold released locally but remote failed.")
-                return CANNOT_RELEASE_HOLD.detailed(title, 503).with_debug(str(e))
-
-        work = pool.work
-        annotator = self.manager.annotator(None)
-        return OPDSAcquisitionFeed.entry_as_response(
-            OPDSAcquisitionFeed.single_entry(work, annotator)
-        )
-
-    def detail(self, identifier_type, identifier):
-        if flask.request.method == "DELETE":
-            return self.revoke_loan_or_hold(identifier_type, identifier)
-
-        patron = flask.request.patron
-        library = flask.request.library
-        pools = self.load_licensepools(library, identifier_type, identifier)
-        if isinstance(pools, ProblemDetail):
-            return pools
-
-        loan, pool = self.get_patron_loan(patron, pools)
-        if loan:
-            hold = None
-        else:
-            hold, pool = self.get_patron_hold(patron, pools)
-
-        if not loan and not hold:
-            return NO_ACTIVE_LOAN_OR_HOLD.detailed(
-                _(
-                    'You have no active loan or hold for "%(title)s".',
-                    title=pool.work.title,
-                ),
-                status_code=404,
-            )
-
-        if flask.request.method == "GET":
-            if loan:
-                item = loan
-            else:
-                item = hold
-            return OPDSAcquisitionFeed.single_entry_loans_feed(self.circulation, item)
-
-
-class AnnotationController(CirculationManagerController):
-    def container(self, identifier=None, accept_post=True):
-        headers = dict()
-        if accept_post:
-            headers["Allow"] = "GET,HEAD,OPTIONS,POST"
-            headers["Accept-Post"] = AnnotationWriter.CONTENT_TYPE
-        else:
-            headers["Allow"] = "GET,HEAD,OPTIONS"
-
-        if flask.request.method == "HEAD":
-            return Response(status=200, headers=headers)
-
-        patron = flask.request.patron
-
-        if flask.request.method == "GET":
-            headers["Link"] = [
-                '<http://www.w3.org/ns/ldp#BasicContainer>; rel="type"',
-                '<http://www.w3.org/TR/annotation-protocol/>; rel="http://www.w3.org/ns/ldp#constrainedBy"',
-            ]
-            headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE
-
-            container, timestamp = AnnotationWriter.annotation_container_for(
-                patron, identifier=identifier
-            )
-            etag = 'W/""'
-            if timestamp:
-                etag = 'W/"%s"' % timestamp
-                headers["Last-Modified"] = format_date_time(
-                    mktime(timestamp.timetuple())
-                )
-            headers["ETag"] = etag
-
-            content = json.dumps(container)
-            return Response(content, status=200, headers=headers)
-
-        data = flask.request.data
-        annotation = AnnotationParser.parse(self._db, data, patron)
-
-        if isinstance(annotation, ProblemDetail):
-            return annotation
-
-        content = json.dumps(AnnotationWriter.detail(annotation))
-        status_code = 200
-        headers["Link"] = '<http://www.w3.org/ns/oa#Annotation>; rel="type"'
-        headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE
-        return Response(content, status_code, headers)
-
-    def container_for_work(self, identifier_type, identifier):
-        id_obj, ignore = Identifier.for_foreign_id(
-            self._db, identifier_type, identifier
-        )
-        return self.container(identifier=id_obj, accept_post=False)
-
-    def detail(self, annotation_id):
-        headers = dict()
-        headers["Allow"] = "GET,HEAD,OPTIONS,DELETE"
-
-        if flask.request.method == "HEAD":
-            return Response(status=200, headers=headers)
-
-        patron = flask.request.patron
-
-        annotation = get_one(
-            self._db, Annotation, patron=patron, id=annotation_id, active=True
-        )
-
-        if not annotation:
-            return NO_ANNOTATION
-
-        if flask.request.method == "DELETE":
-            annotation.set_inactive()
-            return Response()
-
-        content = json.dumps(AnnotationWriter.detail(annotation))
-        status_code = 200
-        headers["Link"] = '<http://www.w3.org/ns/oa#Annotation>; rel="type"'
-        headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE
-        return Response(content, status_code, headers)
-
-
-class WorkController(CirculationManagerController):
-    def _lane_details(self, languages, audiences):
-        if languages:
-            languages = languages.split(",")
-        if audiences:
-            audiences = [urllib.parse.unquote_plus(a) for a in audiences.split(",")]
-
-        return languages, audiences
-
-    def contributor(
-        self, contributor_name, languages, audiences, feed_class=OPDSAcquisitionFeed
-    ):
-        """Serve a feed of books written by a particular author"""
-        library = flask.request.library
-        if not contributor_name:
-            return NO_SUCH_LANE.detailed(_("No contributor provided"))
-
-        # contributor_name is probably a display_name, but it could be a
-        # sort_name. Pass it in for both fields and
-        # ContributorData.lookup() will do its best to figure it out.
-        contributor = ContributorData.lookup(
-            self._db, sort_name=contributor_name, display_name=contributor_name
-        )
-        if not contributor:
-            return NO_SUCH_LANE.detailed(
-                _("Unknown contributor: %s") % contributor_name
-            )
-
-        search_engine = self.search_engine
-        if isinstance(search_engine, ProblemDetail):
-            return search_engine
-
-        languages, audiences = self._lane_details(languages, audiences)
-
-        lane = ContributorLane(
-            library, contributor, languages=languages, audiences=audiences
-        )
-        facets = self.manager.load_facets_from_request(
-            worklist=lane, base_class=ContributorFacets
-        )
-        if isinstance(facets, ProblemDetail):
-            return facets
-
-        pagination = load_pagination_from_request(SortKeyPagination)
-        if isinstance(pagination, ProblemDetail):
-            return pagination
-
-        annotator = self.manager.annotator(lane, facets)
-
-        url = annotator.feed_url(
-            lane,
-            facets=facets,
-            pagination=pagination,
-        )
-
-        return feed_class.page(
-            _db=self._db,
-            title=lane.display_name,
-            url=url,
-            worklist=lane,
-            facets=facets,
-            pagination=pagination,
-            annotator=annotator,
-            search_engine=search_engine,
-        ).as_response(
-            max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes
-        )
-
-    def permalink(self, identifier_type, identifier):
-        """Serve an entry for a single book.
-
-        This does not include any loan or hold-specific information for
-        the authenticated patron.
-
-        This is different from the /works lookup protocol, in that it
-        returns a single entry while the /works lookup protocol returns a
-        feed containing any number of entries.
- """ - library = flask.request.library - work = self.load_work(library, identifier_type, identifier) - if isinstance(work, ProblemDetail): - return work - - patron = flask.request.patron - - if patron: - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - - loan, pool = self.get_patron_loan(patron, pools) - hold = None - - if not loan: - hold, pool = self.get_patron_hold(patron, pools) - - item = loan or hold - pool = pool or pools[0] - - return OPDSAcquisitionFeed.single_entry_loans_feed( - self.circulation, item or pool - ) - else: - annotator = self.manager.annotator(lane=None) - - return OPDSAcquisitionFeed.entry_as_response( - OPDSAcquisitionFeed.single_entry(work, annotator), - max_age=OPDSFeed.DEFAULT_MAX_AGE, - ) - - def related( - self, - identifier_type, - identifier, - novelist_api=None, - feed_class=OPDSAcquisitionFeed, - ): - """Serve a groups feed of books related to a given book.""" - - library = flask.request.library - work = self.load_work(library, identifier_type, identifier) - if work is None: - return NOT_FOUND_ON_REMOTE - - if isinstance(work, ProblemDetail): - return work - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - try: - lane_name = f"Books Related to {work.title} by {work.author}" - lane = RelatedBooksLane(library, work, lane_name, novelist_api=novelist_api) - except ValueError as e: - # No related books were found. - return NO_SUCH_LANE.detailed(str(e)) - - facets = self.manager.load_facets_from_request( - worklist=lane, - base_class=FeaturedFacets, - base_class_constructor_kwargs=dict( - minimum_featured_quality=library.settings.minimum_featured_quality - ), - ) - if isinstance(facets, ProblemDetail): - return facets - - annotator = self.manager.annotator(lane) - url = annotator.feed_url( - lane, - facets=facets, - ) - - return feed_class.groups( - _db=self._db, - title=lane.DISPLAY_NAME, - url=url, - worklist=lane, - annotator=annotator, - pagination=None, - facets=facets, - search_engine=search_engine, - ).as_response( - max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes - ) - - def recommendations( - self, - identifier_type, - identifier, - novelist_api=None, - feed_class=OPDSAcquisitionFeed, - ): - """Serve a feed of recommendations related to a given book.""" - - library = flask.request.library - work = self.load_work(library, identifier_type, identifier) - if isinstance(work, ProblemDetail): - return work - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - lane_name = f"Recommendations for {work.title} by {work.author}" - try: - lane = RecommendationLane( - library=library, - work=work, - display_name=lane_name, - novelist_api=novelist_api, - ) - except CannotLoadConfiguration as e: - # NoveList isn't configured. - return NO_SUCH_LANE.detailed(_("Recommendations not available")) - - facets = self.manager.load_facets_from_request(worklist=lane) - if isinstance(facets, ProblemDetail): - return facets - - # We use a normal Pagination object because recommendations - # are looked up in a third-party API and paginated through the - # database lookup. 
- pagination = load_pagination_from_request(Pagination) - if isinstance(pagination, ProblemDetail): - return pagination - - annotator = self.manager.annotator(lane) - url = annotator.feed_url( - lane, - facets=facets, - pagination=pagination, - ) - - return feed_class.page( - _db=self._db, - title=lane.DISPLAY_NAME, - url=url, - worklist=lane, - facets=facets, - pagination=pagination, - annotator=annotator, - search_engine=search_engine, - ).as_response(max_age=lane.max_cache_age()) - - def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFeed): - """Serve a feed of books in a given series.""" - library = flask.request.library - if not series_name: - return NO_SUCH_LANE.detailed(_("No series provided")) - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - languages, audiences = self._lane_details(languages, audiences) - lane = SeriesLane( - library, series_name=series_name, languages=languages, audiences=audiences - ) - - facets = self.manager.load_facets_from_request( - worklist=lane, base_class=SeriesFacets - ) - if isinstance(facets, ProblemDetail): - return facets - - pagination = load_pagination_from_request(SortKeyPagination) - if isinstance(pagination, ProblemDetail): - return pagination - - annotator = self.manager.annotator(lane) - - url = annotator.feed_url(lane, facets=facets, pagination=pagination) - return feed_class.page( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - facets=facets, - pagination=pagination, - annotator=annotator, - search_engine=search_engine, - ).as_response( - max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes - ) - - -class ProfileController(CirculationManagerController): - """Implement the User Profile Management Protocol.""" - - def _controller(self, patron): - """Instantiate a CoreProfileController that actually does the work.""" - storage = CirculationPatronProfileStorage(patron, flask.url_for) - return CoreProfileController(storage) - - def protocol(self): - """Handle a UPMP request.""" - patron = flask.request.patron - controller = self._controller(patron) - if flask.request.method == "GET": - result = controller.get() - else: - result = controller.put(flask.request.headers, flask.request.data) - if isinstance(result, ProblemDetail): - return result - return make_response(*result) - - -class DeviceTokensController(CirculationManagerController): - def get_patron_device(self): - patron = flask.request.patron - device_token = flask.request.args["device_token"] - token: DeviceToken = ( - self._db.query(DeviceToken) - .filter( - DeviceToken.patron_id == patron.id, - DeviceToken.device_token == device_token, - ) - .first() - ) - if not token: - return DEVICE_TOKEN_NOT_FOUND - return dict(token_type=token.token_type, device_token=token.device_token), 200 - - def create_patron_device(self): - patron = flask.request.patron - device_token = flask.request.json["device_token"] - token_type = flask.request.json["token_type"] - - try: - device = DeviceToken.create(self._db, token_type, device_token, patron) - except InvalidTokenTypeError: - return DEVICE_TOKEN_TYPE_INVALID - except DuplicateDeviceTokenError: - return dict(exists=True), 200 - - return "", 201 - - def delete_patron_device(self): - patron = flask.request.patron - device_token = flask.request.json["device_token"] - token_type = flask.request.json["token_type"] - - try: - device: DeviceToken = ( - self._db.query(DeviceToken) - .filter( - DeviceToken.patron == patron, - 
DeviceToken.device_token == device_token, - DeviceToken.token_type == token_type, - ) - .one() - ) - self._db.delete(device) - except NoResultFound: - return DEVICE_TOKEN_NOT_FOUND - - return Response("", 204) - - -class URNLookupController(CoreURNLookupController): - def __init__(self, manager): - self.manager = manager - super().__init__(manager._db) - - def work_lookup(self, route_name): - """Build a CirculationManagerAnnotor based on the current library's - top-level WorkList, and use it to generate an OPDS lookup - feed. - """ - library = flask.request.library - top_level_worklist = self.manager.top_level_lanes[library.id] - annotator = CirculationManagerAnnotator(top_level_worklist) - return super().work_lookup(annotator, route_name) - - -class AnalyticsController(CirculationManagerController): - def track_event(self, identifier_type, identifier, event_type): - # TODO: It usually doesn't matter, but there should be - # a way to distinguish between different LicensePools for the - # same book. - if event_type in CirculationEvent.CLIENT_EVENTS: - library = flask.request.library - # Authentication on the AnalyticsController is optional, - # so flask.request.patron may or may not be set. - patron = getattr(flask.request, "patron", None) - neighborhood = None - if patron: - neighborhood = getattr(patron, "neighborhood", None) - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - self.manager.analytics.collect_event( - library, pools[0], event_type, utc_now(), neighborhood=neighborhood - ) - return Response({}, 200) - else: - return INVALID_ANALYTICS_EVENT_TYPE - - -class PlaytimeEntriesController(CirculationManagerController): - def track_playtimes(self, collection_id, identifier_type, identifier_idn): - library: Library = flask.request.library - identifier = get_one( - self._db, Identifier, type=identifier_type, identifier=identifier_idn - ) - collection = Collection.by_id(self._db, collection_id) - - if not identifier: - return NOT_FOUND_ON_REMOTE.detailed( - f"The identifier {identifier_type}/{identifier_idn} was not found." - ) - if not collection: - return NOT_FOUND_ON_REMOTE.detailed( - f"The collection {collection_id} was not found." - ) - - if collection not in library.collections: - return INVALID_INPUT.detailed("Collection was not found in the Library.") - - if not identifier.licensed_through_collection(collection): - return INVALID_INPUT.detailed( - "This Identifier was not found in the Collection." - ) - - try: - data = PlaytimeEntriesPost(**flask.request.json) - except ValidationError as ex: - return INVALID_INPUT.detailed(ex.json()) - - responses, summary = PlaytimeEntries.insert_playtime_entries( - self._db, identifier, collection, library, data - ) - - response_data = PlaytimeEntriesPostResponse( - summary=summary, responses=responses - ) - response = flask.jsonify(response_data.dict()) - response.status_code = 207 - return response - - -class ODLNotificationController(CirculationManagerController): - """Receive notifications from an ODL distributor when the - status of a loan changes. 
- """ - - def notify(self, loan_id): - library = flask.request.library - status_doc = flask.request.data - loan = get_one(self._db, Loan, id=loan_id) - - if not loan: - return NO_ACTIVE_LOAN.detailed(_("No loan was found for this identifier.")) - - collection = loan.license_pool.collection - if collection.protocol not in (ODLAPI.label(), ODL2API.label()): - return INVALID_LOAN_FOR_ODL_NOTIFICATION - - api = self.manager.circulation_apis[library.id].api_for_license_pool( - loan.license_pool - ) - api.update_loan(loan, json.loads(status_doc)) - return Response(_("Success"), 200) - - -class StaticFileController(CirculationManagerController): - def static_file(self, directory, filename): - max_age = ConfigurationSetting.sitewide( - self._db, Configuration.STATIC_FILE_CACHE_TIME - ).int_value - return flask.send_from_directory(directory, filename, max_age=max_age) - - def image(self, filename): - directory = os.path.join( - os.path.abspath(os.path.dirname(__file__)), "..", "resources", "images" - ) - return self.static_file(directory, filename) - - -# Finland -class CatalogDescriptionsController(CirculationManagerController): - def get_catalogs(self, library_uuid=None): - catalogs = [] - libraries = [] - - if library_uuid != None: - try: - libraries = [ - self._db.query(Library).filter(Library.uuid == library_uuid).one() - ] - except NoResultFound: - return LIBRARY_NOT_FOUND - else: - libraries = self._db.query(Library).order_by(Library.name).all() - - for library in libraries: - settings = library.settings_dict - images = [] - if library.logo: - images += [ - { - "rel": "http://opds-spec.org/image/thumbnail", - "href": library.logo.data_url, - "type": "image/png", - } - ] - - authentication_document_url = url_for( - "authentication_document", - library_short_name=library.short_name, - _external=True, - ) - - catalog_url = url_for( - "acquisition_groups", - library_short_name=library.short_name, - _external=True, - ) - - timenow = utc_now().strftime("%Y-%m-%dT%H:%M:%SZ") - - metadata = { - "id": "urn:uuid:" + library.uuid, - "title": library.name, - "short_name": library.short_name, - "modified": timenow, - "updated": timenow, - "isAutomatic": False, - } - - if "library_description" in settings: - metadata["description"] = settings["library_description"] - - links = [ - { - "rel": "http://opds-spec.org/catalog", - "href": catalog_url, - "type": "application/atom+xml;profile=opds-catalog;kind=acquisition", - }, - { - "href": authentication_document_url, - "type": "application/vnd.opds.authentication.v1.0+json", - }, - ] - - if "help_web" in settings: - links += [{"href": settings["help_web"], "rel": "help"}] - elif "help_email" in settings: - links += [{"href": "mailto:" + settings["help_email"], "rel": "help"}] - - catalogs += [{"metadata": metadata, "links": links, "images": images}] - - response_json = { - "metadata": {"title": "Libraries"}, - "catalogs": catalogs, - "links": [ - { - "rel": "self", - "href": url_for("client_libraries", _external=True), - "type": "application/opds+json", - } - ], - } - - return Response( - json_serializer(response_json), - status=200, - mimetype="application/json", - ) - - -class PatronAuthTokenController(CirculationManagerController): - def get_token(self): - """Create a Patron Auth access token for an authenticated patron""" - patron = flask.request.patron - auth = flask.request.authorization - token_expiry = 3600 - - if not patron or auth.type.lower() != "basic": - return PATRON_AUTH_ACCESS_TOKEN_NOT_POSSIBLE - - try: - token = 
AccessTokenProvider.generate_token(
-                self._db,
-                patron,
-                auth["password"],
-                expires_in=token_expiry,
-            )
-        except ProblemError as ex:
-            logging.getLogger(self.__class__.__name__).error(
-                f"Could not generate Patron Auth Access Token: {ex}"
-            )
-            return ex.problem_detail
-
-        return PatronAuthAccessToken(
-            access_token=token, expires_in=token_expiry, token_type="Bearer"
-        ).api_dict()
diff --git a/api/controller/analytics.py b/api/controller/analytics.py
new file mode 100644
index 000000000..17a4bc21e
--- /dev/null
+++ b/api/controller/analytics.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+import flask
+from flask import Response
+
+from api.controller.circulation_manager import CirculationManagerController
+from api.problem_details import INVALID_ANALYTICS_EVENT_TYPE
+from core.model import CirculationEvent
+from core.util.datetime_helpers import utc_now
+from core.util.problem_detail import ProblemDetail
+
+
+class AnalyticsController(CirculationManagerController):
+    def track_event(self, identifier_type, identifier, event_type):
+        # TODO: It usually doesn't matter, but there should be
+        # a way to distinguish between different LicensePools for the
+        # same book.
+        if event_type in CirculationEvent.CLIENT_EVENTS:
+            library = flask.request.library
+            # Authentication on the AnalyticsController is optional,
+            # so flask.request.patron may or may not be set.
+            patron = getattr(flask.request, "patron", None)
+            neighborhood = None
+            if patron:
+                neighborhood = getattr(patron, "neighborhood", None)
+            pools = self.load_licensepools(library, identifier_type, identifier)
+            if isinstance(pools, ProblemDetail):
+                return pools
+            self.manager.analytics.collect_event(
+                library, pools[0], event_type, utc_now(), neighborhood=neighborhood
+            )
+            return Response({}, 200)
+        else:
+            return INVALID_ANALYTICS_EVENT_TYPE
diff --git a/api/controller/annotation.py b/api/controller/annotation.py
new file mode 100644
index 000000000..64eabbcf9
--- /dev/null
+++ b/api/controller/annotation.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import json
+from time import mktime
+from wsgiref.handlers import format_date_time
+
+import flask
+from flask import Response
+
+from api.annotations import AnnotationParser, AnnotationWriter
+from api.controller.circulation_manager import CirculationManagerController
+from api.problem_details import NO_ANNOTATION
+from core.model import Annotation, Identifier, get_one
+from core.util.problem_detail import ProblemDetail
+
+
+class AnnotationController(CirculationManagerController):
+    def container(self, identifier=None, accept_post=True):
+        headers = dict()
+        if accept_post:
+            headers["Allow"] = "GET,HEAD,OPTIONS,POST"
+            headers["Accept-Post"] = AnnotationWriter.CONTENT_TYPE
+        else:
+            headers["Allow"] = "GET,HEAD,OPTIONS"
+
+        if flask.request.method == "HEAD":
+            return Response(status=200, headers=headers)
+
+        patron = flask.request.patron
+
+        if flask.request.method == "GET":
+            headers["Link"] = [
+                '<http://www.w3.org/ns/ldp#BasicContainer>; rel="type"',
+                '<http://www.w3.org/TR/annotation-protocol/>; rel="http://www.w3.org/ns/ldp#constrainedBy"',
+            ]
+            headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE
+
+            container, timestamp = AnnotationWriter.annotation_container_for(
+                patron, identifier=identifier
+            )
+            etag = 'W/""'
+            if timestamp:
+                etag = 'W/"%s"' % timestamp
+                headers["Last-Modified"] = format_date_time(
+                    mktime(timestamp.timetuple())
+                )
+            headers["ETag"] = etag
+
+            content = json.dumps(container)
+            return Response(content, status=200, headers=headers)
+
+        data = flask.request.data
+        annotation = AnnotationParser.parse(self._db, data, patron)
+
+        if isinstance(annotation, ProblemDetail):
+            return annotation
+
+        content = json.dumps(AnnotationWriter.detail(annotation))
+        status_code = 200
+        headers["Link"] = '<http://www.w3.org/ns/oa#Annotation>; rel="type"'
+        headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE
+        return Response(content, status_code, headers)
+
+    def container_for_work(self, identifier_type, identifier):
+        id_obj, ignore = Identifier.for_foreign_id(
+            self._db, identifier_type, identifier
+        )
+        return self.container(identifier=id_obj, accept_post=False)
+
+    def detail(self, annotation_id):
+        headers = dict()
+        headers["Allow"] = "GET,HEAD,OPTIONS,DELETE"
+
+        if flask.request.method == "HEAD":
+            return Response(status=200, headers=headers)
+
+        patron = flask.request.patron
+
+        annotation = get_one(
+            self._db, Annotation, patron=patron, id=annotation_id, active=True
+        )
+
+        if not annotation:
+            return NO_ANNOTATION
+
+        if flask.request.method == "DELETE":
+            annotation.set_inactive()
+            return Response()
+
+        content = json.dumps(AnnotationWriter.detail(annotation))
+        status_code = 200
+        headers["Link"] = '<http://www.w3.org/ns/oa#Annotation>; rel="type"'
+        headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE
+        return Response(content, status_code, headers)
diff --git a/api/base_controller.py b/api/controller/base.py
similarity index 97%
rename from api/base_controller.py
rename to api/controller/base.py
index 62dd1a000..de11511c4 100644
--- a/api/base_controller.py
+++ b/api/controller/base.py
@@ -6,10 +6,11 @@
 from api.circulation_exceptions import *
 from api.problem_details import *
 from core.model import Library, Patron
+from core.util.log import LoggerMixin
 from core.util.problem_detail import ProblemDetail
 
 
-class BaseCirculationManagerController:
+class BaseCirculationManagerController(LoggerMixin):
     """Define minimal standards for a circulation manager controller,
     mainly around authentication.
""" diff --git a/api/controller/catalog_descriptions.py b/api/controller/catalog_descriptions.py new file mode 100644 index 000000000..4e045241f --- /dev/null +++ b/api/controller/catalog_descriptions.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +from flask import Response +from sqlalchemy.orm.exc import NoResultFound + +from api.circulation_exceptions import * +from api.controller.circulation_manager import CirculationManagerController +from api.problem_details import * +from core.app_server import url_for +from core.model import Library, json_serializer +from core.util.datetime_helpers import utc_now + + +# Finland +class CatalogDescriptionsController(CirculationManagerController): + def get_catalogs(self, library_uuid=None): + catalogs = [] + libraries = [] + + if library_uuid != None: + try: + libraries = [ + self._db.query(Library).filter(Library.uuid == library_uuid).one() + ] + except NoResultFound: + return LIBRARY_NOT_FOUND + else: + libraries = self._db.query(Library).order_by(Library.name).all() + + for library in libraries: + settings = library.settings_dict + images = [] + if library.logo: + images += [ + { + "rel": "http://opds-spec.org/image/thumbnail", + "href": library.logo.data_url, + "type": "image/png", + } + ] + + authentication_document_url = url_for( + "authentication_document", + library_short_name=library.short_name, + _external=True, + ) + + catalog_url = url_for( + "acquisition_groups", + library_short_name=library.short_name, + _external=True, + ) + + timenow = utc_now().strftime("%Y-%m-%dT%H:%M:%SZ") + + metadata = { + "id": "urn:uuid:" + library.uuid, + "title": library.name, + "short_name": library.short_name, + "modified": timenow, + "updated": timenow, + "isAutomatic": False, + } + + if "library_description" in settings: + metadata["description"] = settings["library_description"] + + links = [ + { + "rel": "http://opds-spec.org/catalog", + "href": catalog_url, + "type": "application/atom+xml;profile=opds-catalog;kind=acquisition", + }, + { + "href": authentication_document_url, + "type": "application/vnd.opds.authentication.v1.0+json", + }, + ] + + if "help_web" in settings: + links += [{"href": settings["help_web"], "rel": "help"}] + elif "help_email" in settings: + links += [{"href": "mailto:" + settings["help_email"], "rel": "help"}] + + catalogs += [{"metadata": metadata, "links": links, "images": images}] + + response_json = { + "metadata": {"title": "Libraries"}, + "catalogs": catalogs, + "links": [ + { + "rel": "self", + "href": url_for("client_libraries", _external=True), + "type": "application/opds+json", + } + ], + } + + return Response( + json_serializer(response_json), + status=200, + mimetype="application/json", + ) diff --git a/api/controller/circulation_manager.py b/api/controller/circulation_manager.py new file mode 100644 index 000000000..79aafc117 --- /dev/null +++ b/api/controller/circulation_manager.py @@ -0,0 +1,281 @@ +from __future__ import annotations + +import email + +import flask +import pytz +from flask import Response +from flask_babel import lazy_gettext as _ +from sqlalchemy import select +from sqlalchemy.orm import Session, eagerload + +from api.controller.base import BaseCirculationManagerController +from api.problem_details import ( + BAD_DELIVERY_MECHANISM, + FORBIDDEN_BY_POLICY, + NO_LICENSES, + NO_SUCH_LANE, + NOT_AGE_APPROPRIATE, + REMOTE_INTEGRATION_FAILED, +) +from core.lane import Lane, WorkList +from core.model import ( + Collection, + Hold, + Identifier, + IntegrationConfiguration, + 
IntegrationLibraryConfiguration, + LicensePool, + LicensePoolDeliveryMechanism, + Loan, + get_one, +) +from core.problem_details import INVALID_INPUT +from core.util.problem_detail import ProblemDetail + + +class CirculationManagerController(BaseCirculationManagerController): + def get_patron_circ_objects(self, object_class, patron, license_pools): + if not patron: + return [] + pool_ids = [pool.id for pool in license_pools] + + return ( + self._db.query(object_class) + .filter( + object_class.patron_id == patron.id, + object_class.license_pool_id.in_(pool_ids), + ) + .options(eagerload(object_class.license_pool)) + .all() + ) + + def get_patron_loan(self, patron, license_pools): + loans = self.get_patron_circ_objects(Loan, patron, license_pools) + if loans: + loan = loans[0] + return loan, loan.license_pool + return None, None + + def get_patron_hold(self, patron, license_pools): + holds = self.get_patron_circ_objects(Hold, patron, license_pools) + if holds: + hold = holds[0] + return hold, hold.license_pool + return None, None + + @property + def circulation(self): + """Return the appropriate CirculationAPI for the request Library.""" + library_id = flask.request.library.id + return self.manager.circulation_apis[library_id] + + @property + def search_engine(self): + """Return the configured external search engine, or a + ProblemDetail if none is configured. + """ + search_engine = self.manager.external_search + if not search_engine: + return REMOTE_INTEGRATION_FAILED.detailed( + _("The search index for this site is not properly configured.") + ) + return search_engine + + def handle_conditional_request(self, last_modified=None): + """Handle a conditional HTTP request. + + :param last_modified: A datetime representing the time this + resource was last modified. + + :return: a Response, if the incoming request can be handled + conditionally. Otherwise, None. + """ + if not last_modified: + return None + + # If-Modified-Since values have resolution of one second. If + # last_modified has millisecond resolution, change its + # resolution to one second. + if last_modified.microsecond: + last_modified = last_modified.replace(microsecond=0) + + if_modified_since = flask.request.headers.get("If-Modified-Since") + if not if_modified_since: + return None + + try: + parsed_if_modified_since = email.utils.parsedate_to_datetime( + if_modified_since + ) + except TypeError: + # Parse error <= Python 3.9 + return None + except ValueError: + # Parse error >= Python 3.10 + return None + if not parsed_if_modified_since: + return None + + # "[I]f the date is conforming to the RFCs it will represent a + # time in UTC but with no indication of the actual source + # timezone of the message the date comes from." + if parsed_if_modified_since.tzinfo is None: + parsed_if_modified_since = parsed_if_modified_since.replace(tzinfo=pytz.UTC) + + if parsed_if_modified_since >= last_modified: + return Response(status=304) + return None + + def load_lane(self, lane_identifier): + """Turn user input into a Lane object.""" + library_id = flask.request.library.id + + lane = None + if lane_identifier is None: + # Return the top-level lane. 
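+            # The top-level lane is cached on the circulation manager at
+            # startup, so it may be bound to a different database session;
+            # merge it into the current session before using it.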
+ lane = self.manager.top_level_lanes[library_id] + if isinstance(lane, Lane): + lane = self._db.merge(lane) + elif isinstance(lane, WorkList): + lane.children = [self._db.merge(child) for child in lane.children] + else: + try: + lane_identifier = int(lane_identifier) + except ValueError as e: + pass + + if isinstance(lane_identifier, int): + lane = get_one( + self._db, Lane, id=lane_identifier, library_id=library_id + ) + + if lane and not lane.accessible_to(self.request_patron): + # The authenticated patron cannot access the lane they + # requested. Act like the lane does not exist. + lane = None + + if not lane: + return NO_SUCH_LANE.detailed( + _( + "Lane %(lane_identifier)s does not exist or is not associated with library %(library_id)s", + lane_identifier=lane_identifier, + library_id=library_id, + ) + ) + + return lane + + def load_work(self, library, identifier_type, identifier): + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + return pools + + # We know there is at least one LicensePool, and all LicensePools + # for an Identifier have the same Work. + work = pools[0].work + + if work and not work.age_appropriate_for_patron(self.request_patron): + # This work is not age-appropriate for the authenticated + # patron. Don't show it. + work = NOT_AGE_APPROPRIATE + return work + + def load_licensepools(self, library, identifier_type, identifier): + """Turn user input into one or more LicensePool objects. + + :param library: The LicensePools must be associated with one of this + Library's Collections. + :param identifier_type: A type of identifier, e.g. "ISBN" + :param identifier: An identifier string, used with `identifier_type` + to look up an Identifier. + """ + _db = Session.object_session(library) + pools = ( + _db.scalars( + select(LicensePool) + .join(Collection, LicensePool.collection_id == Collection.id) + .join(Identifier, LicensePool.identifier_id == Identifier.id) + .join( + IntegrationConfiguration, + Collection.integration_configuration_id + == IntegrationConfiguration.id, + ) + .join( + IntegrationLibraryConfiguration, + IntegrationConfiguration.id + == IntegrationLibraryConfiguration.parent_id, + ) + .where( + Identifier.type == identifier_type, + Identifier.identifier == identifier, + IntegrationLibraryConfiguration.library_id == library.id, + ) + ) + .unique() + .all() + ) + if not pools: + return NO_LICENSES.detailed( + _("The item you're asking about (%s/%s) isn't in this collection.") + % (identifier_type, identifier) + ) + return pools + + def load_licensepool(self, license_pool_id): + """Turns user input into a LicensePool""" + license_pool = get_one(self._db, LicensePool, id=license_pool_id) + if not license_pool: + return INVALID_INPUT.detailed( + _("License Pool #%s does not exist.") % license_pool_id + ) + + return license_pool + + def load_licensepooldelivery(self, pool, mechanism_id): + """Turn user input into a LicensePoolDeliveryMechanism object.""" + mechanism = get_one( + self._db, + LicensePoolDeliveryMechanism, + data_source=pool.data_source, + identifier=pool.identifier, + delivery_mechanism_id=mechanism_id, + on_multiple="interchangeable", + ) + return mechanism or BAD_DELIVERY_MECHANISM + + def apply_borrowing_policy(self, patron, license_pool): + """Apply the borrowing policy of the patron's library to the + book they're trying to check out. + + This prevents a patron from borrowing an age-inappropriate book + or from placing a hold in a library that prohibits holds. 
+
+        Generally speaking, both of these operations should be
+        prevented before they get to this point; this is an extra
+        layer of protection.
+
+        :param patron: A `Patron`. It's okay if this turns out to be a
+            `ProblemDetail` or `None` due to a problem earlier in the
+            process.
+        :param license_pool: The `LicensePool` the patron is trying to act on.
+        """
+        if patron is None or isinstance(patron, ProblemDetail):
+            # An earlier stage in the process failed to authenticate
+            # the patron.
+            return patron
+
+        work = license_pool.work
+        if work is not None and not work.age_appropriate_for_patron(patron):
+            return NOT_AGE_APPROPRIATE
+
+        if (
+            not patron.library.settings.allow_holds
+            and license_pool.licenses_available == 0
+            and not license_pool.open_access
+            and not license_pool.unlimited_access
+        ):
+            return FORBIDDEN_BY_POLICY.detailed(
+                _("Library policy prohibits the placement of holds."), status_code=403
+            )
+        return None
diff --git a/api/controller/device_tokens.py b/api/controller/device_tokens.py
new file mode 100644
index 000000000..399d0f2d6
--- /dev/null
+++ b/api/controller/device_tokens.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+import flask
+from flask import Response
+from sqlalchemy.exc import NoResultFound
+
+from api.controller.circulation_manager import CirculationManagerController
+from api.problem_details import DEVICE_TOKEN_NOT_FOUND, DEVICE_TOKEN_TYPE_INVALID
+from core.model import DeviceToken
+from core.model.devicetokens import DuplicateDeviceTokenError, InvalidTokenTypeError
+
+
+class DeviceTokensController(CirculationManagerController):
+    def get_patron_device(self):
+        patron = flask.request.patron
+        device_token = flask.request.args["device_token"]
+        token: DeviceToken = (
+            self._db.query(DeviceToken)
+            .filter(
+                DeviceToken.patron_id == patron.id,
+                DeviceToken.device_token == device_token,
+            )
+            .first()
+        )
+        if not token:
+            return DEVICE_TOKEN_NOT_FOUND
+        return dict(token_type=token.token_type, device_token=token.device_token), 200
+
+    def create_patron_device(self):
+        patron = flask.request.patron
+        device_token = flask.request.json["device_token"]
+        token_type = flask.request.json["token_type"]
+
+        try:
+            device = DeviceToken.create(self._db, token_type, device_token, patron)
+        except InvalidTokenTypeError:
+            return DEVICE_TOKEN_TYPE_INVALID
+        except DuplicateDeviceTokenError:
+            return dict(exists=True), 200
+
+        return "", 201
+
+    def delete_patron_device(self):
+        patron = flask.request.patron
+        device_token = flask.request.json["device_token"]
+        token_type = flask.request.json["token_type"]
+
+        try:
+            device: DeviceToken = (
+                self._db.query(DeviceToken)
+                .filter(
+                    DeviceToken.patron == patron,
+                    DeviceToken.device_token == device_token,
+                    DeviceToken.token_type == token_type,
+                )
+                .one()
+            )
+            self._db.delete(device)
+        except NoResultFound:
+            return DEVICE_TOKEN_NOT_FOUND
+
+        return Response("", 204)
diff --git a/api/controller/index.py b/api/controller/index.py
new file mode 100644
index 000000000..7f1eb9871
--- /dev/null
+++ b/api/controller/index.py
@@ -0,0 +1,84 @@
+from __future__ import annotations
+
+import flask
+from flask import Response, redirect, url_for
+
+from api.controller.circulation_manager import CirculationManagerController
+from core.util.authentication_for_opds import AuthenticationForOPDSDocument
+from core.util.problem_detail import ProblemDetail
+
+
+class IndexController(CirculationManagerController):
+    """Redirect the patron to the appropriate feed."""
+
+    def __call__(self):
+        # If this library 
provides a custom index view, use that. + library = flask.request.library + custom = self.manager.custom_index_views.get(library.id) + if custom is not None: + annotator = self.manager.annotator(None) + return custom(library, annotator) + + # The simple case: the app is equally open to all clients. + library_short_name = flask.request.library.short_name + if not self.has_root_lanes(): + return redirect( + url_for( + "acquisition_groups", + library_short_name=library_short_name, + _external=True, + ) + ) + + # The more complex case. We must authorize the patron, check + # their type, and redirect them to an appropriate feed. + return self.appropriate_index_for_patron_type() + + def authentication_document(self): + """Serve this library's Authentication For OPDS document.""" + return Response( + self.manager.authentication_for_opds_document, + 200, + {"Content-Type": AuthenticationForOPDSDocument.MEDIA_TYPE}, + ) + + def has_root_lanes(self): + """Does the active library feature root lanes for patrons of + certain types? + + :return: A boolean + """ + return flask.request.library.has_root_lanes + + def authenticated_patron_root_lane(self): + patron = self.authenticated_patron_from_request() + if isinstance(patron, ProblemDetail): + return patron + if isinstance(patron, Response): + return patron + return patron.root_lane + + def appropriate_index_for_patron_type(self): + library_short_name = flask.request.library.short_name + root_lane = self.authenticated_patron_root_lane() + if isinstance(root_lane, ProblemDetail): + return root_lane + if isinstance(root_lane, Response): + return root_lane + if root_lane is None: + return redirect( + url_for( + "acquisition_groups", + library_short_name=library_short_name, + _external=True, + ) + ) + + return redirect( + url_for( + "acquisition_groups", + library_short_name=library_short_name, + lane_identifier=root_lane.id, + _external=True, + ) + ) diff --git a/api/controller/loan.py b/api/controller/loan.py new file mode 100644 index 000000000..9d8c33f68 --- /dev/null +++ b/api/controller/loan.py @@ -0,0 +1,579 @@ +from __future__ import annotations + +from typing import Any + +import flask +from flask import Response, redirect +from flask_babel import lazy_gettext as _ +from lxml import etree +from werkzeug import Response as wkResponse + +from api.circulation_exceptions import ( + AuthorizationBlocked, + AuthorizationExpired, + CannotFulfill, + CannotHold, + CannotLoan, + CannotReleaseHold, + CannotRenew, + CannotReturn, + CirculationException, + DeliveryMechanismConflict, + DeliveryMechanismError, + FormatNotAvailable, + NoActiveLoan, + NoOpenAccessDownload, + NotFoundOnRemote, + OutstandingFines, + PatronAuthorizationFailedException, + PatronHoldLimitReached, + PatronLoanLimitReached, + RemoteRefusedReturn, +) +from api.controller.circulation_manager import CirculationManagerController +from api.problem_details import ( + BAD_DELIVERY_MECHANISM, + CANNOT_FULFILL, + CANNOT_RELEASE_HOLD, + CHECKOUT_FAILED, + COULD_NOT_MIRROR_TO_REMOTE, + DELIVERY_CONFLICT, + HOLD_FAILED, + INVALID_CREDENTIALS, + NO_ACCEPTABLE_FORMAT, + NO_ACTIVE_LOAN, + NO_ACTIVE_LOAN_OR_HOLD, + NO_LICENSES, + NOT_FOUND_ON_REMOTE, + OUTSTANDING_FINES, + RENEW_FAILED, +) +from core.feed.acquisition import OPDSAcquisitionFeed +from core.model import DataSource, DeliveryMechanism, Loan, Patron, Representation +from core.util.http import RemoteIntegrationException +from core.util.opds_writer import OPDSFeed +from core.util.problem_detail import ProblemDetail + + +class 
LoanController(CirculationManagerController): + def sync(self): + """Sync the authenticated patron's loans and holds with all third-party + providers. + + :return: A Response containing an OPDS feed with up-to-date information. + """ + patron = flask.request.patron + + # Save some time if we don't believe the patron's loans or holds have + # changed since the last time the client requested this feed. + response = self.handle_conditional_request(patron.last_loan_activity_sync) + if isinstance(response, Response): + return response + + # TODO: SimplyE used to make a HEAD request to the bookshelf feed + # as a quick way of checking authentication. Does this still happen? + # It shouldn't -- the patron profile feed should be used instead. + # If it's not used, we can take this out. + if flask.request.method == "HEAD": + return Response() + + # First synchronize our local list of loans and holds with all + # third-party loan providers. + if patron.authorization_identifier: + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + try: + self.circulation.sync_bookshelf(patron, credential) + except Exception as e: + # If anything goes wrong, omit the sync step and just + # display the current active loans, as we understand them. + self.manager.log.error( + "ERROR DURING SYNC for %s: %r", patron.id, e, exc_info=e + ) + + # Then make the feed. + feed = OPDSAcquisitionFeed.active_loans_for(self.circulation, patron) + response = feed.as_response( + max_age=0, + private=True, + mime_types=flask.request.accept_mimetypes, + ) + + last_modified = patron.last_loan_activity_sync + if last_modified: + response.last_modified = last_modified + return response + + def borrow(self, identifier_type, identifier, mechanism_id=None): + """Create a new loan or hold for a book. + + :return: A Response containing an OPDS entry that includes a link of rel + "http://opds-spec.org/acquisition", which can be used to fetch the + book or the license file. + """ + patron = flask.request.patron + library = flask.request.library + + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + + result = self.best_lendable_pool( + library, patron, identifier_type, identifier, mechanism_id + ) + if not result: + # No LicensePools were found and no ProblemDetail + # was returned. Send a generic ProblemDetail. + return NO_LICENSES.detailed(_("I've never heard of this work.")) + if isinstance(result, ProblemDetail): + # There was a problem determining the appropriate + # LicensePool to use. + return result + + if isinstance(result, Loan): + # We already have a Loan, so there's no need to go to the API. + loan_or_hold = result + is_new = False + else: + # We need to actually go out to the API + # and try to take out a loan. + pool, mechanism = result + loan_or_hold, is_new = self._borrow(patron, credential, pool, mechanism) + + if isinstance(loan_or_hold, ProblemDetail): + return loan_or_hold + + # At this point we have either a loan or a hold. If a loan, serve + # a feed that tells the patron how to fulfill the loan. If a hold, + # serve a feed that talks about the hold. 
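+        # A 201 status indicates that a new loan or hold was created by
+        # this request; 200 indicates an existing one was found and reused.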
+        response_kwargs = {}
+        if is_new:
+            response_kwargs["status"] = 201
+        else:
+            response_kwargs["status"] = 200
+        return OPDSAcquisitionFeed.single_entry_loans_feed(
+            self.circulation, loan_or_hold, **response_kwargs
+        )
+
+    def _borrow(self, patron, credential, pool, mechanism):
+        """Go out to the API, try to take out a loan, and handle errors as
+        problem detail documents.
+
+        :param patron: The Patron who's trying to take out the loan
+        :param credential: A Credential to use when authenticating
+            as this Patron with the external API.
+        :param pool: The LicensePool for the book the Patron wants.
+        :param mechanism: The DeliveryMechanism to request when asking for
+            a loan.
+        :return: a 2-tuple (result, is_new). `result` is a Loan (if one
+            could be created or found), a Hold (if a Loan could not be
+            created but a Hold could be), or a ProblemDetail (if the
+            entire operation failed).
+        """
+        result = None
+        is_new = False
+        try:
+            loan, hold, is_new = self.circulation.borrow(
+                patron, credential, pool, mechanism
+            )
+            result = loan or hold
+        except NoOpenAccessDownload as e:
+            result = NO_LICENSES.detailed(
+                _("Couldn't find an open-access download link for this book."),
+                status_code=404,
+            )
+        except PatronAuthorizationFailedException as e:
+            result = INVALID_CREDENTIALS
+        except (PatronLoanLimitReached, PatronHoldLimitReached) as e:
+            result = e.as_problem_detail_document().with_debug(str(e))
+        except DeliveryMechanismError as e:
+            result = BAD_DELIVERY_MECHANISM.with_debug(
+                str(e), status_code=e.status_code
+            )
+        except OutstandingFines as e:
+            result = OUTSTANDING_FINES.detailed(
+                _(
+                    "You must pay your $%(fine_amount).2f outstanding fines before you can borrow more books.",
+                    fine_amount=patron.fines,
+                )
+            )
+        except AuthorizationExpired as e:
+            result = e.as_problem_detail_document(debug=False)
+        except AuthorizationBlocked as e:
+            result = e.as_problem_detail_document(debug=False)
+        except CannotLoan as e:
+            result = CHECKOUT_FAILED.with_debug(str(e))
+        except CannotHold as e:
+            result = HOLD_FAILED.with_debug(str(e))
+        except CannotRenew as e:
+            result = RENEW_FAILED.with_debug(str(e))
+        except NotFoundOnRemote as e:
+            result = NOT_FOUND_ON_REMOTE
+        except CirculationException as e:
+            # Generic circulation error.
+            result = CHECKOUT_FAILED.with_debug(str(e))
+
+        if result is None:
+            # This shouldn't happen, but if it does, it means no exception
+            # was raised but we just didn't get a loan or hold. Return a
+            # generic circulation error.
+            result = HOLD_FAILED
+        return result, is_new
+
+    def best_lendable_pool(
+        self, library, patron, identifier_type, identifier, mechanism_id
+    ):
+        """
+        Of the available LicensePools for the given Identifier, return the
+        one that's the best candidate for loaning out right now.
+
+        :return: A Loan if this patron already has an active loan, otherwise a LicensePool.
+        """
+        # Turn source + identifier into a set of LicensePools
+        pools = self.load_licensepools(library, identifier_type, identifier)
+        if isinstance(pools, ProblemDetail):
+            # Something went wrong.
+            return pools
+
+        best = None
+        mechanism = None
+        problem_doc = None
+
+        existing_loans = (
+            self._db.query(Loan)
+            .filter(
+                Loan.license_pool_id.in_([lp.id for lp in pools]), Loan.patron == patron
+            )
+            .all()
+        )
+        if existing_loans:
+            # The patron already has at least one loan on this book.
+            # To make the "borrow" operation idempotent, return one of
+            # those loans instead of an error.
+            return existing_loans[0]
+
+        # We found a number of LicensePools. 
Try to locate one that + # we can actually loan to the patron. + for pool in pools: + problem_doc = self.apply_borrowing_policy(patron, pool) + if problem_doc: + # As a matter of policy, the patron is not allowed to borrow + # this book. + continue + + # Beyond this point we know that site policy does not prohibit + # us from lending this pool to this patron. + + if mechanism_id: + # But the patron has requested a license pool that + # supports a specific delivery mechanism. This pool + # must offer that mechanism. + mechanism = self.load_licensepooldelivery(pool, mechanism_id) + if isinstance(mechanism, ProblemDetail): + problem_doc = mechanism + continue + + # Beyond this point we have a license pool that we can + # actually loan or put on hold. + + # But there might be many such LicensePools, and we want + # to pick the one that will get the book to the patron + # with the shortest wait. + if ( + not best + or pool.licenses_available > best.licenses_available + or pool.patrons_in_hold_queue < best.patrons_in_hold_queue + ): + best = pool + + if not best: + # We were unable to find any LicensePool that fit the + # criteria. + return problem_doc + return best, mechanism + + def fulfill( + self, + license_pool_id: int, + mechanism_id: int | None = None, + do_get: Any | None = None, + ) -> wkResponse | ProblemDetail: + """Fulfill a book that has already been checked out, + or which can be fulfilled with no active loan. + + If successful, this will serve the patron a downloadable copy + of the book, a key (such as a DRM license file or bearer + token) which can be used to get the book, or an OPDS entry + containing a link to the book. + + :param license_pool_id: Database ID of a LicensePool. + :param mechanism_id: Database ID of a DeliveryMechanism. + """ + do_get = do_get or Representation.simple_http_get + + # Unlike most controller methods, this one has different + # behavior whether or not the patron is authenticated. This is + # why we're about to do something we don't usually do--call + # authenticated_patron_from_request from within a controller + # method. + authentication_response = self.authenticated_patron_from_request() + if isinstance(authentication_response, Patron): + # The patron is authenticated. + patron = authentication_response + else: + # The patron is not authenticated, either due to bad credentials + # (in which case authentication_response is a Response) + # or due to an integration error with the auth provider (in + # which case it is a ProblemDetail). + # + # There's still a chance this request can succeed, but if not, + # we'll be sending out authentication_response. + patron = None + library = flask.request.library # type: ignore + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + + # Turn source + identifier into a LicensePool. + pool = self.load_licensepool(license_pool_id) + if isinstance(pool, ProblemDetail): + return pool + + loan, loan_license_pool = self.get_patron_loan(patron, [pool]) + + requested_license_pool = loan_license_pool or pool + + # Find the LicensePoolDeliveryMechanism they asked for. 
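+        # (load_licensepooldelivery returns a ProblemDetail when the pool
+        # does not offer the requested mechanism; it is passed back to the
+        # client below.)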
+ mechanism = None + if mechanism_id: + mechanism = self.load_licensepooldelivery( + requested_license_pool, mechanism_id + ) + if isinstance(mechanism, ProblemDetail): + return mechanism + + if (not loan or not loan_license_pool) and not ( + self.can_fulfill_without_loan( + library, patron, requested_license_pool, mechanism + ) + ): + if patron: + # Since a patron was identified, the problem is they have + # no active loan. + return NO_ACTIVE_LOAN.detailed( + _("You have no active loan for this title.") + ) + else: + # Since no patron was identified, the problem is + # whatever problem was revealed by the earlier + # authenticated_patron_from_request() call -- either the + # patron didn't authenticate or there's a problem + # integrating with the auth provider. + return authentication_response + + if not mechanism: + # See if the loan already has a mechanism set. We can use that. + if loan and loan.fulfillment: + mechanism = loan.fulfillment + else: + return BAD_DELIVERY_MECHANISM.detailed( + _("You must specify a delivery mechanism to fulfill this loan.") + ) + + try: + fulfillment = self.circulation.fulfill( + patron, + credential, + requested_license_pool, + mechanism, + ) + except DeliveryMechanismConflict as e: + return DELIVERY_CONFLICT.detailed(str(e)) + except NoActiveLoan as e: + return NO_ACTIVE_LOAN.detailed( + _("Can't fulfill loan because you have no active loan for this book."), + status_code=e.status_code, + ) + except FormatNotAvailable as e: + return NO_ACCEPTABLE_FORMAT.with_debug(str(e), status_code=e.status_code) + except CannotFulfill as e: + return CANNOT_FULFILL.with_debug(str(e), status_code=e.status_code) + except DeliveryMechanismError as e: + return BAD_DELIVERY_MECHANISM.with_debug(str(e), status_code=e.status_code) + + # A subclass of FulfillmentInfo may want to bypass the whole + # response creation process. + response = fulfillment.as_response + if response is not None: + return response + + headers = dict() + encoding_header = dict() + if ( + fulfillment.data_source_name == DataSource.ENKI + and mechanism.delivery_mechanism.drm_scheme_media_type + == DeliveryMechanism.NO_DRM + ): + encoding_header["Accept-Encoding"] = "deflate" + + if mechanism.delivery_mechanism.is_streaming: + # If this is a streaming delivery mechanism, create an OPDS entry + # with a fulfillment link to the streaming reader url. + feed = OPDSAcquisitionFeed.single_entry_loans_feed( + self.circulation, loan, fulfillment=fulfillment + ) + if isinstance(feed, ProblemDetail): + # This should typically never happen, since we've gone through the entire fulfill workflow + # But for the sake of return-type completeness we are adding this here + return feed + if isinstance(feed, Response): + return feed + else: + content = etree.tostring(feed) + status_code = 200 + headers["Content-Type"] = OPDSFeed.ACQUISITION_FEED_TYPE + elif fulfillment.content_link_redirect is True: + # The fulfillment API has asked us to not be a proxy and instead redirect the client directly + return redirect(fulfillment.content_link) + else: + content = fulfillment.content + if fulfillment.content_link: + # If we have a link to the content on a remote server, web clients may not + # be able to access it if the remote server does not support CORS requests. + + # If the pool is open access though, the web client can link directly to the + # file to download it, so it's safe to redirect. 
+ if requested_license_pool.open_access: + return redirect(fulfillment.content_link) + + # Otherwise, we need to fetch the content and return it instead + # of redirecting to it, since it may be downloaded through an + # indirect acquisition link. + try: + status_code, headers, content = do_get( + fulfillment.content_link, headers=encoding_header + ) + headers = dict(headers) + except RemoteIntegrationException as e: + return e.as_problem_detail_document(debug=False) + else: + status_code = 200 + if fulfillment.content_type: + headers["Content-Type"] = fulfillment.content_type + + return Response(response=content, status=status_code, headers=headers) + + def can_fulfill_without_loan(self, library, patron, pool, lpdm): + """Is it acceptable to fulfill the given LicensePoolDeliveryMechanism + for the given Patron without creating a Loan first? + + This question is usually asked because no Patron has been + authenticated, and thus no Loan can be created, but somebody + wants a book anyway. + + :param library: A Library. + :param patron: A Patron, probably None. + :param lpdm: A LicensePoolDeliveryMechanism. + """ + authenticator = self.manager.auth.library_authenticators.get(library.short_name) + if authenticator and authenticator.identifies_individuals: + # This library identifies individual patrons, so there is + # no reason to fulfill books without a loan. Even if the + # books are free and the 'loans' are nominal, having a + # Loan object makes it possible for a patron to sync their + # collection across devices, so that's the way we do it. + return False + + # If the library doesn't require that individual patrons + # identify themselves, it's up to the CirculationAPI object. + # Most of them will say no. (This would indicate that the + # collection is improperly associated with a library that + # doesn't identify its patrons.) + return self.circulation.can_fulfill_without_loan(patron, pool, lpdm) + + def revoke(self, license_pool_id): + patron = flask.request.patron + pool = self.load_licensepool(license_pool_id) + if isinstance(pool, ProblemDetail): + return pool + + loan, _ignore = self.get_patron_loan(patron, [pool]) + + if loan: + hold = None + else: + hold, _ignore = self.get_patron_hold(patron, [pool]) + + if not loan and not hold: + if not pool.work: + title = "this book" + else: + title = '"%s"' % pool.work.title + return NO_ACTIVE_LOAN_OR_HOLD.detailed( + _( + 'Can\'t revoke because you have no active loan or hold for "%(title)s".', + title=title, + ), + status_code=404, + ) + + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + if loan: + try: + self.circulation.revoke_loan(patron, credential, pool) + except RemoteRefusedReturn as e: + title = _( + "Loan deleted locally but remote refused. Loan is likely to show up again on next sync." 
+                )
+                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, status_code=503)
+            except CannotReturn as e:
+                title = _("Loan deleted locally but remote failed.")
+                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, 503).with_debug(
+                    str(e)
+                )
+        elif hold:
+            if not self.circulation.can_revoke_hold(pool, hold):
+                title = _("Cannot release a hold once it enters reserved state.")
+                return CANNOT_RELEASE_HOLD.detailed(title, 400)
+            try:
+                self.circulation.release_hold(patron, credential, pool)
+            except CannotReleaseHold as e:
+                title = _("Hold released locally but remote failed.")
+                return CANNOT_RELEASE_HOLD.detailed(title, 503).with_debug(str(e))
+
+        work = pool.work
+        annotator = self.manager.annotator(None)
+        return OPDSAcquisitionFeed.entry_as_response(
+            OPDSAcquisitionFeed.single_entry(work, annotator)
+        )
+
+    def detail(self, identifier_type, identifier):
+        if flask.request.method == "DELETE":
+            return self.revoke_loan_or_hold(identifier_type, identifier)
+
+        patron = flask.request.patron
+        library = flask.request.library
+        pools = self.load_licensepools(library, identifier_type, identifier)
+        if isinstance(pools, ProblemDetail):
+            return pools
+
+        loan, pool = self.get_patron_loan(patron, pools)
+        if loan:
+            hold = None
+        else:
+            hold, pool = self.get_patron_hold(patron, pools)
+
+        if not loan and not hold:
+            return NO_ACTIVE_LOAN_OR_HOLD.detailed(
+                _(
+                    'You have no active loan or hold for "%(title)s".',
+                    title=pool.work.title,
+                ),
+                status_code=404,
+            )
+
+        if flask.request.method == "GET":
+            if loan:
+                item = loan
+            else:
+                item = hold
+            return OPDSAcquisitionFeed.single_entry_loans_feed(self.circulation, item)
diff --git a/api/controller/marc.py b/api/controller/marc.py
new file mode 100644
index 000000000..c54fdd715
--- /dev/null
+++ b/api/controller/marc.py
@@ -0,0 +1,188 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from dataclasses import dataclass, field
+from datetime import datetime
+
+import flask
+from flask import Response
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+
+from core.integration.goals import Goals
+from core.marc import MARCExporter
+from core.model import (
+    Collection,
+    IntegrationConfiguration,
+    IntegrationLibraryConfiguration,
+    Library,
+    MarcFile,
+)
+from core.service.storage.s3 import S3Service
+
+
+@dataclass
+class MarcFileDeltaResult:
+    key: str
+    since: datetime
+    created: datetime
+
+
+@dataclass
+class MarcFileFullResult:
+    key: str
+    created: datetime
+
+
+@dataclass
+class MarcFileCollectionResult:
+    full: MarcFileFullResult | None = None
+    deltas: list[MarcFileDeltaResult] = field(default_factory=list)
+
+
+class MARCRecordController:
+    DOWNLOAD_TEMPLATE = """
+<html lang="en">
+<head><meta charset="utf8"></head>
+<body>
+%(body)s
+</body>
+</html>"""
+
+    def __init__(self, storage_service: S3Service | None) -> None:
+        self.storage_service = storage_service
+
+    @staticmethod
+    def library() -> Library:
+        return flask.request.library  # type: ignore[no-any-return,attr-defined]
+
+    @staticmethod
+    def has_integration(session: Session, library: Library) -> bool:
+        integration_query = (
+            select(IntegrationLibraryConfiguration)
+            .join(IntegrationConfiguration)
+            .where(
+                IntegrationConfiguration.goal == Goals.CATALOG_GOAL,
+                IntegrationConfiguration.protocol == MARCExporter.__name__,
+                IntegrationLibraryConfiguration.library == library,
+            )
+        )
+        integration = session.execute(integration_query).one_or_none()
+        return integration is not None
+
+    @staticmethod
+    def get_files(
+        session: Session, library: Library
+    ) -> dict[str, MarcFileCollectionResult]:
+        marc_files = session.execute(
+            select(
+                IntegrationConfiguration.name,
+                MarcFile.key,
+                MarcFile.since,
+                MarcFile.created,
+            )
+            .select_from(MarcFile)
+            .join(Collection)
+            .join(IntegrationConfiguration)
+            .join(IntegrationLibraryConfiguration)
+            .where(
+                MarcFile.library == library,
+                Collection.export_marc_records == True,
+                IntegrationLibraryConfiguration.library == library,
+            )
+            .order_by(
+                IntegrationConfiguration.name,
+                MarcFile.created.desc(),
+            )
+        ).all()
+
+        files_by_collection: dict[str, MarcFileCollectionResult] = defaultdict(
+            MarcFileCollectionResult
+        )
+        for file_row in marc_files:
+            if file_row.since is None:
+                full_file_result = MarcFileFullResult(
+                    key=file_row.key,
+                    created=file_row.created,
+                )
+                if files_by_collection[file_row.name].full is not None:
+                    # We already have a newer full file, so skip this one.
+                    continue
+                files_by_collection[file_row.name].full = full_file_result
+            else:
+                delta_file_result = MarcFileDeltaResult(
+                    key=file_row.key,
+                    since=file_row.since,
+                    created=file_row.created,
+                )
+                files_by_collection[file_row.name].deltas.append(delta_file_result)
+        return files_by_collection
+
+    def download_page_body(self, session: Session, library: Library) -> str:
+        time_format = "%B %-d, %Y"
+
+        # Check if a MARC exporter is configured, so we can show a
+        # message if it's not.
+        integration = self.has_integration(session, library)
+
+        if not integration:
+            return (
+                "<p>"
+                + "No MARC exporter is currently configured for this library."
+                + "</p>"
+            )
+
+        if not self.storage_service:
+            return "<p>" + "No storage service is currently configured." + "</p>"
+
+        # Get the MARC files for this library.
+        marc_files = self.get_files(session, library)
+
+        if len(marc_files) == 0:
+            # Are there any collections configured to export MARC records?
+            if any(c.export_marc_records for c in library.collections):
+                return "<p>" + "MARC files aren't ready to download yet." + "</p>"
+            else:
+                return (
+                    "<p>"
+                    + "No collections are configured to export MARC records."
+                    + "</p>"
+                )
+
+        body = ""
+        for collection_name, files in marc_files.items():
+            body += "<section>"
+            body += f"<h3>{collection_name}</h3>"
+            if files.full is not None:
+                file = files.full
+                full_url = self.storage_service.generate_url(file.key)
+                full_label = (
+                    f"Full file - last updated {file.created.strftime(time_format)}"
+                )
+                body += f'<a href="{full_url}">{full_label}</a>'
+
+                if files.deltas:
+                    body += f"<h4>Update-only files</h4>"
+                    body += "<ul>"
+                    for update in files.deltas:
+                        update_url = self.storage_service.generate_url(update.key)
+                        update_label = f"Updates from {update.since.strftime(time_format)} to {update.created.strftime(time_format)}"
+                        body += f'<li><a href="{update_url}">{update_label}</a></li>'
+                    body += "</ul>"

+            body += "</section>"
+            body += "<br/>"
+
+        return body
+
+    def download_page(self) -> Response:
+        library = self.library()
+        body = "<h2>Download MARC files for %s</h2>" % library.name
+
+        session = Session.object_session(library)
+        body += self.download_page_body(session, library)
+
+        html = self.DOWNLOAD_TEMPLATE % dict(body=body)
+        headers = dict()
+        headers["Content-Type"] = "text/html"
+        return Response(html, 200, headers)
diff --git a/api/controller/odl_notification.py b/api/controller/odl_notification.py
new file mode 100644
index 000000000..9babcae46
--- /dev/null
+++ b/api/controller/odl_notification.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+import json
+
+import flask
+from flask import Response
+from flask_babel import lazy_gettext as _
+
+from api.controller.circulation_manager import CirculationManagerController
+from api.odl import ODLAPI
+from api.odl2 import ODL2API
+from api.problem_details import INVALID_LOAN_FOR_ODL_NOTIFICATION, NO_ACTIVE_LOAN
+from core.model import Loan, get_one
+
+
+class ODLNotificationController(CirculationManagerController):
+    """Receive notifications from an ODL distributor when the
+    status of a loan changes.
+    """
+
+    def notify(self, loan_id):
+        library = flask.request.library
+        status_doc = flask.request.data
+        loan = get_one(self._db, Loan, id=loan_id)
+
+        if not loan:
+            return NO_ACTIVE_LOAN.detailed(_("No loan was found for this identifier."))
+
+        collection = loan.license_pool.collection
+        if collection.protocol not in (ODLAPI.label(), ODL2API.label()):
+            return INVALID_LOAN_FOR_ODL_NOTIFICATION
+
+        api = self.manager.circulation_apis[library.id].api_for_license_pool(
+            loan.license_pool
+        )
+        api.update_loan(loan, json.loads(status_doc))
+        return Response(_("Success"), 200)
diff --git a/api/controller/opds_feed.py b/api/controller/opds_feed.py
new file mode 100644
index 000000000..26dd89e44
--- /dev/null
+++ b/api/controller/opds_feed.py
@@ -0,0 +1,456 @@
+from __future__ import annotations
+
+import flask
+from flask import Response, redirect, url_for
+
+from api.controller.circulation_manager import CirculationManagerController
+from api.lanes import (
+    CrawlableCollectionBasedLane,
+    CrawlableCustomListBasedLane,
+    CrawlableFacets,
+    HasSeriesFacets,
+    JackpotFacets,
+    JackpotWorkList,
+)
+from api.problem_details import NO_SUCH_COLLECTION, NO_SUCH_LIST
+from core.app_server import load_facets_from_request, load_pagination_from_request
+from core.entrypoint import EverythingEntryPoint
+from core.external_search import SortKeyPagination
+from core.feed.acquisition import OPDSAcquisitionFeed
+from core.feed.navigation import NavigationFeed
+from core.feed.opds import NavigationFacets
+from core.lane import FeaturedFacets, Pagination, SearchFacets, WorkList
+from core.model import Collection, CustomList
+from core.opensearch import OpenSearchDocument
+from core.util.problem_detail import ProblemDetail
+
+
+class OPDSFeedController(CirculationManagerController):
+    def groups(self, lane_identifier, feed_class=OPDSAcquisitionFeed):
+        """Build or retrieve a grouped acquisition feed.
+
+        :param lane_identifier: An identifier that uniquely identifies
+            the WorkList whose feed we want.
+        :param feed_class: A replacement for AcquisitionFeed, for use in
+            tests.
+        """
+        library = flask.request.library
+
+        # Special case: a patron with a root lane who attempts to access
+        # the library's top-level WorkList is redirected to their root
+        # lane (as though they had accessed the index controller)
+        # rather than being denied access.
+        if lane_identifier is None:
+            patron = self.request_patron
+            if patron is not None and patron.root_lane:
+                return redirect(
+                    url_for(
+                        "acquisition_groups",
+                        library_short_name=library.short_name,
+                        lane_identifier=patron.root_lane.id,
+                        _external=True,
+                    )
+                )
+
+        lane = self.load_lane(lane_identifier)
+        if isinstance(lane, ProblemDetail):
+            return lane
+
+        if not lane.children:
+            # This lane has no children. Although we can technically
+            # create a grouped feed, it would be an unsatisfying
+            # gateway to a paginated feed. We should just serve the
+            # paginated feed.
+            return self.feed(lane_identifier, feed_class)
+
+        facet_class_kwargs = dict(
+            minimum_featured_quality=library.settings.minimum_featured_quality,
+        )
+        facets = self.manager.load_facets_from_request(
+            worklist=lane,
+            base_class=FeaturedFacets,
+            base_class_constructor_kwargs=facet_class_kwargs,
+        )
+        if isinstance(facets, ProblemDetail):
+            return facets
+
+        search_engine = self.search_engine
+        if isinstance(search_engine, ProblemDetail):
+            return search_engine
+
+        url = url_for(
+            "acquisition_groups",
+            lane_identifier=lane_identifier,
+            library_short_name=library.short_name,
+            _external=True,
+        )
+
+        annotator = self.manager.annotator(lane, facets)
+        return feed_class.groups(
+            _db=self._db,
+            title=lane.display_name,
+            url=url,
+            worklist=lane,
+            annotator=annotator,
+            facets=facets,
+            search_engine=search_engine,
+        ).as_response(mime_types=flask.request.accept_mimetypes)
+
+    def feed(self, lane_identifier, feed_class=OPDSAcquisitionFeed):
+        """Build or retrieve a paginated acquisition feed.
+
+        :param lane_identifier: An identifier that uniquely identifies
+            the WorkList whose feed we want.
+        :param feed_class: A replacement for AcquisitionFeed, for use in
+            tests.
+        """
+        lane = self.load_lane(lane_identifier)
+        if isinstance(lane, ProblemDetail):
+            return lane
+        facets = self.manager.load_facets_from_request(worklist=lane)
+        if isinstance(facets, ProblemDetail):
+            return facets
+        pagination = load_pagination_from_request(SortKeyPagination)
+        if isinstance(pagination, ProblemDetail):
+            return pagination
+        search_engine = self.search_engine
+        if isinstance(search_engine, ProblemDetail):
+            return search_engine
+
+        library_short_name = flask.request.library.short_name
+        url = url_for(
+            "feed",
+            lane_identifier=lane_identifier,
+            library_short_name=library_short_name,
+            _external=True,
+        )
+
+        annotator = self.manager.annotator(lane, facets=facets)
+        max_age = flask.request.args.get("max_age")
+        feed = feed_class.page(
+            _db=self._db,
+            title=lane.display_name,
+            url=url,
+            worklist=lane,
+            annotator=annotator,
+            facets=facets,
+            pagination=pagination,
+            search_engine=search_engine,
+        )
+        return feed.as_response(
+            max_age=int(max_age) if max_age else lane.max_cache_age(),
+            mime_types=flask.request.accept_mimetypes,
+        )
+
+    def navigation(self, lane_identifier):
+        """Build or retrieve a navigation feed, for clients that do not support groups."""
+
+        lane = self.load_lane(lane_identifier)
+        if isinstance(lane, ProblemDetail):
+            return lane
+        library = flask.request.library
+        library_short_name = library.short_name
+        url = url_for(
+            "navigation_feed",
+            lane_identifier=lane_identifier,
+            library_short_name=library_short_name,
+            _external=True,
+        )
+
+        title = lane.display_name
+        facet_class_kwargs = dict(
+            minimum_featured_quality=library.settings.minimum_featured_quality,
+        )
+        facets = self.manager.load_facets_from_request(
+            worklist=lane,
+            base_class=NavigationFacets,
base_class_constructor_kwargs=facet_class_kwargs, + ) + annotator = self.manager.annotator(lane, facets) + return NavigationFeed.navigation( + _db=self._db, + title=title, + url=url, + worklist=lane, + annotator=annotator, + facets=facets, + ).as_response(max_age=lane.max_cache_age()) + + def crawlable_library_feed(self): + """Build or retrieve a crawlable acquisition feed for the + request library. + """ + library = flask.request.library + url = url_for( + "crawlable_library_feed", + library_short_name=library.short_name, + _external=True, + ) + title = library.name + lane = CrawlableCollectionBasedLane() + lane.initialize(library) + return self._crawlable_feed(title=title, url=url, worklist=lane) + + def crawlable_collection_feed(self, collection_name): + """Build or retrieve a crawlable acquisition feed for the + requested collection. + """ + collection = Collection.by_name(self._db, collection_name) + if not collection: + return NO_SUCH_COLLECTION + title = collection.name + url = url_for( + "crawlable_collection_feed", collection_name=collection.name, _external=True + ) + lane = CrawlableCollectionBasedLane() + lane.initialize([collection]) + return self._crawlable_feed(title=title, url=url, worklist=lane) + + def crawlable_list_feed(self, list_name): + """Build or retrieve a crawlable, paginated acquisition feed for the + named CustomList, sorted by update date. + """ + # TODO: A library is not strictly required here, since some + # CustomLists aren't associated with a library, but this isn't + # a use case we need to support now. + library = flask.request.library + list = CustomList.find(self._db, list_name, library=library) + if not list: + return NO_SUCH_LIST + library_short_name = library.short_name + title = list.name + url = url_for( + "crawlable_list_feed", + list_name=list.name, + library_short_name=library_short_name, + _external=True, + ) + lane = CrawlableCustomListBasedLane() + lane.initialize(library, list) + return self._crawlable_feed(title=title, url=url, worklist=lane) + + def _crawlable_feed( + self, title, url, worklist, annotator=None, feed_class=OPDSAcquisitionFeed + ): + """Helper method to create a crawlable feed. + + :param title: The title to use for the feed. + :param url: The URL from which the feed will be served. + :param worklist: A crawlable Lane which controls which works show up + in the feed. + :param annotator: A custom Annotator to use when generating the feed. + :param feed_class: A drop-in replacement for OPDSAcquisitionFeed + for use in tests. + """ + pagination = load_pagination_from_request( + SortKeyPagination, default_size=Pagination.DEFAULT_CRAWLABLE_SIZE + ) + if isinstance(pagination, ProblemDetail): + return pagination + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + # A crawlable feed has only one possible set of Facets, + # so library settings are irrelevant. 
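+        # (CrawlableFacets supplies its own fixed default for each facet
+        # group; see api/lanes.py.)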
+ facets = self.manager.load_facets_from_request( + worklist=worklist, + base_class=CrawlableFacets, + ) + annotator = annotator or self.manager.annotator(worklist, facets=facets) + + return feed_class.page( + _db=self._db, + title=title, + url=url, + worklist=worklist, + annotator=annotator, + facets=facets, + pagination=pagination, + search_engine=search_engine, + ).as_response( + mime_types=flask.request.accept_mimetypes, max_age=worklist.max_cache_age() + ) + + def _load_search_facets(self, lane): + entrypoints = list(flask.request.library.entrypoints) + if len(entrypoints) > 1: + # There is more than one enabled EntryPoint. + # By default, search them all. + default_entrypoint = EverythingEntryPoint + else: + # There is only one enabled EntryPoint, + # and no need for a special default. + default_entrypoint = None + return self.manager.load_facets_from_request( + worklist=lane, + base_class=SearchFacets, + default_entrypoint=default_entrypoint, + ) + + def search(self, lane_identifier, feed_class=OPDSAcquisitionFeed): + """Search for books.""" + lane = self.load_lane(lane_identifier) + if isinstance(lane, ProblemDetail): + return lane + + # Although the search query goes against Opensearch, we must + # use normal pagination because the results are sorted by + # match quality, not bibliographic information. + pagination = load_pagination_from_request( + Pagination, default_size=Pagination.DEFAULT_SEARCH_SIZE + ) + if isinstance(pagination, ProblemDetail): + return pagination + + facets = self._load_search_facets(lane) + if isinstance(facets, ProblemDetail): + return facets + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + # Check whether there is a query string -- if not, we want to + # send an OpenSearch document explaining how to search. + query = flask.request.args.get("q") + library_short_name = flask.request.library.short_name + + # Create a function that, when called, generates a URL to the + # search controller. + # + # We'll call this one way if there is no query string in the + # request arguments, and another way if there is a query + # string. + make_url_kwargs = dict(list(facets.items())) + make_url = lambda: url_for( + "lane_search", + lane_identifier=lane_identifier, + library_short_name=library_short_name, + _external=True, + **make_url_kwargs, + ) + if not query: + # Send the search form + open_search_doc = OpenSearchDocument.for_lane(lane, make_url()) + headers = {"Content-Type": "application/opensearchdescription+xml"} + return Response(open_search_doc, 200, headers) + + # We have a query -- add it to the keyword arguments used when + # generating a URL. + make_url_kwargs["q"] = query.encode("utf8") + + # Run a search. + annotator = self.manager.annotator(lane, facets) + info = OpenSearchDocument.search_info(lane) + response = feed_class.search( + _db=self._db, + title=info["name"], + url=make_url(), + lane=lane, + search_engine=search_engine, + query=query, + annotator=annotator, + pagination=pagination, + facets=facets, + ) + if isinstance(response, ProblemDetail): + return response + return response.as_response( + mime_types=flask.request.accept_mimetypes, max_age=lane.max_cache_age() + ) + + def _qa_feed( + self, feed_factory, feed_title, controller_name, facet_class, worklist_factory + ): + """Create some kind of OPDS feed designed for consumption by an + automated QA process. + + :param feed_factory: This function will be called to create the feed. 
+            It must either be AcquisitionFeed.groups or AcquisitionFeed.page,
+            or it must take the same arguments as those methods.
+        :param feed_title: String title of the feed.
+        :param controller_name: Controller name to use when generating
+            the URL to the feed.
+        :param facet_class: Faceting class to load (through
+            load_facets_from_request).
+        :param worklist_factory: Function that takes (Library, Facets)
+            and returns a Worklist configured to generate the feed.
+        :return: A ProblemDetail if there's a problem loading the faceting
+            object; otherwise the return value of `feed_factory`.
+        """
+        library = flask.request.library
+        search_engine = self.search_engine
+        if isinstance(search_engine, ProblemDetail):
+            return search_engine
+
+        url = url_for(
+            controller_name, library_short_name=library.short_name, _external=True
+        )
+
+        facets = load_facets_from_request(
+            base_class=facet_class, default_entrypoint=EverythingEntryPoint
+        )
+        if isinstance(facets, ProblemDetail):
+            return facets
+
+        worklist = worklist_factory(library, facets)
+        annotator = self.manager.annotator(worklist)
+
+        # Since this feed will be consumed by an automated client, and
+        # we're choosing titles for specific purposes, there's no
+        # reason to put more than a single item in each group.
+        pagination = Pagination(size=1)
+        return feed_factory(
+            _db=self._db,
+            title=feed_title,
+            url=url,
+            pagination=pagination,
+            worklist=worklist,
+            annotator=annotator,
+            search_engine=search_engine,
+            facets=facets,
+            max_age=0,
+        )
+
+    def qa_feed(self, feed_class=OPDSAcquisitionFeed):
+        """Create an OPDS feed containing the information necessary to
+        run a full set of integration tests against this server and
+        the vendors it relies on.
+
+        :param feed_class: Class to substitute for AcquisitionFeed during
+            tests.
+        """
+
+        def factory(library, facets):
+            return JackpotWorkList(library, facets)
+
+        return self._qa_feed(
+            feed_factory=feed_class.groups,
+            feed_title="QA test feed",
+            controller_name="qa_feed",
+            facet_class=JackpotFacets,
+            worklist_factory=factory,
+        )
+
+    def qa_series_feed(self, feed_class=OPDSAcquisitionFeed):
+        """Create an OPDS feed containing books that belong to _some_
+        series, without regard to _which_ series.
+
+        :param feed_class: Class to substitute for AcquisitionFeed during
+            tests.
+ """ + + def factory(library, facets): + wl = WorkList() + wl.initialize(library) + return wl + + return self._qa_feed( + feed_factory=feed_class.page, + feed_title="QA series test feed", + controller_name="qa_series_feed", + facet_class=HasSeriesFacets, + worklist_factory=factory, + ) diff --git a/api/controller/patron_auth_token.py b/api/controller/patron_auth_token.py new file mode 100644 index 000000000..9c5010f42 --- /dev/null +++ b/api/controller/patron_auth_token.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +import logging + +import flask + +from api.authentication.access_token import AccessTokenProvider +from api.controller.circulation_manager import CirculationManagerController +from api.model.patron_auth import PatronAuthAccessToken +from api.problem_details import PATRON_AUTH_ACCESS_TOKEN_NOT_POSSIBLE +from core.util.problem_detail import ProblemError + + +class PatronAuthTokenController(CirculationManagerController): + def get_token(self): + """Create a Patron Auth access token for an authenticated patron""" + patron = flask.request.patron + auth = flask.request.authorization + token_expiry = 3600 + + if not patron or auth.type.lower() != "basic": + return PATRON_AUTH_ACCESS_TOKEN_NOT_POSSIBLE + + try: + token = AccessTokenProvider.generate_token( + self._db, + patron, + auth["password"], + expires_in=token_expiry, + ) + except ProblemError as ex: + logging.getLogger(self.__class__.__name__).error( + f"Could not generate Patron Auth Access Token: {ex}" + ) + return ex.problem_detail + + return PatronAuthAccessToken( + access_token=token, expires_in=token_expiry, token_type="Bearer" + ).api_dict() diff --git a/api/controller/playtime_entries.py b/api/controller/playtime_entries.py new file mode 100644 index 000000000..8f52129d1 --- /dev/null +++ b/api/controller/playtime_entries.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import flask +from pydantic import ValidationError + +from api.controller.circulation_manager import CirculationManagerController +from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse +from api.problem_details import NOT_FOUND_ON_REMOTE +from core.model import Collection, Identifier, Library, get_one +from core.problem_details import INVALID_INPUT +from core.query.playtime_entries import PlaytimeEntries + + +class PlaytimeEntriesController(CirculationManagerController): + def track_playtimes(self, collection_id, identifier_type, identifier_idn): + library: Library = flask.request.library + identifier = get_one( + self._db, Identifier, type=identifier_type, identifier=identifier_idn + ) + collection = Collection.by_id(self._db, collection_id) + + if not identifier: + return NOT_FOUND_ON_REMOTE.detailed( + f"The identifier {identifier_type}/{identifier_idn} was not found." + ) + if not collection: + return NOT_FOUND_ON_REMOTE.detailed( + f"The collection {collection_id} was not found." + ) + + if collection not in library.collections: + return INVALID_INPUT.detailed("Collection was not found in the Library.") + + if not identifier.licensed_through_collection(collection): + return INVALID_INPUT.detailed( + "This Identifier was not found in the Collection." 
+            )
+
+        try:
+            data = PlaytimeEntriesPost(**flask.request.json)
+        except ValidationError as ex:
+            return INVALID_INPUT.detailed(ex.json())
+
+        responses, summary = PlaytimeEntries.insert_playtime_entries(
+            self._db, identifier, collection, library, data
+        )
+
+        response_data = PlaytimeEntriesPostResponse(
+            summary=summary, responses=responses
+        )
+        response = flask.jsonify(response_data.dict())
+        response.status_code = 207
+        return response
diff --git a/api/controller/profile.py b/api/controller/profile.py
new file mode 100644
index 000000000..8cf6d393e
--- /dev/null
+++ b/api/controller/profile.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+import flask
+from flask import make_response
+
+from api.authenticator import CirculationPatronProfileStorage
+from api.controller.circulation_manager import CirculationManagerController
+from core.user_profile import ProfileController as CoreProfileController
+from core.util.problem_detail import ProblemDetail
+
+
+class ProfileController(CirculationManagerController):
+    """Implement the User Profile Management Protocol."""
+
+    def _controller(self, patron):
+        """Instantiate a CoreProfileController that actually does the work."""
+        storage = CirculationPatronProfileStorage(patron, flask.url_for)
+        return CoreProfileController(storage)
+
+    def protocol(self):
+        """Handle a UPMP request."""
+        patron = flask.request.patron
+        controller = self._controller(patron)
+        if flask.request.method == "GET":
+            result = controller.get()
+        else:
+            result = controller.put(flask.request.headers, flask.request.data)
+        if isinstance(result, ProblemDetail):
+            return result
+        return make_response(*result)
diff --git a/api/controller/static_file.py b/api/controller/static_file.py
new file mode 100644
index 000000000..4016f0966
--- /dev/null
+++ b/api/controller/static_file.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+import os
+
+import flask
+
+from api.config import Configuration
+from api.controller.circulation_manager import CirculationManagerController
+from core.model import ConfigurationSetting
+
+
+class StaticFileController(CirculationManagerController):
+    def static_file(self, directory, filename):
+        max_age = ConfigurationSetting.sitewide(
+            self._db, Configuration.STATIC_FILE_CACHE_TIME
+        ).int_value
+        return flask.send_from_directory(directory, filename, max_age=max_age)
+
+    def image(self, filename):
+        directory = os.path.join(
+            os.path.abspath(os.path.dirname(__file__)),
+            "..",
+            "..",
+            "resources",
+            "images",
+        )
+        return self.static_file(directory, filename)
diff --git a/api/controller/urn_lookup.py b/api/controller/urn_lookup.py
new file mode 100644
index 000000000..fe38b113f
--- /dev/null
+++ b/api/controller/urn_lookup.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+import flask
+
+from core.app_server import URNLookupController as CoreURNLookupController
+from core.feed.annotator.circulation import CirculationManagerAnnotator
+
+
+class URNLookupController(CoreURNLookupController):
+    def __init__(self, manager):
+        self.manager = manager
+        super().__init__(manager._db)
+
+    def work_lookup(self, route_name):
+        """Build a CirculationManagerAnnotator based on the current library's
+        top-level WorkList, and use it to generate an OPDS lookup
+        feed.
+ """ + library = flask.request.library + top_level_worklist = self.manager.top_level_lanes[library.id] + annotator = CirculationManagerAnnotator(top_level_worklist) + return super().work_lookup(annotator, route_name) diff --git a/api/controller/work.py b/api/controller/work.py new file mode 100644 index 000000000..d50928f8a --- /dev/null +++ b/api/controller/work.py @@ -0,0 +1,293 @@ +from __future__ import annotations + +import urllib.parse + +import flask +from flask_babel import lazy_gettext as _ + +from api.controller.circulation_manager import CirculationManagerController +from api.lanes import ( + ContributorFacets, + ContributorLane, + RecommendationLane, + RelatedBooksLane, + SeriesFacets, + SeriesLane, +) +from api.problem_details import NO_SUCH_LANE, NOT_FOUND_ON_REMOTE +from core.app_server import load_pagination_from_request +from core.config import CannotLoadConfiguration +from core.external_search import SortKeyPagination +from core.feed.acquisition import OPDSAcquisitionFeed +from core.lane import FeaturedFacets, Pagination +from core.metadata_layer import ContributorData +from core.util.opds_writer import OPDSFeed +from core.util.problem_detail import ProblemDetail + + +class WorkController(CirculationManagerController): + def _lane_details(self, languages, audiences): + if languages: + languages = languages.split(",") + if audiences: + audiences = [urllib.parse.unquote_plus(a) for a in audiences.split(",")] + + return languages, audiences + + def contributor( + self, contributor_name, languages, audiences, feed_class=OPDSAcquisitionFeed + ): + """Serve a feed of books written by a particular author""" + library = flask.request.library + if not contributor_name: + return NO_SUCH_LANE.detailed(_("No contributor provided")) + + # contributor_name is probably a display_name, but it could be a + # sort_name. Pass it in for both fields and + # ContributorData.lookup() will do its best to figure it out. + contributor = ContributorData.lookup( + self._db, sort_name=contributor_name, display_name=contributor_name + ) + if not contributor: + return NO_SUCH_LANE.detailed( + _("Unknown contributor: %s") % contributor_name + ) + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + languages, audiences = self._lane_details(languages, audiences) + + lane = ContributorLane( + library, contributor, languages=languages, audiences=audiences + ) + facets = self.manager.load_facets_from_request( + worklist=lane, base_class=ContributorFacets + ) + if isinstance(facets, ProblemDetail): + return facets + + pagination = load_pagination_from_request(SortKeyPagination) + if isinstance(pagination, ProblemDetail): + return pagination + + annotator = self.manager.annotator(lane, facets) + + url = annotator.feed_url( + lane, + facets=facets, + pagination=pagination, + ) + + return feed_class.page( + _db=self._db, + title=lane.display_name, + url=url, + worklist=lane, + facets=facets, + pagination=pagination, + annotator=annotator, + search_engine=search_engine, + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) + + def permalink(self, identifier_type, identifier): + """Serve an entry for a single book. + + This does not include any loan or hold-specific information for + the authenticated patron. + + This is different from the /works lookup protocol, in that it + returns a single entry while the /works lookup protocol returns a + feed containing any number of entries. 
+ """ + library = flask.request.library + work = self.load_work(library, identifier_type, identifier) + if isinstance(work, ProblemDetail): + return work + + patron = flask.request.patron + + if patron: + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + return pools + + loan, pool = self.get_patron_loan(patron, pools) + hold = None + + if not loan: + hold, pool = self.get_patron_hold(patron, pools) + + item = loan or hold + pool = pool or pools[0] + + return OPDSAcquisitionFeed.single_entry_loans_feed( + self.circulation, item or pool + ) + else: + annotator = self.manager.annotator(lane=None) + + return OPDSAcquisitionFeed.entry_as_response( + OPDSAcquisitionFeed.single_entry(work, annotator), + max_age=OPDSFeed.DEFAULT_MAX_AGE, + ) + + def related( + self, + identifier_type, + identifier, + novelist_api=None, + feed_class=OPDSAcquisitionFeed, + ): + """Serve a groups feed of books related to a given book.""" + + library = flask.request.library + work = self.load_work(library, identifier_type, identifier) + if work is None: + return NOT_FOUND_ON_REMOTE + + if isinstance(work, ProblemDetail): + return work + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + try: + lane_name = f"Books Related to {work.title} by {work.author}" + lane = RelatedBooksLane(library, work, lane_name, novelist_api=novelist_api) + except ValueError as e: + # No related books were found. + return NO_SUCH_LANE.detailed(str(e)) + + facets = self.manager.load_facets_from_request( + worklist=lane, + base_class=FeaturedFacets, + base_class_constructor_kwargs=dict( + minimum_featured_quality=library.settings.minimum_featured_quality + ), + ) + if isinstance(facets, ProblemDetail): + return facets + + annotator = self.manager.annotator(lane) + url = annotator.feed_url( + lane, + facets=facets, + ) + + return feed_class.groups( + _db=self._db, + title=lane.DISPLAY_NAME, + url=url, + worklist=lane, + annotator=annotator, + pagination=None, + facets=facets, + search_engine=search_engine, + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) + + def recommendations( + self, + identifier_type, + identifier, + novelist_api=None, + feed_class=OPDSAcquisitionFeed, + ): + """Serve a feed of recommendations related to a given book.""" + + library = flask.request.library + work = self.load_work(library, identifier_type, identifier) + if isinstance(work, ProblemDetail): + return work + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + lane_name = f"Recommendations for {work.title} by {work.author}" + try: + lane = RecommendationLane( + library=library, + work=work, + display_name=lane_name, + novelist_api=novelist_api, + ) + except CannotLoadConfiguration as e: + # NoveList isn't configured. + return NO_SUCH_LANE.detailed(_("Recommendations not available")) + + facets = self.manager.load_facets_from_request(worklist=lane) + if isinstance(facets, ProblemDetail): + return facets + + # We use a normal Pagination object because recommendations + # are looked up in a third-party API and paginated through the + # database lookup. 
+ pagination = load_pagination_from_request(Pagination) + if isinstance(pagination, ProblemDetail): + return pagination + + annotator = self.manager.annotator(lane) + url = annotator.feed_url( + lane, + facets=facets, + pagination=pagination, + ) + + return feed_class.page( + _db=self._db, + title=lane.DISPLAY_NAME, + url=url, + worklist=lane, + facets=facets, + pagination=pagination, + annotator=annotator, + search_engine=search_engine, + ).as_response(max_age=lane.max_cache_age()) + + def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFeed): + """Serve a feed of books in a given series.""" + library = flask.request.library + if not series_name: + return NO_SUCH_LANE.detailed(_("No series provided")) + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + languages, audiences = self._lane_details(languages, audiences) + lane = SeriesLane( + library, series_name=series_name, languages=languages, audiences=audiences + ) + + facets = self.manager.load_facets_from_request( + worklist=lane, base_class=SeriesFacets + ) + if isinstance(facets, ProblemDetail): + return facets + + pagination = load_pagination_from_request(SortKeyPagination) + if isinstance(pagination, ProblemDetail): + return pagination + + annotator = self.manager.annotator(lane) + + url = annotator.feed_url(lane, facets=facets, pagination=pagination) + return feed_class.page( + _db=self._db, + title=lane.display_name, + url=url, + worklist=lane, + facets=facets, + pagination=pagination, + annotator=annotator, + search_engine=search_engine, + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) diff --git a/api/discovery/opds_registration.py b/api/discovery/opds_registration.py index 67be3278e..3f1307823 100644 --- a/api/discovery/opds_registration.py +++ b/api/discovery/opds_registration.py @@ -3,18 +3,8 @@ import base64 import json import sys -from typing import ( - Any, - Callable, - Dict, - List, - Literal, - Optional, - Tuple, - Type, - Union, - overload, -) +from collections.abc import Callable +from typing import Any, Literal, overload from Crypto.Cipher.PKCS1_OAEP import PKCS1OAEP_Cipher from flask_babel import lazy_gettext as _ @@ -99,13 +89,13 @@ def protocol_details(cls, db: Session) -> dict[str, Any]: } @classmethod - def settings_class(cls) -> Type[OpdsRegistrationServiceSettings]: + def settings_class(cls) -> type[OpdsRegistrationServiceSettings]: """Get the settings for this integration.""" return OpdsRegistrationServiceSettings @classmethod @overload - def for_integration(cls, _db: Session, integration: int) -> Optional[Self]: + def for_integration(cls, _db: Session, integration: int) -> Self | None: ... @classmethod @@ -118,7 +108,7 @@ def for_integration( @classmethod def for_integration( cls, _db: Session, integration: int | IntegrationConfiguration - ) -> Optional[Self]: + ) -> Self | None: """ Find a OpdsRegistrationService object configured by the given IntegrationConfiguration ID. """ @@ -138,14 +128,14 @@ def get_request(url: str) -> Response: @staticmethod def post_request( - url: str, payload: Union[str, Dict[str, Any]], **kwargs: Any + url: str, payload: str | dict[str, Any], **kwargs: Any ) -> Response: return HTTP.debuggable_post(url, payload, **kwargs) @classmethod def for_protocol_goal_and_url( cls, _db: Session, protocol: str, goal: Goals, url: str - ) -> Optional[Self]: + ) -> Self | None: """Get a LibraryRegistry for the given protocol, goal, and URL. 
Create the corresponding ExternalIntegration if necessary. """ @@ -161,7 +151,7 @@ def for_protocol_goal_and_url( return cls(integration, settings) @property - def registrations(self) -> List[DiscoveryServiceRegistration]: + def registrations(self) -> list[DiscoveryServiceRegistration]: """Find all of this site's registrations with this OpdsRegistrationService. :yield: A sequence of Registration objects. @@ -175,7 +165,7 @@ def registrations(self) -> List[DiscoveryServiceRegistration]: def fetch_catalog( self, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Fetch the root catalog for this OpdsRegistrationService. :return: A ProblemDetail if there's a problem communicating @@ -187,7 +177,7 @@ def fetch_catalog( return self._extract_catalog_information(response) @classmethod - def _extract_catalog_information(cls, response: Response) -> Tuple[str, str]: + def _extract_catalog_information(cls, response: Response) -> tuple[str, str]: """From an OPDS catalog, extract information that's essential to kickstarting the OPDS Directory Registration Protocol. @@ -220,7 +210,7 @@ def _extract_catalog_information(cls, response: Response) -> Tuple[str, str]: def fetch_registration_document( self, - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[str | None, str | None]: """Fetch a discovery service's registration document and extract useful information from it. @@ -237,7 +227,7 @@ def fetch_registration_document( @classmethod def _extract_registration_information( cls, response: Response - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[str | None, str | None]: """From an OPDS registration document, extract information that's useful to kickstarting the OPDS Directory Registration Protocol. @@ -277,7 +267,7 @@ def _extract_registration_information( @classmethod def _extract_links( cls, response: Response - ) -> Tuple[Optional[Dict[str, Any]], List[Dict[str, str]]]: + ) -> tuple[dict[str, Any] | None, list[dict[str, str]]]: """Parse an OPDS 2 feed out of a Requests response object. :return: A 2-tuple (parsed_catalog, links), @@ -388,7 +378,7 @@ def _create_registration_payload( library: Library, stage: RegistrationStage, url_for: Callable[..., str], - ) -> Dict[str, str]: + ) -> dict[str, str]: """Collect the key-value pairs to be sent when kicking off the registration protocol. @@ -416,7 +406,7 @@ def _create_registration_payload( @staticmethod def _create_registration_headers( registration: DiscoveryServiceRegistration, - ) -> Dict[str, str]: + ) -> dict[str, str]: shared_secret = registration.shared_secret headers = {} if shared_secret: @@ -427,8 +417,8 @@ def _create_registration_headers( def _send_registration_request( cls, register_url: str, - headers: Dict[str, str], - payload: Dict[str, str], + headers: dict[str, str], + payload: dict[str, str], ) -> Response: """Send the request that actually kicks off the OPDS Directory Registration Protocol. 
@@ -471,7 +461,7 @@ def _decrypt_shared_secret( def _process_registration_result( cls, registration: DiscoveryServiceRegistration, - catalog: Dict[str, Any] | Any, + catalog: dict[str, Any] | Any, cipher: PKCS1OAEP_Cipher, desired_stage: RegistrationStage, ) -> Literal[True]: @@ -494,7 +484,7 @@ def _process_registration_result( f"Remote service served '{catalog}', which I can't make sense of as an OPDS document.", ) ) - metadata: Dict[str, str] = catalog.get("metadata", {}) + metadata: dict[str, str] = catalog.get("metadata", {}) short_name = metadata.get("short_name") encrypted_shared_secret = metadata.get("shared_secret") links = catalog.get("links", []) diff --git a/api/discovery/registration_script.py b/api/discovery/registration_script.py index 4d75cd7b5..a0d19e748 100644 --- a/api/discovery/registration_script.py +++ b/api/discovery/registration_script.py @@ -1,13 +1,14 @@ from __future__ import annotations from argparse import ArgumentParser -from typing import Callable, List, Literal, Optional +from collections.abc import Callable +from typing import Literal from flask import url_for from sqlalchemy.orm import Session +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry from api.util.flask import PalaceFlask @@ -41,8 +42,8 @@ def arg_parser(cls, _db: Session) -> ArgumentParser: # type: ignore[override] def do_run( self, - cmd_args: Optional[List[str]] = None, - manager: Optional[CirculationManager] = None, + cmd_args: list[str] | None = None, + manager: CirculationManager | None = None, ) -> PalaceFlask | Literal[False]: parsed = self.parse_command_line(self._db, cmd_args) diff --git a/api/ekirjasto_authentication.py b/api/ekirjasto_authentication.py index 804faae52..0034a23f1 100644 --- a/api/ekirjasto_authentication.py +++ b/api/ekirjasto_authentication.py @@ -5,7 +5,7 @@ from abc import ABC from base64 import b64decode, b64encode from enum import Enum -from typing import Any, Tuple +from typing import Any import jwt import requests @@ -452,7 +452,7 @@ def validate_ekirjasto_delegate_token( def remote_refresh_token( self, token: str - ) -> Tuple[ProblemDetail, None] | Tuple[str, int]: + ) -> tuple[ProblemDetail, None] | tuple[str, int]: """Refresh ekirjasto token with ekirjasto API call. We assume that the token is valid, API call fails if not. @@ -610,7 +610,7 @@ def local_patron_lookup( def ekirjasto_authenticate( self, _db: Session, ekirjasto_token: str - ) -> Tuple[PatronData | Patron | ProblemDetail | None, bool]: + ) -> tuple[Patron | ProblemDetail | None, bool]: """Authenticate patron with remote ekirjasto API and if necessary, create authenticated patron if not in database. @@ -627,10 +627,11 @@ def ekirjasto_authenticate( if isinstance(auth_result, PatronData): # We didn't find the patron, but authentication to external truth was # successful, so we create a new patron with the information we have. 
- patron, is_new = auth_result.get_or_create_patron( + new_patron, is_new = auth_result.get_or_create_patron( _db, self.library_id, analytics=self.analytics ) - patron.last_external_sync = utc_now() + new_patron.last_external_sync = utc_now() + return new_patron, is_new return auth_result, is_new diff --git a/api/enki.py b/api/enki.py index 9f663230f..b290d4c73 100644 --- a/api/enki.py +++ b/api/enki.py @@ -4,7 +4,8 @@ import json import logging import time -from typing import Any, Callable, Generator, Mapping, Tuple, cast +from collections.abc import Callable, Generator, Mapping +from typing import Any, cast from dependency_injector.wiring import Provide from flask_babel import lazy_gettext as _ @@ -14,6 +15,7 @@ from api.circulation import ( BaseCirculationAPI, + BaseCirculationApiSettings, FulfillmentInfo, HoldInfo, LoanInfo, @@ -65,7 +67,7 @@ class EnkiConstants: PRODUCTION_BASE_URL = "https://enkilibrary.org/API/" -class EnkiSettings(BaseSettings): +class EnkiSettings(BaseCirculationApiSettings): url: HttpUrl = FormField( default=EnkiConstants.PRODUCTION_BASE_URL, form=ConfigurationFormItem( @@ -78,7 +80,7 @@ class EnkiLibrarySettings(BaseSettings): enki_library_id: str = FormField( form=ConfigurationFormItem(label=_("Library ID"), required=True) ) - dont_display_reserves: Optional[str] = FormField( + dont_display_reserves: str | None = FormField( form=ConfigurationFormItem( label=_("Show/Hide Titles with No Available Loans"), required=False, @@ -148,7 +150,7 @@ def __init__(self, _db: Session, collection: Collection): self.collection_id = collection.id self.base_url = self.settings.url or self.PRODUCTION_BASE_URL - def enki_library_id(self, library: Library) -> Optional[str]: + def enki_library_id(self, library: Library) -> str | None: """Find the Enki library ID for the given library.""" if library.id is None: return None @@ -197,7 +199,7 @@ def count_title_changes() -> str: % library.name ) - def count_patron_loans_and_holds(patron: Patron, pin: Optional[str]) -> str: + def count_patron_loans_and_holds(patron: Patron, pin: str | None) -> str: activity = list(self.patron_activity(patron, pin)) return "Total loans and holds: %s" % len(activity) @@ -207,9 +209,9 @@ def request( self, url: str, method: str = "get", - extra_headers: Optional[Mapping[str, str]] = None, - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + extra_headers: Mapping[str, str] | None = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, retry_on_timeout: bool = True, **kwargs: Any, ) -> RequestsResponse: @@ -217,7 +219,7 @@ def request( headers = dict(extra_headers) if extra_headers else {} try: response = self._request( - method, url, headers=headers, data=data, params=params, **kwargs + url, method, headers=headers, data=data, params=params, **kwargs ) except RequestTimedOut as e: if not retry_on_timeout: @@ -246,8 +248,8 @@ def _request( url: str, method: str, headers: Mapping[str, str], - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, **kwargs: Any, ) -> RequestsResponse: """Actually make an HTTP request. 
@@ -330,7 +332,7 @@ def updated_titles( response = self.request(url, params=args) yield from BibliographicParser().process_all(response.content) - def get_item(self, enki_id: Optional[str]) -> Optional[Metadata]: + def get_item(self, enki_id: str | None) -> Metadata | None: """Retrieve bibliographic and availability information for a specific title. @@ -436,10 +438,10 @@ def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: def loan_request( self, - barcode: Optional[str], - pin: Optional[str], - book_id: Optional[str], - enki_library_id: Optional[str], + barcode: str | None, + pin: str | None, + book_id: str | None, + enki_library_id: str | None, ) -> RequestsResponse: self.log.debug("Sending checkout request for %s" % book_id) url = str(self.base_url) + str(self.user_endpoint) @@ -516,7 +518,7 @@ def parse_fulfill_result( return url, item_type, expires def patron_activity( - self, patron: Patron, pin: Optional[str] + self, patron: Patron, pin: str | None ) -> Generator[LoanInfo | HoldInfo, None, None]: enki_library_id = self.enki_library_id(patron.library) response = self.patron_request( @@ -543,7 +545,7 @@ def patron_activity( yield hold_info def patron_request( - self, patron: Optional[str], pin: Optional[str], enki_library_id: Optional[str] + self, patron: str | None, pin: str | None, enki_library_id: str | None ) -> RequestsResponse: self.log.debug("Querying Enki for information on patron %s" % patron) url = str(self.base_url) + str(self.user_endpoint) @@ -573,7 +575,7 @@ def parse_patron_loans(self, checkout_data: Mapping[str, Any]) -> LoanInfo: fulfillment_info=None, ) - def parse_patron_holds(self, hold_data: Mapping[str, Any]) -> Optional[HoldInfo]: + def parse_patron_holds(self, hold_data: Mapping[str, Any]) -> HoldInfo | None: self.log.warning( "Hold information received, but parsing patron holds is not implemented. %r", hold_data, @@ -585,7 +587,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: raise NotImplementedError() @@ -735,8 +737,8 @@ def extract_circulation( self, primary_identifier: IdentifierData, availability: Mapping[str, str], - formattype: Optional[str], - ) -> Optional[CirculationData]: + formattype: str | None, + ) -> CirculationData | None: """Turn the 'availability' portion of an Enki API response into a CirculationData. """ @@ -808,8 +810,8 @@ def collection(self) -> Collection | None: def catch_up_from( self, - start: Optional[datetime.datetime], - cutoff: Optional[datetime.datetime], + start: datetime.datetime | None, + cutoff: datetime.datetime | None, progress: TimestampData, ) -> None: """Find Enki books that changed recently. @@ -911,7 +913,7 @@ def _update_circulation( return circulation_changes - def process_book(self, bibliographic: Metadata) -> Tuple[Edition, LicensePool]: + def process_book(self, bibliographic: Metadata) -> tuple[Edition, LicensePool]: """Make the local database reflect the state of the remote Enki collection for the given book. 
@@ -955,7 +957,7 @@ def __init__( api = api_class self.api = api - def process_item(self, identifier: Identifier) -> Optional[CirculationData]: + def process_item(self, identifier: Identifier) -> CirculationData | None: self.log.debug("Seeing if %s needs reaping", identifier.identifier) metadata = self.api.get_item(identifier.identifier) if metadata: diff --git a/api/firstbook2.py b/api/firstbook2.py index f9e2dbd2f..22a3870f7 100644 --- a/api/firstbook2.py +++ b/api/firstbook2.py @@ -2,7 +2,7 @@ import re import time -from typing import Optional, Pattern, Union +from re import Pattern import jwt import requests @@ -48,7 +48,7 @@ class FirstBookAuthSettings(BasicAuthProviderSettings): weight=10, ), ) - password_regular_expression: Optional[Pattern] = FormField( + password_regular_expression: Pattern | None = FormField( re.compile(r"^[0-9]+$"), form=ConfigurationFormItem( label="Password Regular Expression", @@ -107,8 +107,8 @@ def __init__( self.secret = settings.password def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: # All FirstBook credentials are in upper-case. if username is None or username == "": return None @@ -128,8 +128,8 @@ def remote_authenticate( ) def remote_patron_lookup( - self, patron_or_patrondata: Union[PatronData, Patron] - ) -> Optional[PatronData]: + self, patron_or_patrondata: PatronData | Patron + ) -> PatronData | None: if isinstance(patron_or_patrondata, PatronData): return patron_or_patrondata diff --git a/api/integration/registry/catalog_services.py b/api/integration/registry/catalog_services.py new file mode 100644 index 000000000..f23627dbd --- /dev/null +++ b/api/integration/registry/catalog_services.py @@ -0,0 +1,9 @@ +from core.integration.goals import Goals +from core.integration.registry import IntegrationRegistry +from core.marc import MARCExporter + + +class CatalogServicesRegistry(IntegrationRegistry[MARCExporter]): + def __init__(self) -> None: + super().__init__(Goals.CATALOG_GOAL) + self.register(MARCExporter) diff --git a/api/integration/registry/license_providers.py b/api/integration/registry/license_providers.py index 134ec494e..0f47eb703 100644 --- a/api/integration/registry/license_providers.py +++ b/api/integration/registry/license_providers.py @@ -6,13 +6,10 @@ from core.integration.registry import IntegrationRegistry if TYPE_CHECKING: - from api.circulation import BaseCirculationAPI # noqa: autoflake - from core.integration.settings import BaseSettings # noqa: autoflake + from api.circulation import CirculationApiType # noqa: autoflake -class LicenseProvidersRegistry( - IntegrationRegistry["BaseCirculationAPI[BaseSettings, BaseSettings]"] -): +class LicenseProvidersRegistry(IntegrationRegistry["CirculationApiType"]): def __init__(self) -> None: super().__init__(Goals.LICENSE_GOAL) diff --git a/api/kansas_patron.py b/api/kansas_patron.py index 8d44bb32c..2fc504a58 100644 --- a/api/kansas_patron.py +++ b/api/kansas_patron.py @@ -1,5 +1,3 @@ -from typing import Optional, Type, Union - from flask_babel import lazy_gettext as _ from lxml import etree from pydantic import HttpUrl @@ -37,11 +35,11 @@ def description(cls) -> str: return "An authentication service for the Kansas State Library." 
@classmethod - def settings_class(cls) -> Type[KansasAuthSettings]: + def settings_class(cls) -> type[KansasAuthSettings]: return KansasAuthSettings @classmethod - def library_settings_class(cls) -> Type[BasicAuthProviderLibrarySettings]: + def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]: return BasicAuthProviderLibrarySettings def __init__( @@ -61,8 +59,8 @@ def __init__( # methods. def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: # Create XML doc for request authorization_request = self.create_authorize_request(username, password) # Post request to the server @@ -83,8 +81,8 @@ def remote_authenticate( ) def remote_patron_lookup( - self, patron_or_patrondata: Union[PatronData, Patron] - ) -> Optional[PatronData]: + self, patron_or_patrondata: PatronData | Patron + ) -> PatronData | None: # Kansas auth gives very little data about the patron. So this function is just a passthrough. if isinstance(patron_or_patrondata, PatronData): return patron_or_patrondata diff --git a/api/lanes.py b/api/lanes.py index 282746904..13f269bde 100644 --- a/api/lanes.py +++ b/api/lanes.py @@ -1,5 +1,4 @@ import logging -from typing import Optional import core.classifier as genres from api.config import CannotLoadConfiguration, Configuration @@ -957,8 +956,8 @@ def works(self, *args, **kwargs): class WorkBasedLane(DynamicLane): """A lane that shows works related to one particular Work.""" - DISPLAY_NAME: Optional[str] = None - ROUTE: Optional[str] = None + DISPLAY_NAME: str | None = None + ROUTE: str | None = None def __init__(self, library, work, display_name=None, children=None, **kwargs): self.work = work @@ -1361,7 +1360,15 @@ class CrawlableFacets(Facets): @classmethod def available_facets(cls, config, facet_group_name): - return [cls.SETTINGS[facet_group_name]] + facets = [cls.SETTINGS[facet_group_name]] + + if ( + facet_group_name == Facets.DISTRIBUTOR_FACETS_GROUP_NAME + or facet_group_name == Facets.COLLECTION_NAME_FACETS_GROUP_NAME + ) and config is not None: + facets.extend(config.enabled_facets(facet_group_name)) + + return facets @classmethod def default_facet(cls, config, facet_group_name): diff --git a/api/local_analytics_exporter.py b/api/local_analytics_exporter.py index bd9f28f2c..ecd85d7a7 100644 --- a/api/local_analytics_exporter.py +++ b/api/local_analytics_exporter.py @@ -14,6 +14,7 @@ Edition, Genre, Identifier, + IntegrationConfiguration, Library, LicensePool, Work, @@ -221,7 +222,7 @@ def analytics_query(self, start, end, locations=None, library=None): Edition.imprint, Edition.language, CirculationEvent.location, - Collection.name.label("collection_name"), + IntegrationConfiguration.name.label("collection_name"), Library.short_name.label("library_short_name"), Library.name.label("library_name"), Edition.medium, @@ -239,6 +240,11 @@ def analytics_query(self, start, end, locations=None, library=None): .join(Work, Work.id == LicensePool.work_id) .join(Edition, Work.presentation_edition_id == Edition.id) .join(Collection, LicensePool.collection_id == Collection.id) + .join( + IntegrationConfiguration, + Collection.integration_configuration_id + == IntegrationConfiguration.id, + ) .join(DataSource, LicensePool.data_source_id == DataSource.id) .outerjoin(Library, CirculationEvent.library_id == Library.id) ) diff --git a/api/marc.py b/api/marc.py deleted file mode 100644 index 1ad5e8c54..000000000 --- a/api/marc.py +++ /dev/null @@ 
-1,101 +0,0 @@ -import urllib.error -import urllib.parse -import urllib.request - -from pymarc import Field, Subfield -from sqlalchemy import select - -from core.config import Configuration -from core.marc import Annotator, MARCExporter -from core.model import ConfigurationSetting, Session -from core.model.discovery_service_registration import DiscoveryServiceRegistration - - -class LibraryAnnotator(Annotator): - def __init__(self, library): - super().__init__() - self.library = library - _db = Session.object_session(library) - self.base_url = ConfigurationSetting.sitewide( - _db, Configuration.BASE_URL_KEY - ).value - - def value(self, key, integration): - _db = Session.object_session(integration) - return ConfigurationSetting.for_library_and_externalintegration( - _db, key, self.library, integration - ).value - - def annotate_work_record( - self, - work, - active_license_pool, - edition, - identifier, - record, - integration=None, - updated=None, - ): - super().annotate_work_record( - work, active_license_pool, edition, identifier, record, integration, updated - ) - - if integration: - marc_org = self.value(MARCExporter.MARC_ORGANIZATION_CODE, integration) - include_summary = ( - self.value(MARCExporter.INCLUDE_SUMMARY, integration) == "true" - ) - include_genres = ( - self.value(MARCExporter.INCLUDE_SIMPLIFIED_GENRES, integration) - == "true" - ) - - if marc_org: - self.add_marc_organization_code(record, marc_org) - - if include_summary: - self.add_summary(record, work) - - if include_genres: - self.add_simplified_genres(record, work) - - self.add_web_client_urls(record, self.library, identifier, integration) - - def add_web_client_urls(self, record, library, identifier, integration=None): - _db = Session.object_session(library) - settings = [] - - if integration: - marc_setting = self.value(MARCExporter.WEB_CLIENT_URL, integration) - if marc_setting: - settings.append(marc_setting) - - settings += [ - s.web_client - for s in _db.execute( - select(DiscoveryServiceRegistration.web_client).where( - DiscoveryServiceRegistration.library == library, - DiscoveryServiceRegistration.web_client != None, - ) - ).all() - ] - - qualified_identifier = urllib.parse.quote( - identifier.type + "/" + identifier.identifier, safe="" - ) - - for web_client_base_url in settings: - link = "{}/{}/works/{}".format( - self.base_url, - library.short_name, - qualified_identifier, - ) - encoded_link = urllib.parse.quote(link, safe="") - url = f"{web_client_base_url}/book/{encoded_link}" - record.add_field( - Field( - tag="856", - indicators=["4", "0"], - subfields=[Subfield(code="u", value=url)], - ) - ) diff --git a/api/millenium_patron.py b/api/millenium_patron.py index a5085a7b8..27e702b9c 100644 --- a/api/millenium_patron.py +++ b/api/millenium_patron.py @@ -1,7 +1,6 @@ import datetime import re from enum import Enum -from typing import List, Optional, Type, Union from urllib import parse import dateutil @@ -73,7 +72,7 @@ def validate_neighborhood_mode(cls, v): ) # The field to use when seeing which values of MBLOCK[p56] mean a patron # is blocked. By default, any value other than '-' indicates a block. - block_types: Optional[str] = FormField( + block_types: str | None = FormField( None, form=ConfigurationFormItem( label="Block Types", @@ -84,7 +83,7 @@ def validate_neighborhood_mode(cls, v): # Identifiers that contain any of these strings are ignored when # finding the "correct" identifier in a patron's record, even if # it means they end up with no identifier at all. 
-    identifier_blacklist: List[str] = FormField(
+    identifier_blacklist: list[str] = FormField(
         [],
         form=ConfigurationFormItem(
             label="Identifier Blacklist",
@@ -178,11 +177,11 @@ def description(cls) -> str:
         return _("III Millenium Patron API")

     @classmethod
-    def settings_class(cls) -> Type[MilleniumPatronSettings]:
+    def settings_class(cls) -> type[MilleniumPatronSettings]:
         return MilleniumPatronSettings

     @classmethod
-    def library_settings_class(cls) -> Type[MilleniumPatronLibrarySettings]:
+    def library_settings_class(cls) -> type[MilleniumPatronLibrarySettings]:
         return MilleniumPatronLibrarySettings

     ERROR_MESSAGE_FIELD = "ERRMSG"
@@ -212,7 +211,7 @@ def __init__(
         integration_id: int,
         settings: MilleniumPatronSettings,
         library_settings: MilleniumPatronLibrarySettings,
-        analytics: Optional[Analytics] = None,
+        analytics: Analytics | None = None,
     ):
         super().__init__(
             library_id, integration_id, settings, library_settings, analytics
@@ -243,8 +242,8 @@ def _request(self, path):
         """Make an HTTP request and parse the response."""

     def remote_authenticate(
-        self, username: Optional[str], password: Optional[str]
-    ) -> Optional[PatronData]:
+        self, username: str | None, password: str | None
+    ) -> PatronData | None:
         """Does the Millenium Patron API approve of these credentials?

         :return: False if the credentials are invalid. If they are
@@ -282,8 +281,8 @@ def remote_authenticate(
         return None

     def _remote_authenticate_pintest(
-        self, username: str, password: Optional[str]
-    ) -> Optional[PatronData]:
+        self, username: str, password: str | None
+    ) -> PatronData | None:
         # Patrons are authenticated with a secret PIN.
         #
         # The PIN is URL-encoded. The username is not: as far as
@@ -330,8 +329,8 @@ def family_name_match(self, actual_name, supposed_family_name):
         return False

     def remote_patron_lookup(
-        self, patron_or_patrondata_or_identifier: Union[PatronData, Patron, str]
-    ) -> Optional[PatronData]:
+        self, patron_or_patrondata_or_identifier: PatronData | Patron | str
+    ) -> PatronData | None:
         if isinstance(patron_or_patrondata_or_identifier, str):
             identifier = patron_or_patrondata_or_identifier
         else:
@@ -394,7 +393,7 @@ def _patron_block_reason(cls, block_types, mblock_value):
         return PatronData.NO_VALUE

     @classmethod
-    def _code_from_field(cls, field_name: Optional[str]) -> Optional[str]:
+    def _code_from_field(cls, field_name: str | None) -> str | None:
         """Convert a Millenium property key to its code.

         A field name may comprise a label and a code or just a code.
diff --git a/api/model/time_tracking.py b/api/model/time_tracking.py
index 283e14df7..7a7675540 100644
--- a/api/model/time_tracking.py
+++ b/api/model/time_tracking.py
@@ -1,6 +1,6 @@
 import datetime
 import logging
-from typing import Any, Dict, List, Optional
+from typing import Any

 from pydantic import Field, validator

@@ -44,13 +44,13 @@ def validate_seconds_played(cls, value: int):


 class PlaytimeEntriesPost(CustomBaseModel):
-    book_id: Optional[str] = Field(
+    book_id: str | None = Field(
         description="An identifier of a book (currently ignored)."
     )
-    library_id: Optional[str] = Field(
+    library_id: str | None = Field(
         description="An identifier for the library (currently ignored)."
     )
-    time_entries: List[PlaytimeTimeEntry] = Field(description="A List of time entries")
+    time_entries: list[PlaytimeTimeEntry] = Field(description="A list of time entries")


 class PlaytimeEntriesPostSummary(CustomBaseModel):
@@ -60,7 +60,7 @@ class PlaytimeEntriesPostSummary(CustomBaseModel):


 class PlaytimeEntriesPostResponse(CustomBaseModel):
-    responses: List[Dict[str, Any]] = Field(
+    responses: list[dict[str, Any]] = Field(
         description="Responses as part of the multi-response"
     )
     summary: PlaytimeEntriesPostSummary = Field(
diff --git a/api/monitor.py b/api/monitor.py
index cfe361925..444a04e15 100644
--- a/api/monitor.py
+++ b/api/monitor.py
@@ -1,5 +1,3 @@
-from typing import Type
-
 from sqlalchemy import and_, or_

 from api.odl import ODLAPI
@@ -8,6 +6,7 @@
     Collection,
     ExternalIntegration,
     Hold,
+    IntegrationConfiguration,
     LicensePool,
     Loan,
 )
@@ -36,7 +35,7 @@ def where_clause(self):
         """
         source_of_truth = or_(
             LicensePool.open_access == True,
-            ExternalIntegration.protocol.in_(self.SOURCE_OF_TRUTH_PROTOCOLS),
+            IntegrationConfiguration.protocol.in_(self.SOURCE_OF_TRUTH_PROTOCOLS),
         )

         source_of_truth_subquery = (
@@ -44,8 +43,8 @@ def where_clause(self):
             .join(self.MODEL_CLASS.license_pool)
             .join(LicensePool.collection)
             .join(
-                ExternalIntegration,
-                Collection.external_integration_id == ExternalIntegration.id,
+                IntegrationConfiguration,
+                Collection.integration_configuration_id == IntegrationConfiguration.id,
             )
             .filter(source_of_truth)
         )
@@ -55,7 +54,7 @@ class LoanReaper(LoanlikeReaperMonitor):
     """Remove expired and abandoned loans from the database."""

-    MODEL_CLASS: Type[Loan] = Loan
+    MODEL_CLASS: type[Loan] = Loan
     MAX_AGE = 90

     @property
diff --git a/api/odl.py b/api/odl.py
index 22107dca8..2a79c4ec0 100644
--- a/api/odl.py
+++ b/api/odl.py
@@ -5,14 +5,15 @@
 import json
 import uuid
 from abc import ABC
-from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, TypeVar
+from collections.abc import Callable
+from typing import Any, Literal, TypeVar

 import dateutil
 from dependency_injector.wiring import Provide, inject
 from flask import url_for
 from flask_babel import lazy_gettext as _
 from lxml.etree import Element
-from pydantic import HttpUrl, PositiveInt
+from pydantic import AnyHttpUrl, HttpUrl, PositiveInt
 from requests import Response
 from sqlalchemy.sql.expression import or_
 from uritemplate import URITemplate
@@ -79,7 +80,7 @@ class ODLAPIConstants:


 class ODLSettings(OPDSImporterSettings):
-    external_account_id: Optional[HttpUrl] = FormField(
+    external_account_id: AnyHttpUrl = FormField(
         form=ConfigurationFormItem(
             label=_("ODL feed URL"),
             required=True,
@@ -101,7 +102,7 @@ class ODLSettings(OPDSImporterSettings):
         ),
     )

-    default_reservation_period: Optional[PositiveInt] = FormField(
+    default_reservation_period: PositiveInt | None = FormField(
         default=Collection.STANDARD_DEFAULT_RESERVATION_PERIOD,
         form=ConfigurationFormItem(
             label=_("Default Reservation Period (in Days)"),
@@ -226,7 +227,7 @@ def __init__(
         self._hasher_factory = HasherFactory()
         self._credential_factory = LCPCredentialFactory()

-        self._hasher_instance: Optional[Hasher] = None
+        self._hasher_instance: Hasher | None = None

     def _get_hasher(self) -> Hasher:
         """Returns a Hasher instance
@@ -241,7 +242,7 @@ def _get_hasher(self) -> Hasher:

         return self._hasher_instance

-    def _get(self, url: str, headers: Optional[Dict[str, str]] = None) -> Response:
+    def _get(self, url: str, headers: dict[str, str] | None = None) -> Response:
         """Make a normal HTTP request, but include
an authentication header with the credentials for the collection. """ @@ -258,7 +259,7 @@ def _url_for(self, *args: Any, **kwargs: Any) -> str: """Wrapper around flask's url_for to be overridden for tests.""" return url_for(*args, **kwargs) - def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: + def get_license_status_document(self, loan: Loan) -> dict[str, Any]: """Get the License Status Document for a loan. For a new loan, create a local loan with no external identifier and @@ -422,7 +423,7 @@ def checkout( ) def _checkout( - self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None + self, patron: Patron, licensepool: LicensePool, hold: Hold | None = None ) -> Loan: _db = Session.object_session(patron) @@ -510,9 +511,9 @@ def fulfill( @staticmethod def _find_content_link_and_type( - links: List[Dict[str, str]], - drm_scheme: Optional[str], - ) -> Tuple[Optional[str], Optional[str]]: + links: list[dict[str, str]], + drm_scheme: str | None, + ) -> tuple[str | None, str | None]: """Find a content link with the type information corresponding to the selected delivery mechanism. :param links: List of dict-like objects containing information about available links in the LCP license file @@ -741,7 +742,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: """Create a new hold.""" return self._place_hold(patron, licensepool) @@ -813,7 +814,7 @@ def _release_hold(self, hold: Hold) -> Literal[True]: self.update_licensepool(licensepool) return True - def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: + def patron_activity(self, patron: Patron, pin: str) -> list[LoanInfo | HoldInfo]: """Look up non-expired loans for this collection in the database.""" _db = Session.object_session(patron) loans = ( @@ -865,9 +866,7 @@ def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo] for hold in remaining_holds ] - def update_loan( - self, loan: Loan, status_doc: Optional[Dict[str, Any]] = None - ) -> None: + def update_loan(self, loan: Loan, status_doc: dict[str, Any] | None = None) -> None: """Check a loan's status, and if it is no longer active, delete the loan and update its pool's availability. 
""" @@ -919,11 +918,11 @@ class ODLAPI( """ @classmethod - def settings_class(cls) -> Type[ODLSettings]: + def settings_class(cls) -> type[ODLSettings]: return ODLSettings @classmethod - def library_settings_class(cls) -> Type[ODLLibrarySettings]: + def library_settings_class(cls) -> type[ODLLibrarySettings]: return ODLLibrarySettings @classmethod @@ -957,8 +956,8 @@ class BaseODLImporter(BaseOPDSImporter[SettingsType], ABC): @classmethod def fetch_license_info( - cls, document_link: str, do_get: Callable[..., Tuple[int, Any, bytes]] - ) -> Optional[Dict[str, Any]]: + cls, document_link: str, do_get: Callable[..., tuple[int, Any, bytes]] + ) -> dict[str, Any] | None: status_code, _, response = do_get(document_link, headers={}) if status_code in (200, 201): license_info_document = json.loads(response) @@ -973,10 +972,10 @@ def fetch_license_info( @classmethod def parse_license_info( cls, - license_info_document: Dict[str, Any], + license_info_document: dict[str, Any], license_info_link: str, - checkout_link: Optional[str], - ) -> Optional[LicenseData]: + checkout_link: str | None, + ) -> LicenseData | None: """Check the license's attributes passed as parameters: - if they're correct, turn them into a LicenseData object - otherwise, return a None @@ -1061,12 +1060,12 @@ def parse_license_info( def get_license_data( cls, license_info_link: str, - checkout_link: Optional[str], - feed_license_identifier: Optional[str], - feed_license_expires: Optional[datetime.datetime], - feed_concurrency: Optional[int], - do_get: Callable[..., Tuple[int, Any, bytes]], - ) -> Optional[LicenseData]: + checkout_link: str | None, + feed_license_identifier: str | None, + feed_license_expires: datetime.datetime | None, + feed_concurrency: int | None, + do_get: Callable[..., tuple[int, Any, bytes]], + ) -> LicenseData | None: license_info_document = cls.fetch_license_info(license_info_link, do_get) if not license_info_document: @@ -1127,7 +1126,7 @@ class ODLImporter(OPDSImporter, BaseODLImporter[ODLSettings]): LICENSE_INFO_DOCUMENT_MEDIA_TYPE = "application/vnd.odl.info+json" @classmethod - def settings_class(cls) -> Type[ODLSettings]: + def settings_class(cls) -> type[ODLSettings]: return ODLSettings @classmethod @@ -1135,9 +1134,9 @@ def _detail_for_elementtree_entry( cls, parser: OPDSXMLParser, entry_tag: Element, - feed_url: Optional[str] = None, - do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - ) -> Dict[str, Any]: + feed_url: str | None = None, + do_get: Callable[..., tuple[int, Any, bytes]] | None = None, + ) -> dict[str, Any]: do_get = do_get or Representation.cautious_http_get # TODO: Review for consistency when updated ODL spec is ready. @@ -1158,7 +1157,7 @@ def _detail_for_elementtree_entry( # By default, dcterms:format includes the media type of a # DRM-free resource. content_type = full_content_type - drm_schemes: List[str | None] = [] + drm_schemes: list[str | None] = [] # But it may instead describe an audiobook protected with # the Feedbooks access-control scheme. 
@@ -1259,7 +1258,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: type[OPDSImporter], **import_class_kwargs: Any, ): # Always force reimport ODL collections to get up to date license information @@ -1279,7 +1278,7 @@ def __init__( self, _db: Session, collection: Collection, - api: Optional[ODLAPI] = None, + api: ODLAPI | None = None, **kwargs: Any, ): super().__init__(_db, collection, **kwargs) diff --git a/api/odl2.py b/api/odl2.py index d5654bab6..74777b250 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -1,7 +1,8 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Type +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from flask_babel import lazy_gettext as _ from pydantic import PositiveInt @@ -37,8 +38,8 @@ from core.model.patron import Hold, Loan, Patron -class ODL2Settings(OPDS2ImporterSettings, ODLSettings): - skipped_license_formats: List[str] = FormField( +class ODL2Settings(ODLSettings, OPDS2ImporterSettings): + skipped_license_formats: list[str] = FormField( default=["text/html"], alias="odl2_skipped_license_formats", form=ConfigurationFormItem( @@ -51,7 +52,7 @@ class ODL2Settings(OPDS2ImporterSettings, ODLSettings): ), ) - loan_limit: Optional[PositiveInt] = FormField( + loan_limit: PositiveInt | None = FormField( default=None, alias="odl2_loan_limit", form=ConfigurationFormItem( @@ -64,7 +65,7 @@ class ODL2Settings(OPDS2ImporterSettings, ODLSettings): ), ) - hold_limit: Optional[PositiveInt] = FormField( + hold_limit: PositiveInt | None = FormField( default=None, alias="odl2_hold_limit", form=ConfigurationFormItem( @@ -80,11 +81,11 @@ class ODL2Settings(OPDS2ImporterSettings, ODLSettings): class ODL2API(BaseODLAPI[ODL2Settings, ODLLibrarySettings]): @classmethod - def settings_class(cls) -> Type[ODL2Settings]: + def settings_class(cls) -> type[ODL2Settings]: return ODL2Settings @classmethod - def library_settings_class(cls) -> Type[ODLLibrarySettings]: + def library_settings_class(cls) -> type[ODLLibrarySettings]: return ODLLibrarySettings @classmethod @@ -101,7 +102,7 @@ def __init__(self, _db: Session, collection: Collection) -> None: self.hold_limit = self.settings.hold_limit def _checkout( - self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None + self, patron: Patron, licensepool: LicensePool, hold: Hold | None = None ) -> Loan: # If the loan limit is not None or 0 if self.loan_limit: @@ -139,16 +140,16 @@ class ODL2Importer(BaseODLImporter[ODL2Settings], OPDS2Importer): NAME = ODL2API.label() @classmethod - def settings_class(cls) -> Type[ODL2Settings]: + def settings_class(cls) -> type[ODL2Settings]: return ODL2Settings def __init__( self, db: Session, collection: Collection, - parser: Optional[RWPMManifestParser] = None, + parser: RWPMManifestParser | None = None, data_source_name: str | None = None, - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): """Initialize a new instance of ODL2Importer class. @@ -183,7 +184,7 @@ def _extract_publication_metadata( self, feed: OPDS2Feed, publication: OPDS2Publication, - data_source_name: Optional[str], + data_source_name: str | None, ) -> Metadata: """Extract a Metadata object from webpub-manifest-parser's publication. 
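The base-class swap at the top of this file, `ODL2Settings(ODLSettings, OPDS2ImporterSettings)`, changes Python's method resolution order so that `ODLSettings` declarations take precedence when both bases define the same name — presumably the point of the reordering, given that `ODLSettings` now declares `external_account_id` as a required `AnyHttpUrl`. A toy illustration of the left-to-right precedence (the classes below are stand-ins, not the real settings models):

```python
class OPDS2Base:
    external_account_id = "opds2-default"

class ODLBase:
    external_account_id = "odl-default"

# Mirrors ODL2Settings(ODLSettings, OPDS2ImporterSettings): leftmost base wins.
class Combined(ODLBase, OPDS2Base):
    pass

assert [c.__name__ for c in Combined.__mro__] == [
    "Combined", "ODLBase", "OPDS2Base", "object"
]
assert Combined.external_account_id == "odl-default"
```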
@@ -254,7 +255,7 @@ def _extract_publication_metadata( if not medium: medium = Edition.medium_from_media_type(license_format) - drm_schemes: List[str | None] + drm_schemes: list[str | None] if license_format in self.LICENSE_FORMATS: # Special case to handle DeMarque audiobooks which include the protection # in the content type. When we see a license format of @@ -304,7 +305,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[ODL2Importer], + import_class: type[ODL2Importer], **import_class_kwargs: Any, ) -> None: # Always force reimport ODL collections to get up to date license information diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 04fcaea61..dce7f2ea8 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -2,7 +2,8 @@ import datetime import json -from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Set, Tuple, Type +from collections.abc import Generator +from typing import TYPE_CHECKING, Any import feedparser from flask_babel import lazy_gettext as _ @@ -14,13 +15,13 @@ LibraryAuthorizationFailedException, ) from api.selftest import HasCollectionSelfTests +from core.coverage import CoverageFailure from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField -from core.metadata_layer import FormatData, TimestampData +from core.metadata_layer import FormatData, Metadata, TimestampData from core.model import ( Collection, Credential, DeliveryMechanism, - ExternalIntegration, Hyperlink, Identifier, LicensePool, @@ -29,6 +30,7 @@ Session, get_one, ) +from core.model.constants import EditionConstants from core.opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor from core.util import base64 from core.util.datetime_helpers import utc_now @@ -85,11 +87,11 @@ class OPDSForDistributorsAPI( ] @classmethod - def settings_class(cls) -> Type[OPDSForDistributorsSettings]: + def settings_class(cls) -> type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings @classmethod - def library_settings_class(cls) -> Type[OPDSForDistributorsLibrarySettings]: + def library_settings_class(cls) -> type[OPDSForDistributorsLibrarySettings]: return OPDSForDistributorsLibrarySettings @classmethod @@ -102,24 +104,20 @@ def label(cls) -> str: def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) - self.external_integration_id = collection.external_integration.id settings = self.settings self.data_source_name = settings.data_source self.username = settings.username self.password = settings.password - self.feed_url = collection.external_account_id - self.auth_url: Optional[str] = None - - def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: - return get_one(_db, ExternalIntegration, id=self.external_integration_id) + self.feed_url = settings.external_account_id + self.auth_url: str | None = None def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: """Try to get a token.""" yield self.run_test("Negotiate a fulfillment token", self._get_token, _db) def _request_with_timeout( - self, method: str, url: Optional[str], *args: Any, **kwargs: Any + self, method: str, url: str | None, *args: Any, **kwargs: Any ) -> Response: """Wrapper around HTTP.request_with_timeout to be overridden for tests.""" if url is None: @@ -223,7 +221,7 @@ def refresh(credential: Credential) -> None: def can_fulfill_without_loan( self, - patron: Optional[Patron], + patron: Patron | None, pool: 
LicensePool, lpdm: LicensePoolDeliveryMechanism, ) -> bool: @@ -345,7 +343,7 @@ def fulfill( content_expires=credential.expires, ) - def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: + def patron_activity(self, patron: Patron, pin: str) -> list[LoanInfo | HoldInfo]: # Look up loans for this collection in the database. _db = Session.object_session(patron) loans = ( @@ -376,7 +374,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: # All the books for this integration are available as simultaneous # use, so there's no need to place a hold. @@ -390,7 +388,7 @@ class OPDSForDistributorsImporter(OPDSImporter): NAME = OPDSForDistributorsAPI.label() @classmethod - def settings_class(cls) -> Type[OPDSForDistributorsSettings]: + def settings_class(cls) -> type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings def update_work_for_edition( @@ -427,6 +425,21 @@ def _add_format_data(cls, circulation: CirculationData) -> None: ) ) + def extract_feed_data( + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: + metadatas, failures = super().extract_feed_data(feed, feed_url) + + # Force all audiobook licensepools to track playtime + for _, metadata in metadatas.items(): + if ( + metadata.medium == EditionConstants.AUDIO_MEDIUM + and metadata.circulation is not None + ): + metadata.circulation.should_track_playtime = True + + return metadatas, failures + class OPDSForDistributorsImportMonitor(OPDSImportMonitor): """Monitor an OPDS feed that requires or allows authentication, @@ -440,7 +453,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: type[OPDSImporter], **kwargs: Any, ) -> None: super().__init__(_db, collection, import_class, **kwargs) @@ -448,8 +461,8 @@ def __init__( self.api = OPDSForDistributorsAPI(_db, collection) def _get( - self, url: str, headers: Dict[str, str] - ) -> Tuple[int, Dict[str, str], bytes]: + self, url: str, headers: dict[str, str] + ) -> tuple[int, dict[str, str], bytes]: """Make a normal HTTP request for an OPDS feed, but add in an auth header with the credentials for the collection. """ @@ -472,11 +485,11 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: type[OPDSImporter], **kwargs: Any, ) -> None: super().__init__(_db, collection, import_class, **kwargs) - self.seen_identifiers: Set[str] = set() + self.seen_identifiers: set[str] = set() def feed_contains_new_data(self, feed: bytes | str) -> bool: # Always return True so that the importer will crawl the @@ -485,7 +498,7 @@ def feed_contains_new_data(self, feed: bytes | str) -> bool: def import_one_feed( self, feed: bytes | str - ) -> Tuple[List[Edition], Dict[str, List[CoverageFailure]]]: + ) -> tuple[list[Edition], dict[str, list[CoverageFailure]]]: # Collect all the identifiers in the feed. 
parsed_feed = feedparser.parse(feed) identifiers = [entry.get("id") for entry in parsed_feed.get("entries", [])] diff --git a/api/opensearch_analytics_provider.py b/api/opensearch_analytics_provider.py index b142eba6c..b10d484ec 100644 --- a/api/opensearch_analytics_provider.py +++ b/api/opensearch_analytics_provider.py @@ -1,5 +1,4 @@ import datetime -from typing import Dict, Optional from opensearch_dsl import Search from opensearchpy import OpenSearch @@ -111,8 +110,8 @@ def _create_event_object( time: datetime.datetime, old_value, new_value, - neighborhood: Optional[str] = None, - ) -> Dict: + neighborhood: str | None = None, + ) -> dict: """Create a Python dict containing required information about the event. :param library: Library associated with the event diff --git a/api/overdrive.py b/api/overdrive.py index 36959e960..9e658b340 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -9,7 +9,7 @@ import time import urllib.parse from threading import RLock -from typing import Any, Dict, List, Set, Tuple, Union, cast +from typing import Any from urllib.parse import quote, urlsplit, urlunsplit import dateutil @@ -19,12 +19,13 @@ from flask_babel import lazy_gettext as _ from requests import Response from requests.structures import CaseInsensitiveDict -from sqlalchemy.exc import NoResultFound +from sqlalchemy import select from sqlalchemy.orm import Query, Session from sqlalchemy.orm.exc import StaleDataError from api.circulation import ( BaseCirculationAPI, + BaseCirculationApiSettings, BaseCirculationEbookLoanSettings, CirculationInternalFormatsMixin, DeliveryMechanismInfo, @@ -40,7 +41,11 @@ from core.config import CannotLoadConfiguration, Configuration from core.connection_config import ConnectionSetting from core.coverage import BibliographicCoverageProvider -from core.integration.base import HasChildIntegrationConfiguration +from core.integration.base import ( + HasChildIntegrationConfiguration, + integration_settings_update, +) +from core.integration.goals import Goals from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -70,6 +75,7 @@ ExternalIntegration, Hyperlink, Identifier, + IntegrationConfiguration, LicensePool, LicensePoolDeliveryMechanism, Measurement, @@ -77,7 +83,6 @@ Patron, Representation, Subject, - get_one_or_create, ) from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor from core.scripts import InputScript, Script @@ -89,19 +94,6 @@ class OverdriveConstants: - OVERDRIVE_CLIENT_KEY = "overdrive_client_key" - OVERDRIVE_CLIENT_SECRET = "overdrive_client_secret" - OVERDRIVE_SERVER_NICKNAME = "overdrive_server_nickname" - OVERDRIVE_WEBSITE_ID = "overdrive_website_id" - - # Note that the library ID is not included here because it is not Overdrive-specific - OVERDRIVE_CONFIGURATION_KEYS = { - OVERDRIVE_CLIENT_KEY, - OVERDRIVE_CLIENT_SECRET, - OVERDRIVE_SERVER_NICKNAME, - OVERDRIVE_WEBSITE_ID, - } - PRODUCTION_SERVERS = "production" TESTING_SERVERS = "testing" @@ -134,10 +126,10 @@ class OverdriveConstants: ILS_NAME_DEFAULT = "default" -class OverdriveSettings(ConnectionSetting): +class OverdriveSettings(ConnectionSetting, BaseCirculationApiSettings): """The basic Overdrive configuration""" - external_account_id: Optional[str] = FormField( + external_account_id: str | None = FormField( form=ConfigurationFormItem( label=_("Library ID"), type=ConfigurationFormItemType.TEXT, @@ -198,7 +190,7 @@ class OverdriveLibrarySettings(BaseCirculationEbookLoanSettings): class OverdriveChildSettings(BaseSettings): - 
external_account_id: Optional[str] = FormField( + external_account_id: str | None = FormField( form=ConfigurationFormItem( label=_("Library ID"), required=True, @@ -207,10 +199,10 @@ class OverdriveChildSettings(BaseSettings): class OverdriveAPI( - PatronActivityCirculationAPI, + PatronActivityCirculationAPI[OverdriveSettings, OverdriveLibrarySettings], CirculationInternalFormatsMixin, HasCollectionSelfTests, - HasChildIntegrationConfiguration, + HasChildIntegrationConfiguration[OverdriveSettings, OverdriveChildSettings], OverdriveConstants, ): SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.FULFILL_STEP @@ -370,62 +362,40 @@ def __init__(self, _db, collection): % collection.protocol ) - _library_id = collection.external_account_id - if not _library_id: - raise ValueError( - "Collection %s must have an external account ID" % collection.id - ) - else: - self._library_id = _library_id - - self._db = _db - self._external_integration = collection.external_integration - if collection.id is None: - raise ValueError( - "Collection passed into OverdriveAPI must have an ID, but %s does not" - % collection.name - ) - self._collection_id = collection.id - - # Initialize configuration information. - self._integration_configuration_id = cast( - int, collection.integration_configuration.id - ) - self._configuration = OverdriveData() - if collection.parent: # This is an Overdrive Advantage account. - self.parent_library_id = collection.parent.external_account_id + parent_settings = self.settings_load( + collection.parent.integration_configuration + ) + self.parent_library_id = parent_settings.external_account_id # We're going to inherit all of the Overdrive credentials # from the parent (the main Overdrive account), except for the # library ID, which we already set. 
- parent_integration = collection.parent.integration_configuration - parent_config = self.settings_load(parent_integration) - for key in OverdriveConstants.OVERDRIVE_CONFIGURATION_KEYS: - parent_value = getattr(parent_config, key, None) - setattr(self._configuration, key, parent_value) + self._settings = self.settings_load( + collection.integration_configuration, + collection.parent.integration_configuration, + ) else: self.parent_library_id = None + self._settings = self.settings_load(collection.integration_configuration) - # Self settings should override parent settings where available - settings = collection.integration_configuration.settings_dict - for name, schema in self.settings_class().schema()["properties"].items(): - if name in settings or not hasattr(self._configuration, name): - setattr( - self._configuration, name, settings.get(name, schema.get("default")) - ) + self._library_id = self._settings.external_account_id + if not self._library_id: + raise ValueError( + "Collection %s must have an external account ID" % collection.id + ) - if not self._configuration.overdrive_client_key: + if not self._settings.overdrive_client_key: raise CannotLoadConfiguration("Overdrive client key is not configured") - if not self._configuration.overdrive_client_secret: + if not self._settings.overdrive_client_secret: raise CannotLoadConfiguration( "Overdrive client password/secret is not configured" ) - if not self._configuration.overdrive_website_id: + if not self._settings.overdrive_website_id: raise CannotLoadConfiguration("Overdrive website ID is not configured") - self._server_nickname = self._configuration.overdrive_server_nickname + self._server_nickname = self._settings.overdrive_server_nickname self._hosts = self._determine_hosts(server_nickname=self._server_nickname) @@ -439,11 +409,11 @@ def __init__(self, _db, collection): OverdriveBibliographicCoverageProvider(collection, api_class=self) ) - def configuration(self): - """Overdrive has a different implementation for configuration""" - return self._configuration + @property + def settings(self) -> OverdriveSettings: + return self._settings - def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: + def _determine_hosts(self, *, server_nickname: str) -> dict[str, str]: # Figure out which hostnames we'll be using when constructing # endpoint URLs. if server_nickname not in self.HOSTS: @@ -451,9 +421,6 @@ def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: return dict(self.HOSTS[server_nickname]) - def external_integration(self, db: Session) -> ExternalIntegration: - return self._external_integration - def endpoint(self, url: str, **kwargs) -> str: """Create the URL to an Overdrive API endpoint. 
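This hunk replaces the hand-rolled copying of `OVERDRIVE_CONFIGURATION_KEYS` with a single `settings_load(child_config, parent_config)` call; per the removed comment, the child's own settings override the parent's where available. A sketch of those merge semantics under that assumption (the helper and sample dicts are illustrative only, not the real implementation):

```python
from typing import Any

def merged_settings(child: dict[str, Any], parent: dict[str, Any]) -> dict[str, Any]:
    merged = dict(parent)  # inherit credentials from the main Overdrive account
    merged.update(child)   # child keys (e.g. the Advantage library ID) win
    return merged

parent = {
    "overdrive_client_key": "key",
    "overdrive_client_secret": "s3cret",
    "overdrive_website_id": "100",
    "external_account_id": "main-library",
}
child = {"external_account_id": "advantage-library"}

settings = merged_settings(child, parent)
assert settings["external_account_id"] == "advantage-library"
assert settings["overdrive_client_key"] == "key"
```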
@@ -494,10 +461,6 @@ def collection_token(self): self._collection_token = library["collectionToken"] return self._collection_token - @property - def collection(self) -> Optional[Collection]: - return Collection.by_id(self._db, id=self._collection_id) - @property def source(self): return DataSource.lookup(self._db, DataSource.OVERDRIVE) @@ -507,7 +470,7 @@ def ils_name(self, library): config = self.integration_configuration().for_library(library.id) if not config: return self.ILS_NAME_DEFAULT - return config.settings_dict.get(self.ILS_NAME_KEY, self.ILS_NAME_DEFAULT) + return self.library_settings_load(config).ils_name @property def advantage_library_id(self): @@ -567,7 +530,7 @@ def refresh_creds(self, credential): def get( self, url: str, extra_headers={}, exception_on_401=False - ) -> Tuple[int, CaseInsensitiveDict, bytes]: + ) -> tuple[int, CaseInsensitiveDict, bytes]: """Make an HTTP GET request using the active Bearer Token.""" request_headers = dict(Authorization="Bearer %s" % self.token) request_headers.update(extra_headers) @@ -622,7 +585,7 @@ def fulfillment_authorization_header(self) -> str: def token_post( self, url: str, - payload: Dict[str, str], + payload: dict[str, str], is_fulfillment=False, headers={}, **kwargs, @@ -820,30 +783,30 @@ def make_link_safe(cls, url: str) -> str: def _do_get(self, url: str, headers, **kwargs) -> Response: """This method is overridden in MockOverdriveAPI.""" url = self.endpoint(url) - kwargs["max_retry_count"] = int(self._configuration.max_retry_count) + kwargs["max_retry_count"] = self.settings.max_retry_count kwargs["timeout"] = 120 return HTTP.get_with_timeout(url, headers=headers, **kwargs) def _do_post(self, url: str, payload, headers, **kwargs) -> Response: """This method is overridden in MockOverdriveAPI.""" url = self.endpoint(url) - kwargs["max_retry_count"] = int(self._configuration.max_retry_count) + kwargs["max_retry_count"] = self.settings.max_retry_count kwargs["timeout"] = 120 return HTTP.post_with_timeout(url, payload, headers=headers, **kwargs) def website_id(self) -> bytes: - return self._configuration.overdrive_website_id.encode("utf-8") + return self.settings.overdrive_website_id.encode("utf-8") def client_key(self) -> bytes: - return self._configuration.overdrive_client_key.encode("utf-8") + return self.settings.overdrive_client_key.encode("utf-8") def client_secret(self) -> bytes: - return self._configuration.overdrive_client_secret.encode("utf-8") + return self.settings.overdrive_client_secret.encode("utf-8") def library_id(self) -> str: return self._library_id - def hosts(self) -> Dict[str, str]: + def hosts(self) -> dict[str, str]: return dict(self._hosts) def _run_self_tests(self, _db): @@ -937,7 +900,7 @@ def patron_request( return response def get_patron_credential( - self, patron: Patron, pin: Optional[str], is_fulfillment=False + self, patron: Patron, pin: str | None, is_fulfillment=False ) -> Credential: """Create an OAuth token for the given patron. @@ -968,7 +931,7 @@ def scope_string(self, library): its own Patron Authentication. 
""" return "websiteid:{} authorizationname:{}".format( - self._configuration.overdrive_website_id, + self.settings.overdrive_website_id, self.ils_name(library), ) @@ -1216,8 +1179,8 @@ def raise_exception_on_error(self, data, custom_error_to_exception={}): raise d[error](message) def get_loan( - self, patron: Patron, pin: Optional[str], overdrive_id: str - ) -> Dict[str, Any]: + self, patron: Patron, pin: str | None, overdrive_id: str + ) -> dict[str, Any]: """Get patron's loan information for the identified item. :param patron: A patron. @@ -1300,8 +1263,8 @@ def fulfill( ) def get_fulfillment_link( - self, patron: Patron, pin: Optional[str], overdrive_id: str, format_type: str - ) -> Union[OverdriveManifestFulfillmentInfo, Tuple[str, str]]: + self, patron: Patron, pin: str | None, overdrive_id: str, format_type: str + ) -> OverdriveManifestFulfillmentInfo | tuple[str, str]: """Get the link to the ACSM or manifest for an existing loan.""" try: loan = self.get_loan(patron, pin, overdrive_id) @@ -1380,7 +1343,7 @@ def get_fulfillment_link( def get_fulfillment_link_from_download_link( self, patron, pin, download_link, fulfill_url=None - ) -> Tuple[str, str]: + ) -> tuple[str, str]: # If this for Overdrive's streaming reader, and the link expires, # the patron can go back to the circulation manager fulfill url # again to get a new one. @@ -1440,9 +1403,7 @@ def get_patron_information(self, patron, pin): self.raise_exception_on_error(data) return data - def get_patron_checkouts( - self, patron: Patron, pin: Optional[str] - ) -> Dict[str, Any]: + def get_patron_checkouts(self, patron: Patron, pin: str | None) -> dict[str, Any]: """Get information for the given patron's loans. :param patron: A patron. @@ -1517,7 +1478,7 @@ def patron_activity(self, patron, pin): ) @classmethod - def process_checkout_data(cls, checkout: Dict[str, Any], collection: Collection): + def process_checkout_data(cls, checkout: dict[str, Any], collection: Collection): """Convert one checkout from Overdrive's list of checkouts into a LoanInfo object. @@ -2294,7 +2255,7 @@ def internal_formats(cls, overdrive_format): else: yield result - ignorable_overdrive_formats: Set[str] = set() + ignorable_overdrive_formats: set[str] = set() overdrive_role_to_simplified_role = { "actor": Contributor.ACTOR_ROLE, @@ -2427,8 +2388,8 @@ def book_info_to_circulation(self, book): ) def _get_applicable_accounts( - self, accounts: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + self, accounts: list[dict[str, Any]] + ) -> list[dict[str, Any]]: """ Returns those accounts from the accounts array that apply the current overdrive collection context. @@ -2494,7 +2455,7 @@ def book_info_to_metadata( # Otherwise we'll probably give it a fraction of this weight. trusted_weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT - duration: Optional[int] = None + duration: int | None = None if include_bibliographic: title = book.get("title", None) @@ -2615,7 +2576,7 @@ def book_info_to_metadata( links = [] sample_hrefs = set() for format in book.get("formats", []): - duration_str: Optional[str] = format.get("duration") + duration_str: str | None = format.get("duration") if duration_str is not None: # Using this method only the last valid duration attribute is captured # If there are multiple formats with different durations, the edition will ignore the rest @@ -2851,37 +2812,53 @@ def to_collection(self, _db): collection, Overdrive Advantage collection) """ # First find the parent Collection. 
-        try:
-            parent = (
-                Collection.by_protocol(_db, ExternalIntegration.OVERDRIVE)
-                .filter(Collection.external_account_id == self.parent_library_id)
-                .one()
+        parent = _db.execute(
+            select(Collection)
+            .join(IntegrationConfiguration)
+            .where(
+                IntegrationConfiguration.protocol == ExternalIntegration.OVERDRIVE,
+                IntegrationConfiguration.goal == Goals.LICENSE_GOAL,
+                IntegrationConfiguration.settings_dict.contains(
+                    {"external_account_id": self.parent_library_id}
+                ),
             )
-        except NoResultFound as e:
+        ).scalar_one_or_none()
+        if parent is None:
             # Without the parent's credentials we can't access the child.
             raise ValueError(
                 "Cannot create a Collection whose parent does not already exist."
             )
         name = parent.name + " / " + self.name
-        child, is_new = get_one_or_create(
-            _db,
-            Collection,
-            parent_id=parent.id,
-            external_account_id=self.library_id,
-            create_method_kwargs=dict(name=name),
-        )
-        if is_new:
-            # Make sure the child has its protocol set appropriately.
-            integration = child.create_external_integration(
-                ExternalIntegration.OVERDRIVE
+        child = _db.execute(
+            select(Collection)
+            .join(IntegrationConfiguration)
+            .where(
+                Collection.parent_id == parent.id,
+                IntegrationConfiguration.protocol == ExternalIntegration.OVERDRIVE,
+                IntegrationConfiguration.goal == Goals.LICENSE_GOAL,
+                IntegrationConfiguration.settings_dict.contains(
+                    {"external_account_id": self.library_id}
+                ),
             )
-            configuration = child.create_integration_configuration(
-                ExternalIntegration.OVERDRIVE
+        ).scalar_one_or_none()
+
+        if child is None:
+            # The child doesn't exist yet. Create it.
+            child, _ = Collection.by_name_and_protocol(
+                _db, name, ExternalIntegration.OVERDRIVE
+            )
+            child.parent = parent
+            child_settings = OverdriveChildSettings.construct(
+                external_account_id=self.library_id
             )
+            integration_settings_update(
+                OverdriveChildSettings, child.integration_configuration, child_settings
+            )
+        else:
+            # Set or update the name of the collection to reflect the name of
+            # the library, just in case that name has changed.
+            child.integration_configuration.name = name
-
-        # Set or update the name of the collection to reflect the name of
-        # the library, just in case that name has changed.
- child.name = name return parent, child @@ -2959,7 +2936,7 @@ class GenerateOverdriveAdvantageAccountList(InputScript): def __init__(self, _db=None, *args, **kwargs): super().__init__(_db, *args, **kwargs) - self._data: List[List[str]] = list() + self._data: list[list[str]] = list() def _create_overdrive_api(self, collection: Collection): return OverdriveAPI(_db=self._db, collection=collection) @@ -2971,11 +2948,11 @@ def do_run(self, *args, **kwargs): query: Query = Collection.by_protocol( self._db, protocol=ExternalIntegration.OVERDRIVE ) - for c in query.filter(Collection.parent_id == None): - collection: Collection = c + for collection in query.filter(Collection.parent_id == None): api = self._create_overdrive_api(collection=collection) client_key = api.client_key().decode() client_secret = api.client_secret().decode() + library_id = api.library_id() try: library_token = api.collection_token @@ -2986,12 +2963,15 @@ def do_run(self, *args, **kwargs): Collection.parent_id == collection.id ) already_configured_aa_libraries = [ - e.external_account_id for e in existing_child_collections + OverdriveAPI.child_settings_load( + e.integration_configuration + ).external_account_id + for e in existing_child_collections ] self._data.append( [ collection.name, - collection.external_account_id, + library_id, client_key, client_secret, library_token, @@ -3003,7 +2983,7 @@ def do_run(self, *args, **kwargs): ) except Exception as e: logging.error( - f"Could not connect to collection {c.name}: reason: {str(e)}." + f"Could not connect to collection {collection.name}: reason: {str(e)}." ) file_path = parsed.output_file_path[0] diff --git a/api/problem_details.py b/api/problem_details.py index 02731941a..d93f4a6d5 100644 --- a/api/problem_details.py +++ b/api/problem_details.py @@ -305,13 +305,6 @@ detail=_("The analytics event must be a supported type."), ) -PATRON_NOT_OPTED_IN_TO_ANNOTATION_SYNC = pd( - "http://librarysimplified.org/terms/problem/opt-in-required", - status_code=403, - title=_("Patron must opt in."), - detail=_("The patron must opt in to synchronize annotations to a server."), -) - INVALID_ANNOTATION_MOTIVATION = pd( "http://librarysimplified.org/terms/problem/invalid-annotation-motivation", status_code=400, diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index e604a9a6c..48f9038b3 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -4,7 +4,7 @@ import json import random import string -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING from core.config import CannotLoadConfiguration from core.local_analytics_provider import LocalAnalyticsProvider @@ -17,7 +17,7 @@ class S3AnalyticsProvider(LocalAnalyticsProvider): """Analytics provider storing data in a S3 bucket.""" - def __init__(self, s3_service: Optional[S3Service]): + def __init__(self, s3_service: S3Service | None): self.s3_service = s3_service @staticmethod @@ -28,8 +28,8 @@ def _create_event_object( time: datetime.datetime, old_value, new_value, - neighborhood: Optional[str] = None, - ) -> Dict: + neighborhood: str | None = None, + ) -> dict: """Create a Python dict containing required information about the event. 
:param library: Library associated with the event @@ -189,10 +189,10 @@ def collect_event( def _get_file_key( self, library: Library, - license_pool: Optional[LicensePool], + license_pool: LicensePool | None, event_type: str, end_time: datetime.datetime, - start_time: Optional[datetime.datetime] = None, + start_time: datetime.datetime | None = None, ): """The path to the analytics data file for the given library, license pool and date range.""" diff --git a/api/saml/configuration/model.py b/api/saml/configuration/model.py index d67239be7..f19f43e43 100644 --- a/api/saml/configuration/model.py +++ b/api/saml/configuration/model.py @@ -1,8 +1,8 @@ import html from datetime import datetime -from enum import Enum +from re import Pattern from threading import Lock -from typing import Any, Dict, List, Optional, Pattern, Union +from typing import Any from flask_babel import lazy_gettext as _ from onelogin.saml2.settings import OneLogin_Saml2_Settings @@ -33,6 +33,7 @@ from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, + ConfigurationFormOptionsType, FormField, SettingsValidationError, ) @@ -52,9 +53,9 @@ def __init__(self) -> None: """Initialize a new instance of FederatedIdentityProviderOptions class.""" self._mutex = Lock() self._last_updated_at = datetime.min - self._options: Dict[Union[Enum, str], str] = {} + self._options: ConfigurationFormOptionsType = {} - def __call__(self, db: Session) -> Dict[Union[Enum, str], str]: + def __call__(self, db: Session) -> ConfigurationFormOptionsType: """Get federated identity provider options.""" with self._mutex: if self._needs_refresh(db): @@ -76,7 +77,7 @@ def _needs_refresh(self, db: Session) -> bool: return needs_refresh @staticmethod - def _fetch(db: Session) -> Dict[Union[Enum, str], str]: + def _fetch(db: Session) -> ConfigurationFormOptionsType: """Fetch federated identity provider options.""" identity_providers = ( db.query( @@ -118,7 +119,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="sp_private_key", ) - federated_identity_provider_entity_ids: Optional[List[str]] = FormField( + federated_identity_provider_entity_ids: list[str] | None = FormField( None, form=ConfigurationFormItem( label="List of Federated IdPs", @@ -148,7 +149,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="saml_patron_id_use_name_id", ) - patron_id_attributes: Optional[List[str]] = FormField( + patron_id_attributes: list[str] | None = FormField( [ SAMLAttributeType.eduPersonUniqueId.name, SAMLAttributeType.eduPersonTargetedID.name, @@ -170,7 +171,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): alias="saml_patron_id_attributes", format="narrow", ) - patron_id_regular_expression: Optional[Pattern] = FormField( + patron_id_regular_expression: Pattern | None = FormField( None, form=ConfigurationFormItem( label="Patron ID: Regular expression", @@ -194,7 +195,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="saml_patron_id_regular_expression", ) - non_federated_identity_provider_xml_metadata: Optional[str] = FormField( + non_federated_identity_provider_xml_metadata: str | None = FormField( None, form=ConfigurationFormItem( label="Identity Provider's XML metadata", @@ -208,7 +209,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="idp_xml_metadata", ) - session_lifetime: Optional[PositiveInt] = FormField( + session_lifetime: PositiveInt | None = FormField( None, form=ConfigurationFormItem( 
label="Session Lifetime", @@ -226,7 +227,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="saml_session_lifetime", ) - filter_expression: Optional[str] = FormField( + filter_expression: str | None = FormField( None, form=ConfigurationFormItem( label="Filter Expression", @@ -392,17 +393,17 @@ def __init__(self, configuration: SAMLWebSSOAuthSettings): :param configuration: Configuration object containing SAML metadata """ self._configuration = configuration - self._service_provider_loaded: Optional[SAMLServiceProviderMetadata] = None - self._service_provider_settings: Optional[Dict[str, Any]] = None - self._identity_providers_loaded: Optional[ - List[SAMLIdentityProviderMetadata] - ] = None - self._identity_providers_settings: Dict[str, Dict[str, Any]] = {} + self._service_provider_loaded: SAMLServiceProviderMetadata | None = None + self._service_provider_settings: dict[str, Any] | None = None + self._identity_providers_loaded: None | ( + list[SAMLIdentityProviderMetadata] + ) = None + self._identity_providers_settings: dict[str, dict[str, Any]] = {} self._metadata_parser = SAMLMetadataParser() def _get_federated_identity_providers( self, db: Session - ) -> List[SAMLFederatedIdentityProvider]: + ) -> list[SAMLFederatedIdentityProvider]: """Return a list of federated IdPs corresponding to the entity IDs selected by the admin. :param db: Database session @@ -424,7 +425,7 @@ def _get_federated_identity_providers( def _load_identity_providers( self, db: Session - ) -> List[SAMLIdentityProviderMetadata]: + ) -> list[SAMLIdentityProviderMetadata]: """Loads IdP settings from the library's configuration settings :param db: Database session @@ -484,7 +485,7 @@ def _load_service_provider(self) -> SAMLServiceProviderMetadata: return service_provider - def get_identity_providers(self, db: Session) -> List[SAMLIdentityProviderMetadata]: + def get_identity_providers(self, db: Session) -> list[SAMLIdentityProviderMetadata]: """Returns identity providers :param db: Database session @@ -512,7 +513,7 @@ def get_service_provider(self) -> SAMLServiceProviderMetadata: def _get_identity_provider_settings( self, identity_provider: SAMLIdentityProviderMetadata - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Converts ServiceProviderMetadata object to the OneLogin's SAML Toolkit format :param identity_provider: IdentityProviderMetadata object @@ -561,7 +562,7 @@ def _get_identity_provider_settings( def _get_service_provider_settings( self, service_provider: SAMLServiceProviderMetadata - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Converts ServiceProviderMetadata object to the OneLogin's SAML Toolkit format :param service_provider: ServiceProviderMetadata object @@ -600,7 +601,7 @@ def configuration(self) -> SAMLWebSSOAuthSettings: def get_identity_provider_settings( self, db: Session, idp_entity_id: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Returns a dictionary containing identity provider's settings in a OneLogin's SAML Toolkit format :param db: Database session @@ -641,7 +642,7 @@ def get_identity_provider_settings( return identity_provider - def get_service_provider_settings(self) -> Dict[str, Any]: + def get_service_provider_settings(self) -> dict[str, Any]: """Returns a dictionary containing service provider's settings in the OneLogin's SAML Toolkit format :param db: Database session @@ -657,7 +658,7 @@ def get_service_provider_settings(self) -> Dict[str, Any]: return self._service_provider_settings - def get_settings(self, db: Session, idp_entity_id: str) -> 
Dict[str, Any]: + def get_settings(self, db: Session, idp_entity_id: str) -> dict[str, Any]: """Returns a dictionary containing SP's and IdP's settings in the OneLogin's SAML Toolkit format :param db: Database session @@ -665,7 +666,7 @@ def get_settings(self, db: Session, idp_entity_id: str) -> Dict[str, Any]: :return: Dictionary containing SP's and IdP's settings in the OneLogin's SAML Toolkit format """ - onelogin_settings: Dict[str, Any] = { + onelogin_settings: dict[str, Any] = { self.DEBUG: self._configuration.service_provider_debug_mode, self.STRICT: self._configuration.service_provider_strict_mode, } diff --git a/api/saml/credential.py b/api/saml/credential.py index f3d212baf..a08737220 100644 --- a/api/saml/credential.py +++ b/api/saml/credential.py @@ -2,7 +2,6 @@ import json import logging from copy import deepcopy -from typing import Dict, Optional import sqlalchemy @@ -87,7 +86,7 @@ def create_saml_token( db: sqlalchemy.orm.session.Session, patron: Patron, subject: SAMLSubject, - cm_session_lifetime: Optional[int] = None, + cm_session_lifetime: int | None = None, ) -> Credential: """Create a Credential object that ties the given patron to the given provider token. @@ -113,7 +112,7 @@ def create_saml_token( def lookup_saml_token_by_patron( self, db: sqlalchemy.orm.session.Session, patron: Patron - ) -> Optional[Credential]: + ) -> Credential | None: """Look up for a SAML token. :param db: Database session @@ -136,8 +135,8 @@ def lookup_saml_token_by_patron( return credential def lookup_saml_token_by_value( - self, db: sqlalchemy.orm.session.Session, token: Dict - ) -> Optional[Credential]: + self, db: sqlalchemy.orm.session.Session, token: dict + ) -> Credential | None: """Look up for a SAML token. :param db: Database session diff --git a/api/saml/metadata/federations/model.py b/api/saml/metadata/federations/model.py index 4e40b9b3d..2bcde0f1d 100644 --- a/api/saml/metadata/federations/model.py +++ b/api/saml/metadata/federations/model.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text from sqlalchemy.orm import Mapped, relationship @@ -20,7 +18,7 @@ class SAMLFederation(Base): certificate = Column(Text(), nullable=True) - identity_providers: Mapped[List[SAMLFederatedIdentityProvider]] = relationship( + identity_providers: Mapped[list[SAMLFederatedIdentityProvider]] = relationship( "SAMLFederatedIdentityProvider", back_populates="federation" ) diff --git a/api/saml/metadata/federations/validator.py b/api/saml/metadata/federations/validator.py index 09f2f39c6..1fab512c6 100644 --- a/api/saml/metadata/federations/validator.py +++ b/api/saml/metadata/federations/validator.py @@ -1,7 +1,6 @@ import datetime import logging from abc import ABCMeta -from typing import Union from onelogin.saml2.utils import OneLogin_Saml2_Utils from onelogin.saml2.xmlparser import fromstring @@ -92,7 +91,7 @@ def _parse_saml_date_time(saml_date_time): return parsed_date_time - def validate(self, federation: SAMLFederation, metadata: Union[str, bytes]) -> None: + def validate(self, federation: SAMLFederation, metadata: str | bytes) -> None: """Verify that federated SAML metadata has not expired. 
:param federation: SAML federation diff --git a/api/saml/metadata/model.py b/api/saml/metadata/model.py index 767d5e6aa..8a7bae5fc 100644 --- a/api/saml/metadata/model.py +++ b/api/saml/metadata/model.py @@ -3,7 +3,8 @@ from enum import Enum from json import JSONDecoder, JSONEncoder from json.decoder import WHITESPACE # type: ignore -from typing import Any, List, Optional, Pattern, Union +from re import Pattern +from typing import Any from onelogin.saml2.constants import OneLogin_Saml2_Constants @@ -786,7 +787,7 @@ def __init__( self, name_format: str, name_qualifier: str, - sp_name_qualifier: Optional[str], + sp_name_qualifier: str | None, name_id: str, ) -> None: """Initializes a new instance of NameID class @@ -850,7 +851,7 @@ def name_qualifier(self) -> str: return self._name_qualifier @property - def sp_name_qualifier(self) -> Optional[str]: + def sp_name_qualifier(self) -> str | None: """Returns the attribute that further qualifies a federated name identifier with the name of the service provider or affiliation of providers which has federated the principal's identity @@ -1045,9 +1046,9 @@ class SAMLSubject: def __init__( self, idp: str, - name_id: Optional[SAMLNameID], - attribute_statement: Optional[SAMLAttributeStatement], - valid_till: Optional[Union[datetime.datetime, datetime.timedelta, int]] = None, + name_id: SAMLNameID | None, + attribute_statement: SAMLAttributeStatement | None, + valid_till: datetime.datetime | datetime.timedelta | int | None = None, ): """Initializes a new instance of Subject class @@ -1060,7 +1061,7 @@ def __init__( - https://wiki.shibboleth.net/confluence/display/IDP30/SessionConfiguration """ self._idp = idp - self._name_id: Optional[SAMLNameID] = name_id + self._name_id: SAMLNameID | None = name_id self._attribute_statement = attribute_statement self._valid_till = valid_till @@ -1112,7 +1113,7 @@ def idp(self) -> str: return self._idp @property - def name_id(self) -> Optional[SAMLNameID]: + def name_id(self) -> SAMLNameID | None: """Return the name ID. :return: Name ID @@ -1120,7 +1121,7 @@ def name_id(self) -> Optional[SAMLNameID]: return self._name_id @name_id.setter - def name_id(self, value: Optional[SAMLNameID]) -> None: + def name_id(self, value: SAMLNameID | None) -> None: """Set the name ID. :param value: New name ID @@ -1290,8 +1291,8 @@ class SAMLSubjectPatronIDExtractor: def __init__( self, use_name_id: bool = True, - attributes: Optional[List[str]] = None, - regular_expression: Optional[Pattern] = None, + attributes: list[str] | None = None, + regular_expression: Pattern | None = None, ): """Initialize a new instance of SAMLSubjectPatronIDExtractor class. 
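Nearly every hunk from here on is the same mechanical modernization: typing.Optional, typing.Union and the typing.List/Dict/Tuple aliases are rewritten as PEP 604 unions and PEP 585 builtin generics, spellings that require Python 3.10 or newer. A minimal sketch of the equivalence, with illustrative names not taken from this codebase; the parenthesized None | (...) form that appears on a few multi-line annotations below is the same union, only wrapped:

from typing import Dict, List, Optional, Union  # legacy spellings; unnecessary on 3.10+


def first_id_legacy(ids: Dict[str, List[int]], key: Optional[str]) -> Union[int, None]:
    return ids[key][0] if key and ids.get(key) else None


def first_id(ids: dict[str, list[int]], key: str | None) -> int | None:
    # Same runtime behaviour as the legacy version; no typing imports required.
    return ids[key][0] if key and ids.get(key) else None


assert first_id({"a": [1, 2]}, "a") == first_id_legacy({"a": [1, 2]}, "a") == 1
assert first_id({}, None) is None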
diff --git a/api/saml/metadata/parser.py b/api/saml/metadata/parser.py index 946b9950d..d4f14a31d 100644 --- a/api/saml/metadata/parser.py +++ b/api/saml/metadata/parser.py @@ -1,5 +1,4 @@ import logging -from typing import Union from flask_babel import lazy_gettext as _ from lxml.etree import XMLSyntaxError @@ -90,7 +89,7 @@ def __init__(self, skip_incorrect_providers=False): ] = OneLogin_Saml2_Constants.NS_ALG def _convert_xml_string_to_dom( - self, xml_metadata: Union[str, bytes] + self, xml_metadata: str | bytes ) -> RestrictedElement: """Converts an XML string containing SAML metadata into XML DOM diff --git a/api/saml/provider.py b/api/saml/provider.py index 17271dc13..819b4e058 100644 --- a/api/saml/provider.py +++ b/api/saml/provider.py @@ -1,5 +1,3 @@ -from typing import Optional, Type - from flask import url_for from flask_babel import lazy_gettext as _ from werkzeug.datastructures import Authorization @@ -45,7 +43,7 @@ def __init__( integration_id: int, settings: SAMLWebSSOAuthSettings, library_settings: SAMLWebSSOAuthLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): """Initializes a new instance of SAMLAuthenticationProvider class""" super().__init__( @@ -76,14 +74,14 @@ def identifies_individuals(self): return True @classmethod - def settings_class(cls) -> Type[SAMLWebSSOAuthSettings]: + def settings_class(cls) -> type[SAMLWebSSOAuthSettings]: return SAMLWebSSOAuthSettings @classmethod - def library_settings_class(cls) -> Type[SAMLWebSSOAuthLibrarySettings]: + def library_settings_class(cls) -> type[SAMLWebSSOAuthLibrarySettings]: return SAMLWebSSOAuthLibrarySettings - def get_credential_from_header(self, auth: Authorization) -> Optional[str]: + def get_credential_from_header(self, auth: Authorization) -> str | None: # We cannot extract the credential from the header, so we just return None return None diff --git a/api/selftest.py b/api/selftest.py index c3eb5e5be..8a5d4b048 100644 --- a/api/selftest.py +++ b/api/selftest.py @@ -1,7 +1,7 @@ from __future__ import annotations -from abc import ABC -from typing import Generator, Iterable, Optional, Tuple, Union +from abc import ABC, abstractmethod +from collections.abc import Generator, Iterable from sqlalchemy.orm.session import Session @@ -30,7 +30,7 @@ class _NoValidLibrarySelfTestPatron(BaseError): detail (optional) -- additional explanation of the error """ - def __init__(self, message: Optional[str], *, detail: Optional[str] = None): + def __init__(self, message: str | None, *, detail: str | None = None): super().__init__(message=message) self.message = message self.detail = detail @@ -38,7 +38,7 @@ def __init__(self, message: Optional[str], *, detail: Optional[str] = None): @classmethod def default_patrons( cls, collection: Collection - ) -> Iterable[Union[Tuple[Library, Patron, Optional[str]], SelfTestResult]]: + ) -> Iterable[tuple[Library, Patron, str | None] | SelfTestResult]: """Find a usable default Patron for each of the libraries associated with the given Collection. @@ -74,7 +74,7 @@ def default_patrons( @classmethod def _determine_self_test_patron( cls, library: Library, _db=None - ) -> Tuple[Patron, Optional[str]]: + ) -> tuple[Patron, str | None]: """Obtain the test Patron and optional password for a library's self-tests. :param library: The library being tested. @@ -96,8 +96,8 @@ def _determine_self_test_patron( # If we get here, then we have failed to find a valid test patron # and will raise an exception. 
- message: Optional[str] - detail: Optional[str] + message: str | None + detail: str | None if patron is None: message = "Library has no test patron configured." detail = ( @@ -120,7 +120,9 @@ class HasSelfTests(CoreHasSelfTests, HasPatronSelfTests): """Circulation specific self-tests, with the external integration paradigm""" -class HasCollectionSelfTests(HasSelfTestsIntegrationConfiguration, HasPatronSelfTests): +class HasCollectionSelfTests( + HasSelfTestsIntegrationConfiguration, HasPatronSelfTests, ABC +): """Extra tests to verify the integrity of imported collections of books. @@ -128,7 +130,14 @@ class HasCollectionSelfTests(HasSelfTestsIntegrationConfiguration, HasPatronSelf point to the Collection to be tested. """ + @property + @abstractmethod + def collection(self) -> Collection | None: + ... + def integration(self, _db: Session) -> IntegrationConfiguration | None: + if not self.collection: + return None return self.collection.integration_configuration def _no_delivery_mechanisms_test(self): diff --git a/api/simple_authentication.py b/api/simple_authentication.py index 1cdc5b0b8..675e72027 100644 --- a/api/simple_authentication.py +++ b/api/simple_authentication.py @@ -1,5 +1,3 @@ -from typing import List, Optional, Type, Union - from api.authentication.base import PatronData from api.authentication.basic import ( BasicAuthenticationProvider, @@ -32,7 +30,7 @@ class SimpleAuthSettings(BasicAuthProviderSettings): description="A test password to use when testing the authentication provider.", ), ) - additional_test_identifiers: Optional[List[str]] = FormField( + additional_test_identifiers: list[str] | None = FormField( None, form=ConfigurationFormItem( label="Additional test identifiers", @@ -41,7 +39,7 @@ class SimpleAuthSettings(BasicAuthProviderSettings): type=ConfigurationFormItemType.LIST, ), ) - neighborhood: Optional[str] = FormField( + neighborhood: str | None = FormField( None, form=ConfigurationFormItem( label="Test neighborhood", @@ -72,11 +70,11 @@ def description(cls) -> str: ) @classmethod - def settings_class(cls) -> Type[SimpleAuthSettings]: + def settings_class(cls) -> type[SimpleAuthSettings]: return SimpleAuthSettings @classmethod - def library_settings_class(cls) -> Type[BasicAuthProviderLibrarySettings]: + def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]: return BasicAuthProviderLibrarySettings def __init__( @@ -85,7 +83,7 @@ def __init__( integration_id: int, settings: SimpleAuthSettings, library_settings: BasicAuthProviderLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): super().__init__( library_id, integration_id, settings, library_settings, analytics @@ -105,8 +103,8 @@ def __init__( self.test_neighborhood = settings.neighborhood def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: """Fake 'remote' authentication.""" if not username or (self.collects_password and not password): return None @@ -118,7 +116,7 @@ def remote_authenticate( @classmethod def generate_patrondata( - cls, authorization_identifier: str, neighborhood: Optional[str] = None + cls, authorization_identifier: str, neighborhood: str | None = None ) -> PatronData: if authorization_identifier.endswith("_username"): username = authorization_identifier @@ -140,7 +138,7 @@ def generate_patrondata( ) return patrondata - def valid_patron(self, username: str, password: Optional[str]) -> bool: 
+ def valid_patron(self, username: str, password: str | None) -> bool: """Is this patron associated with the given password in the given dictionary? """ @@ -151,8 +149,8 @@ def valid_patron(self, username: str, password: Optional[str]) -> bool: return password_match and username in self.test_identifiers def remote_patron_lookup( - self, patron_or_patrondata: Union[Patron, PatronData] - ) -> Optional[PatronData]: + self, patron_or_patrondata: Patron | PatronData + ) -> PatronData | None: if not patron_or_patrondata: return None if ( diff --git a/api/sip/__init__.py b/api/sip/__init__.py index e42bb96b2..072fb43c8 100644 --- a/api/sip/__init__.py +++ b/api/sip/__init__.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import json +from collections.abc import Callable from datetime import datetime -from typing import Callable, Optional, Type, Union +from typing import Any, Dict, Optional, Type, Union from pydantic import Field, PositiveInt @@ -10,6 +13,7 @@ BasicAuthProviderLibrarySettings, BasicAuthProviderSettings, ) +from api.problem_details import INVALID_CREDENTIALS from api.sip.client import Sip2Encoding, SIPClient from api.sip.dialect import Dialect as Sip2Dialect from core.analytics import Analytics @@ -21,6 +25,7 @@ from core.model import Patron from core.util import MoneyUtility from core.util.http import RemoteIntegrationException +from core.util.problem_detail import ProblemDetail, ProblemError class SIP2Settings(BasicAuthProviderSettings): @@ -40,14 +45,14 @@ class SIP2Settings(BasicAuthProviderSettings): # This is _not_ a patron identifier (SIP field AA); it identifies the SC # creating the SIP session. SIP2 defines SC as "...any library automation # device dealing with patrons or library materials." - username: Optional[str] = FormField( + username: str | None = FormField( None, form=ConfigurationFormItem( label="Login User ID", ), ) # Sip field CO; the password to use when initiating a SIP session, if necessary. - password: Optional[str] = FormField( + password: str | None = FormField( None, form=ConfigurationFormItem( label="Login Password", @@ -58,7 +63,7 @@ class SIP2Settings(BasicAuthProviderSettings): # machine within a library system. Some libraries require a special location # code to be provided when authenticating patrons; others may require the # circulation manager to be treated as its own special 'location'. - location_code: Optional[str] = FormField( + location_code: str | None = FormField( None, form=ConfigurationFormItem( label="Location Code", @@ -125,7 +130,7 @@ class SIP2Settings(BasicAuthProviderSettings): required=True, ), ) - ssl_certificate: Optional[str] = FormField( + ssl_certificate: str | None = FormField( None, form=ConfigurationFormItem( label="SSL Certificate", @@ -139,7 +144,7 @@ class SIP2Settings(BasicAuthProviderSettings): type=ConfigurationFormItemType.TEXTAREA, ), ) - ssl_key: Optional[str] = FormField( + ssl_key: str | None = FormField( None, form=ConfigurationFormItem( label="SSL Key", @@ -181,7 +186,7 @@ class SIP2Settings(BasicAuthProviderSettings): class SIP2LibrarySettings(BasicAuthProviderLibrarySettings): # Used as the SIP2 AO field. 
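For context on that AO field: in SIP2 it carries the institution id on every message of the session. A rough sketch of a Patron Information request (message 63) with invented values; error-detection fields (AY sequence, AZ checksum) are omitted, and the exact fixed-field layout should be checked against the SIP2 specification:

# Header: "63" + 3-char language + 18-char timestamp + 10-char summary,
# followed by pipe-terminated variable-length fields.
request = (
    "63001"               # message id 63, language code 001 (English)
    "20240101    120000"  # transaction date: YYYYMMDD, four blanks, HHMMSS
    "          "          # summary: ten blanks = no detailed item lists
    "AOhelsinki|"         # AO institution id, i.e. this settings field
    "AA1234567890|"       # AA patron identifier (barcode)
    "AD0000|"             # AD patron password/PIN
)
assert len("20240101    120000") == 18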
- institution_id: Optional[str] = FormField( + institution_id: str | None = FormField( None, form=ConfigurationFormItem( label="Institution ID", @@ -216,8 +221,8 @@ def __init__( integration_id: int, settings: SIP2Settings, library_settings: SIP2LibrarySettings, - analytics: Optional[Analytics] = None, - client: Optional[Callable[..., SIPClient]] = None, + analytics: Analytics | None = None, + client: Callable[..., SIPClient] | None = None, ): """An object capable of communicating with a SIP server.""" super().__init__( @@ -280,14 +285,16 @@ def description(cls) -> str: return "SIP2 Patron Authentication" @classmethod - def settings_class(cls) -> Type[SIP2Settings]: + def settings_class(cls) -> type[SIP2Settings]: return SIP2Settings @classmethod - def library_settings_class(cls) -> Type[SIP2LibrarySettings]: + def library_settings_class(cls) -> type[SIP2LibrarySettings]: return SIP2LibrarySettings - def patron_information(self, username, password): + def patron_information( + self, username: str | None, password: str | None + ) -> dict[str, Any] | ProblemDetail: try: sip = self.client sip.connect() @@ -298,19 +305,23 @@ def patron_information(self, username, password): return info except OSError as e: - raise RemoteIntegrationException(self.server or "unknown server", str(e)) + server_name = self.server or "unknown server" + self.log.warning(f"SIP2 error ({server_name}): {str(e)}", exc_info=e) + return INVALID_CREDENTIALS.detailed( + f"Error contacting authentication server ({server_name}). Please try again later." + ) def remote_patron_lookup( - self, patron_or_patrondata: Union[PatronData, Patron] - ) -> Optional[PatronData]: + self, patron_or_patrondata: PatronData | Patron + ) -> PatronData | None | ProblemDetail: info = self.patron_information( patron_or_patrondata.authorization_identifier, None ) return self.info_to_patrondata(info, False) def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None | ProblemDetail: """Authenticate a patron with the SIP2 server. :param username: The patron's username/barcode/card @@ -371,11 +382,16 @@ def raw_patron_information(): ("Raw test patron information"), raw_patron_information ) - def info_to_patrondata(self, info, validate_password=True) -> Optional[PatronData]: + def info_to_patrondata( + self, info: dict[str, Any] | ProblemDetail, validate_password: bool = True + ) -> PatronData | None | ProblemDetail: """Convert the SIP-specific dictionary obtained from SIPClient.patron_information() to an abstract, authenticator-independent PatronData object. """ + if isinstance(info, ProblemDetail): + return info + if info.get("valid_patron", "N") == "N": # The patron could not be identified as a patron of this # library. Don't return any data. @@ -428,12 +444,12 @@ def info_to_patrondata(self, info, validate_password=True) -> Optional[PatronDat def info_to_patrondata_block_reason( self, info, patrondata: PatronData - ) -> Union[PatronData.NoValue, str]: + ) -> PatronData.NoValue | str: # A True value in most (but not all) subfields of the # patron_status field will prohibit the patron from borrowing # books. 
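Reduced to a standalone sketch, the loop that follows behaves like the function below; the field names and the fallback reason are stand-ins, not the module's real constants:

NO_VALUE = object()  # stand-in for PatronData.NO_VALUE
SPECIFIC_BLOCK_REASONS = {"excessive fines": "too-many-fines"}  # illustrative
FIELDS_THAT_DENY_BORROWING = ("charge privileges denied", "excessive fines")


def block_reason_for(status: dict[str, bool]) -> object | str:
    reason: object | str = NO_VALUE
    for field in FIELDS_THAT_DENY_BORROWING:
        if status.get(field) is True:
            reason = SPECIFIC_BLOCK_REASONS.get(field, "generic-block")
    return reason


assert block_reason_for({"charge privileges denied": False}) is NO_VALUE
assert block_reason_for({"excessive fines": True}) == "too-many-fines"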
status = info["patron_status_parsed"] - block_reason: Union[str, PatronData.NoValue] = PatronData.NO_VALUE + block_reason: str | PatronData.NoValue = PatronData.NO_VALUE for field in self.fields_that_deny_borrowing: if status.get(field) is True: block_reason = self.SPECIFIC_BLOCK_REASONS.get( diff --git a/api/sip/client.py b/api/sip/client.py index d9aed2c7a..97fd533f2 100644 --- a/api/sip/client.py +++ b/api/sip/client.py @@ -32,8 +32,8 @@ import ssl import tempfile import time +from collections.abc import Callable from enum import Enum -from typing import Callable, Optional import certifi @@ -99,9 +99,9 @@ def __init__( internal_name: str, sip_code: str, required=False, - length: Optional[int] = None, + length: int | None = None, allow_multiple=False, - log: Optional[logging.Logger] = None, + log: logging.Logger | None = None, ): self.sip_code = sip_code self.internal_name = internal_name diff --git a/api/sirsidynix_authentication_provider.py b/api/sirsidynix_authentication_provider.py index d096df38e..e424fb5fe 100644 --- a/api/sirsidynix_authentication_provider.py +++ b/api/sirsidynix_authentication_provider.py @@ -2,7 +2,7 @@ import os from gettext import gettext as _ -from typing import TYPE_CHECKING, List, Literal, Optional, Union +from typing import TYPE_CHECKING, Literal from urllib.parse import urljoin from pydantic import HttpUrl @@ -61,7 +61,7 @@ class SirsiDynixHorizonAuthLibrarySettings(BasicAuthProviderLibrarySettings): ), alias="LIBRARY_ID", ) - library_disallowed_suffixes: List[str] = FormField( + library_disallowed_suffixes: list[str] = FormField( [], form=ConfigurationFormItem( label="Disallowed Patron Suffixes", @@ -128,7 +128,7 @@ def __init__( integration_id: int, settings: SirsiDynixHorizonAuthSettings, library_settings: SirsiDynixHorizonAuthLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): super().__init__( library_id, integration_id, settings, library_settings, analytics @@ -289,7 +289,7 @@ def api_patron_login(self, username: str, password: str) -> Literal[False] | dic def api_read_patron_data( self, patron_key: str, session_token: str - ) -> Union[Literal[False], dict]: + ) -> Literal[False] | dict: """API request to pull basic patron information :param patron_key: The permanent external identifier for a patron @@ -307,7 +307,7 @@ def api_read_patron_data( def api_patron_status_info( self, patron_key: str, session_token: str - ) -> Union[Literal[False], dict]: + ) -> Literal[False] | dict: """API request to pull patron status information, like fines :param patron_key: The permanent external identifier for a patron diff --git a/api/util/flask.py b/api/util/flask.py index 88ff868d3..bcf5d0f0d 100644 --- a/api/util/flask.py +++ b/api/util/flask.py @@ -1,7 +1,7 @@ from flask import Flask from sqlalchemy.orm import Session -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager class PalaceFlask(Flask): diff --git a/api/util/profilers.py b/api/util/profilers.py index 6337f1f1a..de975c69b 100644 --- a/api/util/profilers.py +++ b/api/util/profilers.py @@ -2,7 +2,6 @@ import os import time from pathlib import Path -from typing import Optional from flask import Flask, g, request @@ -16,7 +15,7 @@ def enabled(cls) -> bool: return os.environ.get(cls.ENVIRONMENT_VARIABLE, None) is not None @classmethod - def create_profile_dir(cls) -> Optional[Path]: + def create_profile_dir(cls) -> Path | None: if not cls.enabled(): return None diff --git a/api/util/xray.py b/api/util/xray.py 
index 20d6f2cd1..8bb728df7 100644 --- a/api/util/xray.py +++ b/api/util/xray.py @@ -1,5 +1,4 @@ import os -from typing import Optional from aws_xray_sdk.core import AWSXRayRecorder from aws_xray_sdk.core import patch as xray_patch @@ -17,7 +16,7 @@ class PalaceXrayMiddleware(XRayMiddleware): XRAY_ENV_PATRON_BARCODE = "PALACE_XRAY_INCLUDE_BARCODE" @classmethod - def put_annotations(cls, segment: Segment, seg_type: Optional[str] = None): + def put_annotations(cls, segment: Segment, seg_type: str | None = None): if seg_type is not None: segment.put_annotation("type", seg_type) diff --git a/bin/marc_record_coverage b/bin/marc_record_coverage deleted file mode 100755 index 45a6c7992..000000000 --- a/bin/marc_record_coverage +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python -"""Make sure all presentation-ready works have up-to-date MARC records.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) -from core.coverage import MARCRecordWorkCoverageProvider -from core.scripts import RunWorkCoverageProviderScript - -RunWorkCoverageProviderScript(MARCRecordWorkCoverageProvider).run() diff --git a/bin/opds2_import_monitor b/bin/opds2_import_monitor index 3223ba6cd..57fe72cb5 100755 --- a/bin/opds2_import_monitor +++ b/bin/opds2_import_monitor @@ -7,17 +7,21 @@ bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) -from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory from core.model import ExternalIntegration -from core.opds2_import import OPDS2Importer, OPDS2ImportMonitor, RWPMManifestParser +from core.opds2_import import ( + OPDS2Importer, + OPDS2ImportMonitor, + PalaceOPDS2FeedParserFactory, + RWPMManifestParser, +) from core.scripts import OPDSImportScript import_script = OPDSImportScript( importer_class=OPDS2Importer, monitor_class=OPDS2ImportMonitor, protocol=ExternalIntegration.OPDS2_IMPORT, - parser=RWPMManifestParser(OPDS2FeedParserFactory()), + parser=RWPMManifestParser(PalaceOPDS2FeedParserFactory()), ) import_script.run() diff --git a/core/analytics.py b/core/analytics.py index ad9fd2008..0af636075 100644 --- a/core/analytics.py +++ b/core/analytics.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from api.opensearch_analytics_provider import OpenSearchAnalyticsProvider # Finland from api.s3_analytics_provider import S3AnalyticsProvider @@ -18,10 +18,10 @@ class Analytics(LoggerMixin): def __init__( self, s3_analytics_enabled: bool = False, - s3_service: Optional[S3Service] = None, + s3_service: S3Service | None = None, opensearch_analytics_enabled: bool = False, - opensearch_analytics_url=Optional[str], - opensearch_analytics_index_prefix=Optional[str], + opensearch_analytics_url: str | None = None, + opensearch_analytics_index_prefix: str | None = None, ) -> None: self.providers = [LocalAnalyticsProvider()] diff --git a/core/config.py b/core/config.py index e728877b5..abed6d390 100644 --- a/core/config.py +++ b/core/config.py @@ -1,7 +1,6 @@ import json import logging import os -from typing import Dict, List from flask_babel import lazy_gettext as _ from sqlalchemy.engine.url import make_url @@ -233,7 +232,7 @@ def basic_token_auth_is_enabled(cls) -> bool: ) from e @classmethod - def fcm_credentials(cls) -> Dict[str, str]: + def fcm_credentials(cls) -> dict[str, str]: """Returns a dictionary containing Firebase Cloud 
Messaging credentials. Credentials are provided as a JSON string, either (1) directly in an environment @@ -276,7 +275,7 @@ def fcm_credentials(cls) -> Dict[str, str]: ) @classmethod - def overdrive_fulfillment_keys(cls, testing=False) -> Dict[str, str]: + def overdrive_fulfillment_keys(cls, testing=False) -> dict[str, str]: prefix = ( cls.OD_PREFIX_TESTING_PREFIX if testing else cls.OD_PREFIX_PRODUCTION_PREFIX ) @@ -289,7 +288,7 @@ def overdrive_fulfillment_keys(cls, testing=False) -> Dict[str, str]: return {"key": key, "secret": secret} @classmethod - def quicksight_authorized_arns(cls) -> Dict[str, List[str]]: + def quicksight_authorized_arns(cls) -> dict[str, list[str]]: """Split the comma separated arns""" arns_str = os.environ.get(cls.QUICKSIGHT_AUTHORIZED_ARNS_KEY, "") return json.loads(arns_str) diff --git a/core/configuration/library.py b/core/configuration/library.py index 410417885..eddf736b9 100644 --- a/core/configuration/library.py +++ b/core/configuration/library.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import IntEnum -from typing import Any, Dict, List, Optional, Tuple +from typing import Any import wcag_contrast_ratio from pydantic import ( @@ -55,13 +55,13 @@ class Level(IntEnum): class LibraryConfFormItem(ConfigurationFormItem): category: str = "Basic Information" level: Level = Level.ALL_ACCESS - read_only: Optional[bool] = None - skip: Optional[bool] = None - paired: Optional[str] = None + read_only: bool | None = None + skip: bool | None = None + paired: str | None = None def to_dict( self, db: Session, key: str, required: bool = False, default: Any = None - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """Serialize additional form items specific to library settings.""" weight, item = super().to_dict(db, key, required, default) item["category"] = self.category @@ -144,7 +144,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_ONLY, ), ) - enabled_entry_points: List[str] = FormField( + enabled_entry_points: list[str] = FormField( [x.INTERNAL_NAME for x in EntryPoint.DEFAULT_ENABLED], form=LibraryConfFormItem( label="Enabled entry points", @@ -179,7 +179,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - facets_enabled_order: List[str] = FormField( + facets_enabled_order: list[str] = FormField( FacetConstants.DEFAULT_ENABLED_FACETS[FacetConstants.ORDER_FACET_GROUP_NAME], form=LibraryConfFormItem( label="Allow patrons to sort by", @@ -206,7 +206,7 @@ class LibrarySettings(BaseSettings): skip=True, ), ) - facets_enabled_available: List[str] = FormField( + facets_enabled_available: list[str] = FormField( FacetConstants.DEFAULT_ENABLED_FACETS[ FacetConstants.AVAILABILITY_FACET_GROUP_NAME ], @@ -235,7 +235,7 @@ class LibrarySettings(BaseSettings): skip=True, ), ) - facets_enabled_collection: List[str] = FormField( + facets_enabled_collection: list[str] = FormField( FacetConstants.DEFAULT_ENABLED_FACETS[ FacetConstants.COLLECTION_FACET_GROUP_NAME ], @@ -264,7 +264,7 @@ class LibrarySettings(BaseSettings): skip=True, ), ) - library_description: Optional[str] = FormField( + library_description: str | None = FormField( None, form=LibraryConfFormItem( label="A short description of this library", @@ -273,7 +273,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_ONLY, ), ) - help_email: Optional[EmailStr] = FormField( + help_email: EmailStr | None = FormField( None, form=LibraryConfFormItem( label="Patron support email address", @@ -284,7 +284,7 @@ class LibrarySettings(BaseSettings): ), 
alias="help-email", ) - help_web: Optional[HttpUrl] = FormField( + help_web: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Patron support website", @@ -295,7 +295,7 @@ class LibrarySettings(BaseSettings): ), alias="help-web", ) - copyright_designated_agent_email_address: Optional[EmailStr] = FormField( + copyright_designated_agent_email_address: EmailStr | None = FormField( None, form=LibraryConfFormItem( label="Copyright designated agent email", @@ -307,7 +307,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_OR_MANAGER, ), ) - configuration_contact_email_address: Optional[EmailStr] = FormField( + configuration_contact_email_address: EmailStr | None = FormField( None, form=LibraryConfFormItem( label="A point of contact for the organization responsible for configuring this library", @@ -388,7 +388,7 @@ class LibrarySettings(BaseSettings): ), alias="web-secondary-color", ) - web_css_file: Optional[HttpUrl] = FormField( + web_css_file: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Custom CSS file for web", @@ -398,7 +398,7 @@ class LibrarySettings(BaseSettings): ), alias="web-css-file", ) - web_header_links: List[str] = FormField( + web_header_links: list[str] = FormField( [], form=LibraryConfFormItem( label="Web header links", @@ -410,7 +410,7 @@ class LibrarySettings(BaseSettings): ), alias="web-header-links", ) - web_header_labels: List[str] = FormField( + web_header_labels: list[str] = FormField( [], form=LibraryConfFormItem( label="Web header labels", @@ -421,7 +421,7 @@ class LibrarySettings(BaseSettings): ), alias="web-header-labels", ) - hidden_content_types: List[str] = FormField( + hidden_content_types: list[str] = FormField( [], form=LibraryConfFormItem( label="Hidden content types", @@ -433,7 +433,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_ONLY, ), ) - max_outstanding_fines: Optional[PositiveFloat] = FormField( + max_outstanding_fines: PositiveFloat | None = FormField( None, form=LibraryConfFormItem( label="Maximum amount in fines a patron can have before losing lending privileges", @@ -441,7 +441,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - loan_limit: Optional[PositiveInt] = FormField( + loan_limit: PositiveInt | None = FormField( None, form=LibraryConfFormItem( label="Maximum number of books a patron can have on loan at once", @@ -452,7 +452,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - hold_limit: Optional[PositiveInt] = FormField( + hold_limit: PositiveInt | None = FormField( None, form=LibraryConfFormItem( label="Maximum number of books a patron can have on hold at once", @@ -463,7 +463,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - terms_of_service: Optional[HttpUrl] = FormField( + terms_of_service: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Terms of service URL", @@ -472,7 +472,7 @@ class LibrarySettings(BaseSettings): ), alias="terms-of-service", ) - privacy_policy: Optional[HttpUrl] = FormField( + privacy_policy: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Privacy policy URL", @@ -481,7 +481,7 @@ class LibrarySettings(BaseSettings): ), alias="privacy-policy", ) - copyright: Optional[HttpUrl] = FormField( + copyright: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Copyright URL", @@ -489,7 +489,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_OR_MANAGER, ), ) - about: Optional[HttpUrl] = FormField( + about: HttpUrl | None = FormField( 
None, form=LibraryConfFormItem( label="About URL", @@ -497,7 +497,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - license: Optional[HttpUrl] = FormField( + license: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="License URL", @@ -505,7 +505,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_OR_MANAGER, ), ) - registration_url: Optional[HttpUrl] = FormField( + registration_url: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Patron registration URL", @@ -515,7 +515,7 @@ class LibrarySettings(BaseSettings): ), alias="register", ) - patron_password_reset: Optional[HttpUrl] = FormField( + patron_password_reset: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Password Reset Link", @@ -525,7 +525,7 @@ class LibrarySettings(BaseSettings): ), alias="http://librarysimplified.org/terms/rel/patron-password-reset", ) - large_collection_languages: Optional[List[str]] = FormField( + large_collection_languages: list[str] | None = FormField( None, form=LibraryConfFormItem( label="The primary languages represented in this library's collection", @@ -539,7 +539,7 @@ class LibrarySettings(BaseSettings): ), alias="large_collections", ) - small_collection_languages: Optional[List[str]] = FormField( + small_collection_languages: list[str] | None = FormField( None, form=LibraryConfFormItem( label="Other major languages represented in this library's collection", @@ -553,7 +553,7 @@ class LibrarySettings(BaseSettings): ), alias="small_collections", ) - tiny_collection_languages: Optional[List[str]] = FormField( + tiny_collection_languages: list[str] | None = FormField( None, form=LibraryConfFormItem( label="Other languages in this library's collection", @@ -570,8 +570,8 @@ class LibrarySettings(BaseSettings): @root_validator def validate_require_help_email_or_website( - cls, values: Dict[str, Any] - ) -> Dict[str, Any]: + cls, values: dict[str, Any] + ) -> dict[str, Any]: if not values.get("help_email") and not values.get("help_web"): help_email_label = cls.get_form_field_label("help_email") help_website_label = cls.get_form_field_label("help_web") @@ -584,7 +584,7 @@ def validate_require_help_email_or_website( return values @root_validator - def validate_header_links(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def validate_header_links(cls, values: dict[str, Any]) -> dict[str, Any]: """Verify that header links and labels are the same length.""" header_links = values.get("web_header_links") header_labels = values.get("web_header_labels") @@ -604,7 +604,7 @@ def validate_web_color_contrast(cls, value: str, field: ModelField) -> str: white test, as well as text color on white backgrounds. 
""" - def hex_to_rgb(hex: str) -> Tuple[float, ...]: + def hex_to_rgb(hex: str) -> tuple[float, ...]: hex = hex.lstrip("#") return tuple(int(hex[i : i + 2], 16) / 255.0 for i in (0, 2, 4)) @@ -634,8 +634,8 @@ def hex_to_rgb(hex: str) -> Tuple[float, ...]: "tiny_collection_languages", ) def validate_language_codes( - cls, value: Optional[List[str]], field: ModelField - ) -> Optional[List[str]]: + cls, value: list[str] | None, field: ModelField + ) -> list[str] | None: """Verify that collection languages are valid.""" if value is not None: languages = [] diff --git a/core/coverage.py b/core/coverage.py index 577dacfe6..556d993c3 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -1,6 +1,5 @@ import logging import traceback -from typing import Optional, Union from sqlalchemy.orm import Load from sqlalchemy.orm.session import Session @@ -151,13 +150,13 @@ class BaseCoverageProvider: # In your subclass, set this to the name of the service, # e.g. "Overdrive Bibliographic Coverage Provider". - SERVICE_NAME: Optional[str] = None + SERVICE_NAME: str | None = None # In your subclass, you _may_ set this to a string that distinguishes # two different CoverageProviders from the same data source. # (You may also override the operation method, if you need # database access to determine which operation to use.) - OPERATION: Optional[str] = None + OPERATION: str | None = None # The database session will be committed each time the # BaseCoverageProvider has (attempted to) provide coverage to this @@ -621,7 +620,7 @@ class IdentifierCoverageProvider(BaseCoverageProvider): # Setting this to None will attempt to give coverage to every single # Identifier in the system, which is probably not what you want. NO_SPECIFIED_TYPES = object() - INPUT_IDENTIFIER_TYPES: Union[None, str, object] = NO_SPECIFIED_TYPES + INPUT_IDENTIFIER_TYPES: None | str | object = NO_SPECIFIED_TYPES # Set this to False if a given Identifier needs to be run through # this CoverageProvider once for every Collection that has this @@ -1107,14 +1106,14 @@ class CollectionCoverageProvider(IdentifierCoverageProvider): # By default, this type of CoverageProvider will provide coverage to # all Identifiers in the given Collection, regardless of their type. - INPUT_IDENTIFIER_TYPES: Union[None, str, object] = None + INPUT_IDENTIFIER_TYPES: None | str | object = None DEFAULT_BATCH_SIZE = 10 # Set this to the name of the protocol managed by this type of # CoverageProvider. If this CoverageProvider can manage collections # for any protocol, leave this as None. - PROTOCOL: Optional[str] = None + PROTOCOL: str | None = None # By default, Works calculated by a CollectionCoverageProvider update # the ExternalSearchIndex. Set this value to True for applications that @@ -1528,20 +1527,6 @@ class WorkPresentationProvider(PresentationReadyWorkCoverageProvider): DEFAULT_BATCH_SIZE = 100 -class MARCRecordWorkCoverageProvider(WorkPresentationProvider): - """Make sure all presentation-ready works have an up-to-date MARC - record. - """ - - SERVICE_NAME = "MARC Record Work Coverage Provider" - OPERATION = WorkCoverageRecord.GENERATE_MARC_OPERATION - DEFAULT_BATCH_SIZE = 1000 - - def process_item(self, work): - work.calculate_marc_record() - return work - - class WorkPresentationEditionCoverageProvider(WorkPresentationProvider): """Make sure each Work has an up-to-date presentation edition. 
diff --git a/core/equivalents_coverage.py b/core/equivalents_coverage.py index 0f290a1fb..0a9d71de3 100644 --- a/core/equivalents_coverage.py +++ b/core/equivalents_coverage.py @@ -1,5 +1,3 @@ -from typing import List, Optional, Set - from sqlalchemy import and_, delete, select from sqlalchemy.orm import Query, joinedload @@ -26,7 +24,7 @@ def __init__( self, _db, batch_size=None, cutoff_time=None, registered_only=False, **kwargs ): # Set of identifiers covered this run of the provider - self._already_covered_identifiers: Set[int] = set() + self._already_covered_identifiers: set[int] = set() super().__init__(_db, batch_size, cutoff_time, registered_only) def run(self): @@ -52,8 +50,8 @@ def items_that_need_coverage(self, identifiers=None, **kwargs) -> Query: return qu def _identifiers_for_coverage( - self, records: List[EquivalencyCoverageRecord] - ) -> Set[Optional[int]]: + self, records: list[EquivalencyCoverageRecord] + ) -> set[int | None]: """Get all identifiers this coverage run should recompute This involves inputs and outputs, and also any parent_identifier that has a direct relation with these identifiers @@ -61,9 +59,9 @@ def _identifiers_for_coverage( equivs = [r.equivalency for r in records] # process both inputs and outputs - identifier_ids_list: List[Optional[int]] = [eq.input_id for eq in equivs] + identifier_ids_list: list[int | None] = [eq.input_id for eq in equivs] identifier_ids_list.extend([eq.output_id for eq in equivs]) - identifier_ids: Set[Optional[int]] = set(identifier_ids_list) + identifier_ids: set[int | None] = set(identifier_ids_list) # Any identifier found, should be recalculated # However we must recalculate any other chain these identifiers were part of also @@ -77,8 +75,8 @@ def _identifiers_for_coverage( return identifier_ids def process_batch( - self, batch: List[EquivalencyCoverageRecord] - ) -> List[EquivalencyCoverageRecord]: + self, batch: list[EquivalencyCoverageRecord] + ) -> list[EquivalencyCoverageRecord]: """Query for and store the chain of equivalent identifiers batch sizes are not exact since we pull the related identifiers into the current batch too, so they would start out larger than intended diff --git a/core/exceptions.py b/core/exceptions.py index 8d7485d7e..340fe42cb 100644 --- a/core/exceptions.py +++ b/core/exceptions.py @@ -1,11 +1,8 @@ -from typing import Optional - - class BaseError(Exception): """Base class for all errors""" def __init__( - self, message: Optional[str] = None, inner_exception: Optional[Exception] = None + self, message: str | None = None, inner_exception: Exception | None = None ): """Initializes a new instance of BaseError class @@ -23,7 +20,7 @@ def __hash__(self): return hash(str(self)) @property - def inner_exception(self) -> Optional[str]: + def inner_exception(self) -> str | None: """Returns an inner exception :return: Inner exception diff --git a/core/external_search.py b/core/external_search.py index 493bf3ecd..c3fe8af23 100644 --- a/core/external_search.py +++ b/core/external_search.py @@ -7,7 +7,8 @@ import re import time from collections import defaultdict -from typing import Any, Callable, Dict, Iterable, List, Optional, Union +from collections.abc import Callable, Iterable +from typing import Any from attr import define from flask_babel import lazy_gettext as _ @@ -128,7 +129,7 @@ class ExternalSearchIndex(HasSelfTests): SITEWIDE = True @classmethod - def search_integration(cls, _db) -> Optional[ExternalIntegration]: + def search_integration(cls, _db) -> ExternalIntegration | None: """Look up the 
ExternalIntegration for Opensearch.""" return ExternalIntegration.lookup( _db, ExternalIntegration.OPENSEARCH, goal=ExternalIntegration.SEARCH_GOAL @@ -154,11 +155,11 @@ def load(cls, _db, *args, **kwargs): def __init__( self, _db, - url: Optional[str] = None, - test_search_term: Optional[str] = None, - revision_directory: Optional[SearchRevisionDirectory] = None, - version: Optional[int] = None, - custom_client_service: Optional[SearchService] = None, + url: str | None = None, + test_search_term: str | None = None, + revision_directory: SearchRevisionDirectory | None = None, + version: int | None = None, + custom_client_service: SearchService | None = None, ): """Constructor @@ -226,7 +227,7 @@ def search_service(self) -> SearchService: """Get the underlying search service.""" return self._search_service - def start_migration(self) -> Optional[SearchMigrationInProgress]: + def start_migration(self) -> SearchMigrationInProgress | None: """Update to the latest schema, indexing the given works.""" migrator = SearchMigrator( revisions=self._revision_directory, @@ -1279,7 +1280,7 @@ class Operators(Values): _BOOL_TYPE = {"type": "bool"} # The fields mappings in the search DB - FIELD_MAPPING: Dict[str, Dict] = { + FIELD_MAPPING: dict[str, dict] = { "audience": dict(), "author": _KEYWORD_ONLY, "classifications.scheme": _KEYWORD_ONLY, @@ -1388,7 +1389,7 @@ def language(value: str) -> str: "language": ValueTransforms.language, } - def __init__(self, query: Union[str, Dict], filter=None): + def __init__(self, query: str | dict, filter=None): if type(query) is str: try: query = json.loads(query) @@ -1411,10 +1412,10 @@ def search_query(self): def _is_keyword(self, name: str) -> bool: return self.FIELD_MAPPING[name].get("keyword") == True - def _nested_path(self, name: str) -> Union[str, None]: + def _nested_path(self, name: str) -> str | None: return self.FIELD_MAPPING[name].get("path") - def _parse_json_query(self, query: Dict): + def _parse_json_query(self, query: dict): """Eventually recursive json query parser""" es_query = None @@ -1438,7 +1439,7 @@ def _parse_json_query(self, query: Dict): return es_query - def _parse_json_leaf(self, query: Dict) -> Dict: + def _parse_json_leaf(self, query: dict) -> dict: """We have a leaf query, which means this becomes a keyword.term query""" op = query.get(self.QueryLeaf.OP, self.Operators.EQ) @@ -1514,7 +1515,7 @@ def _match_or_term_query(): return es_query - def _parse_json_join(self, query: Dict) -> Dict: + def _parse_json_join(self, query: dict) -> dict: if len(query.keys()) != 1: raise QueryParseException( detail="A conjuction cannot have multiple parts in the same sub-query" @@ -2765,9 +2766,9 @@ def __init__(self, *args, **kwargs): # we're already at the latest version, then simply upload search documents instead. 
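The arrangement that comment describes, reduced to a runnable sketch with stand-in classes: while a schema migration is in progress it doubles as the document sink, and once start_migration() returns None (already on the latest version) documents go straight to the live-index receiver; the `or` in process_batch below works because a real in-progress migration object is always truthy:

class _DocumentSink:
    # Stand-in for SearchMigrationInProgress / SearchDocumentReceiver.
    def __init__(self, name: str) -> None:
        self.name = name

    def add_documents(self, documents: list[dict]) -> list[dict]:
        return []  # an empty list means no failures


migration: _DocumentSink | None = None  # start_migration() found nothing to do
receiver = _DocumentSink("live index")

target = migration or receiver
assert target.name == "live index"
failures = target.add_documents([{"title": "example"}])
assert failures == []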
# self.receiver = None - self.migration: Optional[ + self.migration: None | ( SearchMigrationInProgress - ] = self.search_index_client.start_migration() + ) = self.search_index_client.start_migration() if self.migration is None: self.receiver: SearchDocumentReceiver = ( self.search_index_client.start_updating_search_documents() @@ -2788,19 +2789,19 @@ def run_once_and_update_timestamp(self): self.on_completely_finished() return result - def process_batch(self, works) -> List[Work | CoverageFailure]: + def process_batch(self, works) -> list[Work | CoverageFailure]: target: SearchDocumentReceiverType = self.migration or self.receiver failures = target.add_documents( documents=self.search_index_client.create_search_documents_from_works(works) ) # Maintain a dictionary of works so that we can efficiently remove failed works later. - work_map: Dict[int, Work] = {} + work_map: dict[int, Work] = {} for work in works: work_map[work.id] = work # Remove all the works that failed and create failure records for them. - results: List[Work | CoverageFailure] = [] + results: list[Work | CoverageFailure] = [] for failure in failures: work = work_map[failure.id] del work_map[failure.id] diff --git a/core/feed/acquisition.py b/core/feed/acquisition.py index 5fed809b4..476b9566c 100644 --- a/core/feed/acquisition.py +++ b/core/feed/acquisition.py @@ -2,17 +2,8 @@ from __future__ import annotations import logging -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - List, - Optional, - Tuple, - Type, -) +from collections.abc import Callable, Generator +from typing import TYPE_CHECKING, Any from sqlalchemy.orm import Query, Session @@ -55,11 +46,11 @@ def __init__( self, title: str, url: str, - works: List[Work], + works: list[Work], annotator: CirculationManagerAnnotator, - facets: Optional[FacetsWithEntryPoint] = None, - pagination: Optional[Pagination] = None, - precomposed_entries: Optional[List[OPDSMessage]] = None, + facets: FacetsWithEntryPoint | None = None, + pagination: Pagination | None = None, + precomposed_entries: list[OPDSMessage] | None = None, ) -> None: self.annotator = annotator self._facets = facets @@ -80,7 +71,7 @@ def generate_feed(self, annotate: bool = True) -> None: if annotate: self.annotator.annotate_feed(self._feed) - def add_pagination_links(self, works: List[Work], lane: WorkList) -> None: + def add_pagination_links(self, works: list[Work], lane: WorkList) -> None: """Add pagination links to the feed""" if not self._pagination: return None @@ -118,7 +109,7 @@ def add_facet_links(self, lane: WorkList) -> None: if entrypoints: # A paginated feed may have multiple entry points into the # same dataset. 
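The make_link closure defined just below turns each entry point into a facet URL against the same worklist and facets. As a standalone sketch, with an invented URL scheme and the link attributes approximated as a plain dict (the rel value is the OPDS facet relation):

from collections.abc import Callable


def entrypoint_links(
    make_link: Callable[[str], str], entrypoints: list[str], selected: str | None
) -> list[dict[str, str]]:
    return [
        {
            "href": make_link(ep),
            "title": ep,
            "rel": "http://opds-spec.org/facet",
            "activeFacet": "true" if ep == selected else "false",
        }
        for ep in entrypoints
    ]


links = entrypoint_links(lambda ep: f"/feed?entrypoint={ep}", ["Book", "Audio"], "Book")
assert links[0]["href"] == "/feed?entrypoint=Book"
assert links[1]["activeFacet"] == "false"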
- def make_link(ep: Type[EntryPoint]) -> str: + def make_link(ep: type[EntryPoint]) -> str: return self.annotator.feed_url( lane, facets=facets.navigate(entrypoint=ep) ) @@ -196,7 +187,7 @@ def as_error_response(self, **kwargs: Any) -> OPDSFeedResponse: def _create_entry( cls, work: Work, - active_licensepool: Optional[LicensePool], + active_licensepool: LicensePool | None, edition: Edition, identifier: Identifier, annotator: Annotator, @@ -215,9 +206,9 @@ def _create_entry( def add_entrypoint_links( cls, feed: FeedData, - url_generator: Callable[[Type[EntryPoint]], str], - entrypoints: List[Type[EntryPoint]], - selected_entrypoint: Optional[Type[EntryPoint]], + url_generator: Callable[[type[EntryPoint]], str], + entrypoints: list[type[EntryPoint]], + selected_entrypoint: type[EntryPoint] | None, group_name: str = "Formats", ) -> None: """Add links to a feed forming an OPDS facet group for a set of @@ -248,12 +239,12 @@ def add_entrypoint_links( @classmethod def _entrypoint_link( cls, - url_generator: Callable[[Type[EntryPoint]], str], - entrypoint: Type[EntryPoint], - selected_entrypoint: Optional[Type[EntryPoint]], + url_generator: Callable[[type[EntryPoint]], str], + entrypoint: type[EntryPoint], + selected_entrypoint: type[EntryPoint] | None, is_default: bool, group_name: str, - ) -> Optional[Link]: + ) -> Link | None: """Create arguments for add_link_to_feed for a link that navigates between EntryPoints. """ @@ -276,7 +267,7 @@ def _entrypoint_link( return link def add_breadcrumb_links( - self, lane: WorkList, entrypoint: Optional[Type[EntryPoint]] = None + self, lane: WorkList, entrypoint: type[EntryPoint] | None = None ) -> None: """Add information necessary to find your current place in the site's navigation. @@ -319,7 +310,7 @@ def add_breadcrumbs( self, lane: WorkList, include_lane: bool = False, - entrypoint: Optional[Type[EntryPoint]] = None, + entrypoint: type[EntryPoint] | None = None, ) -> None: """Add list of ancestor links in a breadcrumbs element. @@ -405,7 +396,7 @@ def add_breadcrumbs( # Append the breadcrumbs to the feed. self._feed.breadcrumbs = breadcrumbs - def show_current_entrypoint(self, entrypoint: Optional[Type[EntryPoint]]) -> None: + def show_current_entrypoint(self, entrypoint: type[EntryPoint] | None) -> None: """Annotate this given feed with a simplified:entryPoint attribute pointing to the current entrypoint's TYPE_URI. @@ -442,9 +433,9 @@ def page( url: str, worklist: WorkList, annotator: CirculationManagerAnnotator, - facets: Optional[FacetsWithEntryPoint], - pagination: Optional[Pagination], - search_engine: Optional[ExternalSearchIndex], + facets: FacetsWithEntryPoint | None, + pagination: Pagination | None, + search_engine: ExternalSearchIndex | None, ) -> OPDSAcquisitionFeed: works = worklist.works( _db, facets=facets, pagination=pagination, search_engine=search_engine @@ -470,9 +461,9 @@ def page( @classmethod def active_loans_for( cls, - circulation: Optional[CirculationAPI], + circulation: CirculationAPI | None, patron: Patron, - annotator: Optional[LibraryAnnotator] = None, + annotator: LibraryAnnotator | None = None, **response_kwargs: Any, ) -> OPDSAcquisitionFeed: """A patron specific feed that only contains the loans and holds of a patron""" @@ -484,7 +475,7 @@ def active_loans_for( active_loans_by_work[work] = loan # There might be multiple holds for the same work so we gather all of them and choose the best one. 
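The gather step that follows, as a self-contained sketch; the tie-break rule is not visible in this hunk, so picking the lowest queue position below is only an assumption for illustration:

from collections import defaultdict

# (work id, queue position) pairs standing in for Hold objects.
holds = [("work-a", 3), ("work-b", 2), ("work-a", 1)]

all_holds_by_work: dict[str, list[tuple[str, int]]] = defaultdict(list)
for hold in holds:
    all_holds_by_work[hold[0]].append(hold)

# Keep one "best" hold per work, assumed here to be the lowest position.
active_holds_by_work = {
    work: min(group, key=lambda h: h[1])
    for work, group in all_holds_by_work.items()
}
assert active_holds_by_work["work-a"] == ("work-a", 1)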
- all_holds_by_work: Dict[Work, List[Hold]] = {} + all_holds_by_work: dict[Work, list[Hold]] = {} for hold in patron.holds: work = hold.work if not work: @@ -495,7 +486,7 @@ def active_loans_for( all_holds_by_work[work].append(hold) - active_holds_by_work: Dict[Work, Hold] = {} + active_holds_by_work: dict[Work, Hold] = {} for work, list_of_holds in all_holds_by_work.items(): active_holds_by_work[ work @@ -599,7 +590,7 @@ def single_entry( work: Work | Edition | None, annotator: Annotator, even_if_no_license_pool: bool = False, - ) -> Optional[WorkEntry | OPDSMessage]: + ) -> WorkEntry | OPDSMessage | None: """Turn a work into an annotated work entry for an acquisition feed.""" identifier = None _work: Work @@ -669,9 +660,9 @@ def groups( url: str, worklist: WorkList, annotator: LibraryAnnotator, - pagination: Optional[Pagination] = None, - facets: Optional[FacetsWithEntryPoint] = None, - search_engine: Optional[ExternalSearchIndex] = None, + pagination: Pagination | None = None, + facets: FacetsWithEntryPoint | None = None, + search_engine: ExternalSearchIndex | None = None, search_debug: bool = False, ) -> OPDSAcquisitionFeed: """Internal method called by groups() when a grouped feed @@ -737,7 +728,7 @@ def groups( entrypoints = facets.selectable_entrypoints(worklist) if entrypoints: - def make_link(ep: Type[EntryPoint]) -> str: + def make_link(ep: type[EntryPoint]) -> str: return annotator.groups_url( worklist, facets=facets.navigate(entrypoint=ep) ) @@ -761,8 +752,8 @@ def search( search_engine: ExternalSearchIndex, query: str, annotator: LibraryAnnotator, - pagination: Optional[Pagination] = None, - facets: Optional[FacetsWithEntryPoint] = None, + pagination: Pagination | None = None, + facets: FacetsWithEntryPoint | None = None, **response_kwargs: Any, ) -> OPDSAcquisitionFeed | ProblemDetail: """Run a search against the given search engine and return @@ -803,7 +794,7 @@ def search( entrypoints = facets.selectable_entrypoints(lane) if entrypoints: - def make_link(ep: Type[EntryPoint]) -> str: + def make_link(ep: type[EntryPoint]) -> str: return annotator.search_url( lane, query, pagination=None, facets=facets.navigate(entrypoint=ep) ) @@ -893,11 +884,11 @@ class LookupAcquisitionFeed(OPDSAcquisitionFeed): """ @classmethod - def single_entry(cls, work: Tuple[Identifier, Work], annotator: Annotator) -> WorkEntry | OPDSMessage: # type: ignore[override] + def single_entry(cls, work: tuple[Identifier, Work], annotator: Annotator) -> WorkEntry | OPDSMessage: # type: ignore[override] # This comes in as a tuple, which deviates from the typical behaviour identifier, _work = work - active_licensepool: Optional[LicensePool] + active_licensepool: LicensePool | None if identifier.licensed_through: active_licensepool = identifier.licensed_through[0] else: diff --git a/core/feed/admin.py b/core/feed/admin.py index a4536fa18..28730d2c4 100644 --- a/core/feed/admin.py +++ b/core/feed/admin.py @@ -1,5 +1,3 @@ -from typing import Optional - from sqlalchemy import and_ from sqlalchemy.orm import Session from typing_extensions import Self @@ -18,7 +16,7 @@ def suppressed( title: str, url: str, annotator: AdminAnnotator, - pagination: Optional[Pagination] = None, + pagination: Pagination | None = None, ) -> Self: _pagination = pagination or Pagination.default() diff --git a/core/feed/annotator/admin.py b/core/feed/annotator/admin.py index 27da25067..5f017cf01 100644 --- a/core/feed/annotator/admin.py +++ b/core/feed/annotator/admin.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import 
Optional from api.circulation import CirculationAPI from core.feed.annotator.circulation import LibraryAnnotator @@ -11,11 +10,11 @@ class AdminAnnotator(LibraryAnnotator): - def __init__(self, circulation: Optional[CirculationAPI], library: Library) -> None: + def __init__(self, circulation: CirculationAPI | None, library: Library) -> None: super().__init__(circulation, None, library) def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime] = None + self, entry: WorkEntry, updated: datetime | None = None ) -> None: super().annotate_work_entry(entry) if not entry.computed: diff --git a/core/feed/annotator/base.py b/core/feed/annotator/base.py index 8c7577e8a..ee8de57f0 100644 --- a/core/feed/annotator/base.py +++ b/core/feed/annotator/base.py @@ -4,7 +4,7 @@ import logging from collections import defaultdict from decimal import Decimal -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any from urllib.parse import quote from sqlalchemy.orm import Session, joinedload @@ -32,7 +32,7 @@ class ToFeedEntry: @classmethod - def authors(cls, edition: Edition) -> Dict[str, List[Author]]: + def authors(cls, edition: Edition) -> dict[str, list[Author]]: """Create one or more author (and contributor) objects for the given Work. @@ -41,8 +41,8 @@ def authors(cls, edition: Edition) -> Dict[str, List[Author]]: Contributions. :return: A dict with "authors" and "contributors" as a list of Author objects """ - authors: Dict[str, List[Author]] = {"authors": [], "contributors": []} - state: Dict[Optional[str], Set[str]] = defaultdict(set) + authors: dict[str, list[Author]] = {"authors": [], "contributors": []} + state: dict[str | None, set[str]] = defaultdict(set) for contribution in edition.contributions: info = cls.contributor(contribution, state) if info is None: @@ -63,8 +63,8 @@ def authors(cls, edition: Edition) -> Dict[str, List[Author]]: @classmethod def contributor( - cls, contribution: Contribution, state: Dict[Optional[str], Set[str]] - ) -> Optional[Tuple[str, Author]]: + cls, contribution: Contribution, state: dict[str | None, set[str]] + ) -> tuple[str, Author] | None: """Build an author (or contributor) object for a Contribution. :param contribution: A Contribution. @@ -101,7 +101,7 @@ def contributor( return None # Okay, we're creating a tag. 
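The state dict threaded through these calls is the bookkeeping behind the return None just above: as the annotation dict[str | None, set[str]] suggests, it appears to map each MARC role (possibly None) to the contributor names already emitted for it, so repeat contributions are skipped. A sketch of that reading, with invented names; the exact keying is an inference from the annotation, not confirmed by this hunk:

from collections import defaultdict

state: dict[str | None, set[str]] = defaultdict(set)


def should_emit(role: str | None, name: str) -> bool:
    if name in state[role]:
        return False  # this contributor already got a tag for this role
    state[role].add(name)
    return True


assert should_emit("aut", "Tove Jansson") is True
assert should_emit("aut", "Tove Jansson") is False  # duplicate skipped
assert should_emit("ill", "Tove Jansson") is True  # same name, new role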
- properties: Dict[str, Any] = dict() + properties: dict[str, Any] = dict() if marc_role: properties["role"] = marc_role entry = Author(name=name, **properties) @@ -113,8 +113,8 @@ def contributor( @classmethod def series( - cls, series_name: Optional[str], series_position: Optional[int] | Optional[str] - ) -> Optional[FeedEntryType]: + cls, series_name: str | None, series_position: int | None | str | None + ) -> FeedEntryType | None: """Generate a FeedEntryType object for the given name and position.""" if not series_name: return None @@ -126,7 +126,7 @@ def series( return series @classmethod - def rating(cls, type_uri: Optional[str], value: float | Decimal) -> FeedEntryType: + def rating(cls, type_uri: str | None, value: float | Decimal) -> FeedEntryType: """Generate a FeedEntryType object for the given type and value.""" entry = FeedEntryType.create( **dict(ratingValue="%.4f" % value, additionalType=type_uri) @@ -134,7 +134,7 @@ def rating(cls, type_uri: Optional[str], value: float | Decimal) -> FeedEntryTyp return entry @classmethod - def samples(cls, edition: Optional[Edition]) -> list[Hyperlink]: + def samples(cls, edition: Edition | None) -> list[Hyperlink]: if not edition: return [] _db = Session.object_session(edition) @@ -150,7 +150,7 @@ def samples(cls, edition: Optional[Edition]) -> list[Hyperlink]: return links @classmethod - def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: + def categories(cls, work: Work) -> dict[str, list[dict[str, str]]]: """Return all relevant classifications of this work. :return: A dictionary mapping 'scheme' URLs to dictionaries of @@ -187,7 +187,7 @@ def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: # Add the appeals as a category of schema # http://librarysimplified.org/terms/appeal schema_url = AtomFeed.SIMPLIFIED_NS + "appeals/" - appeals: List[Dict[str, Any]] = [] + appeals: list[dict[str, Any]] = [] categories[schema_url] = appeals for name, value in ( (Work.CHARACTER_APPEAL, work.appeal_character), @@ -196,7 +196,7 @@ def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: (Work.STORY_APPEAL, work.appeal_story), ): if value: - appeal: Dict[str, Any] = dict(term=schema_url + name, label=name) + appeal: dict[str, Any] = dict(term=schema_url + name, label=name) weight_field = "ratingValue" appeal[weight_field] = value appeals.append(appeal) @@ -222,7 +222,7 @@ def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: return categories @classmethod - def content(cls, work: Optional[Work]) -> str: + def content(cls, work: Work | None) -> str: """Return an HTML summary of this work.""" summary = "" if work: @@ -243,7 +243,7 @@ def content(cls, work: Optional[Work]) -> str: class Annotator(ToFeedEntry): def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime.datetime] = None + self, entry: WorkEntry, updated: datetime.datetime | None = None ) -> None: """ Any data that the serializer must consider while generating an "entry" diff --git a/core/feed/annotator/circulation.py b/core/feed/annotator/circulation.py index e488b01c4..1e870d322 100644 --- a/core/feed/annotator/circulation.py +++ b/core/feed/annotator/circulation.py @@ -7,7 +7,7 @@ import urllib.parse import urllib.request from collections import defaultdict -from typing import Any, Dict, List, Optional, Tuple +from typing import Any from dependency_injector.wiring import Provide, inject from flask import url_for @@ -61,10 +61,10 @@ class AcquisitionHelper: @classmethod def license_tags( cls, - license_pool: 
Optional[LicensePool], - loan: Optional[Loan], - hold: Optional[Hold], - ) -> Optional[Dict[str, Any]]: + license_pool: LicensePool | None, + loan: Loan | None, + hold: Hold | None, + ) -> dict[str, Any] | None: acquisition = {} # Generate a list of licensing tags. These should be inserted # into a <link> tag. @@ -154,7 +154,7 @@ def license_tags( return acquisition @classmethod - def format_types(cls, delivery_mechanism: DeliveryMechanism) -> List[str]: + def format_types(cls, delivery_mechanism: DeliveryMechanism) -> list[str]: """Generate a set of types suitable for passing into acquisition_link(). """ @@ -184,11 +184,11 @@ class CirculationManagerAnnotator(Annotator): @inject def __init__( self, - lane: Optional[WorkList], - active_loans_by_work: Optional[Dict[Work, Loan]] = None, - active_holds_by_work: Optional[Dict[Work, Hold]] = None, - active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, - hidden_content_types: Optional[List[str]] = None, + lane: WorkList | None, + active_loans_by_work: dict[Work, Loan] | None = None, + active_holds_by_work: dict[Work, Hold] | None = None, + active_fulfillments_by_work: dict[Work, Any] | None = None, + hidden_content_types: list[str] | None = None, analytics: Analytics = Provide[Services.analytics.analytics], ) -> None: if lane: @@ -224,7 +224,7 @@ def is_work_entry_solo(self, work: Work) -> bool: ) ) - def _lane_identifier(self, lane: Optional[WorkList]) -> Optional[int]: + def _lane_identifier(self, lane: WorkList | None) -> int | None: if isinstance(lane, Lane): return lane.id return None @@ -246,11 +246,11 @@ def facet_url(self, facets: Facets) -> str: def feed_url( self, - lane: Optional[WorkList], - facets: Optional[FacetsWithEntryPoint] = None, - pagination: Optional[Pagination] = None, + lane: WorkList | None, + facets: FacetsWithEntryPoint | None = None, + pagination: Pagination | None = None, default_route: str = "feed", - extra_kwargs: Optional[Dict[str, Any]] = None, + extra_kwargs: dict[str, Any] | None = None, ) -> str: if isinstance(lane, WorkList) and hasattr(lane, "url_arguments"): route, kwargs = lane.url_arguments @@ -275,8 +275,8 @@ def navigation_url(self, lane: Lane) -> str: ) def active_licensepool_for( - self, work: Work, library: Optional[Library] = None - ) -> Optional[LicensePool]: + self, work: Work, library: Library | None = None + ) -> LicensePool | None: loan = self.active_loans_by_work.get(work) or self.active_holds_by_work.get( work ) @@ -304,7 +304,7 @@ def _prioritized_formats_for_pool( config.settings_dict.get(FormatPriorities.PRIORITIZED_DRM_SCHEMES_KEY) or [] ) - content_setting: List[str] = ( + content_setting: list[str] = ( config.settings_dict.get(FormatPriorities.PRIORITIZED_CONTENT_TYPES_KEY) or [] ) @@ -351,7 +351,7 @@ def visible_delivery_mechanisms( def annotate_work_entry( self, entry: WorkEntry, - updated: Optional[datetime.datetime] = None, + updated: datetime.datetime | None = None, ) -> None: work = entry.work identifier = entry.identifier or work.presentation_edition.primary_identifier @@ -394,19 +394,18 @@ def annotate_work_entry( def acquisition_links( self, - active_license_pool: Optional[LicensePool], - active_loan: Optional[Loan], - active_hold: Optional[Hold], - active_fulfillment: Optional[Any], + active_license_pool: LicensePool | None, + active_loan: Loan | None, + active_hold: Hold | None, + active_fulfillment: Any | None, identifier: Identifier, can_hold: bool = True, can_revoke_hold: bool = True, set_mechanism_at_borrow: bool = False, - direct_fulfillment_delivery_mechanisms:
Optional[ - List[LicensePoolDeliveryMechanism] - ] = None, + direct_fulfillment_delivery_mechanisms: None + | (list[LicensePoolDeliveryMechanism]) = None, add_open_access_links: bool = True, - ) -> List[Acquisition]: + ) -> list[Acquisition]: """Generate a number of <link> tags that enumerate all acquisition methods. @@ -488,7 +487,7 @@ def acquisition_links( link.add_attributes(license_tags) # Add links for fulfilling an active loan. - fulfill_links: List[Optional[Acquisition]] = [] + fulfill_links: list[Acquisition | None] = [] if can_fulfill: if active_fulfillment: # We're making an entry for a specific fulfill link. @@ -532,7 +531,7 @@ def acquisition_links( ) ) - open_access_links: List[Optional[Acquisition]] = [] + open_access_links: list[Acquisition | None] = [] if ( active_license_pool is not None and direct_fulfillment_delivery_mechanisms is not None @@ -580,33 +579,33 @@ def acquisition_links( def revoke_link( self, active_license_pool: LicensePool, - active_loan: Optional[Loan], - active_hold: Optional[Hold], - ) -> Optional[Acquisition]: + active_loan: Loan | None, + active_hold: Hold | None, + ) -> Acquisition | None: return None def borrow_link( self, active_license_pool: LicensePool, - borrow_mechanism: Optional[LicensePoolDeliveryMechanism], - fulfillment_mechanisms: List[LicensePoolDeliveryMechanism], - active_hold: Optional[Hold] = None, - ) -> Optional[Acquisition]: + borrow_mechanism: LicensePoolDeliveryMechanism | None, + fulfillment_mechanisms: list[LicensePoolDeliveryMechanism], + active_hold: Hold | None = None, + ) -> Acquisition | None: return None def fulfill_link( self, license_pool: LicensePool, - active_loan: Optional[Loan], + active_loan: Loan | None, delivery_mechanism: DeliveryMechanism, rel: str = OPDSFeed.ACQUISITION_REL, - ) -> Optional[Acquisition]: + ) -> Acquisition | None: return None def open_access_link( self, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism ) -> Acquisition: - kw: Dict[str, Any] = dict(rel=OPDSFeed.OPEN_ACCESS_REL, type="") + kw: dict[str, Any] = dict(rel=OPDSFeed.OPEN_ACCESS_REL, type="") # Start off assuming that the URL associated with the # LicensePoolDeliveryMechanism's Resource is the URL we should @@ -626,8 +625,8 @@ def open_access_link( return link def rights_attributes( - self, lpdm: Optional[LicensePoolDeliveryMechanism] - ) -> Dict[str, str]: + self, lpdm: LicensePoolDeliveryMechanism | None + ) -> dict[str, str]: """Create a dictionary of tag attributes that explain the rights status of a LicensePoolDeliveryMechanism.
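The hunks above, like most of this changeset, are mechanical annotation rewrites: `typing.Optional`, `Dict`, `List`, and `Tuple` become PEP 604 unions and PEP 585 builtin generics. A minimal before/after sketch with a hypothetical function, for readers unfamiliar with the newer spelling:

```python
from typing import Dict, List, Optional

# Before: pre-3.10 spellings imported from the typing module.
def first_id(ids: Optional[List[int]]) -> Dict[str, Optional[int]]:
    return {"first": ids[0] if ids else None}


# After: builtin generics (3.9+) and | unions (3.10+), as used throughout this diff.
def first_id(ids: list[int] | None) -> dict[str, int | None]:
    return {"first": ids[0] if ids else None}
```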
@@ -643,8 +642,8 @@ def acquisition_link( cls, rel: str, href: str, - types: Optional[List[str]], - active_loan: Optional[Loan] = None, + types: list[str] | None, + active_loan: Loan | None = None, ) -> Acquisition: if types: initial_type = types[0] @@ -666,10 +665,10 @@ def acquisition_link( @classmethod def indirect_acquisition( - cls, indirect_types: List[str] - ) -> Optional[IndirectAcquisition]: - top_level_parent: Optional[IndirectAcquisition] = None - parent: Optional[IndirectAcquisition] = None + cls, indirect_types: list[str] + ) -> IndirectAcquisition | None: + top_level_parent: IndirectAcquisition | None = None + parent: IndirectAcquisition | None = None for t in indirect_types: indirect_link = IndirectAcquisition(type=t) if parent is not None: @@ -683,17 +682,17 @@ def indirect_acquisition( class LibraryAnnotator(CirculationManagerAnnotator): def __init__( self, - circulation: Optional[CirculationAPI], - lane: Optional[WorkList], + circulation: CirculationAPI | None, + lane: WorkList | None, library: Library, - patron: Optional[Patron] = None, - active_loans_by_work: Optional[Dict[Work, Loan]] = None, - active_holds_by_work: Optional[Dict[Work, Hold]] = None, - active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, + patron: Patron | None = None, + active_loans_by_work: dict[Work, Loan] | None = None, + active_holds_by_work: dict[Work, Hold] | None = None, + active_fulfillments_by_work: dict[Work, Any] | None = None, facet_view: str = "feed", top_level_title: str = "All Books", library_identifies_patrons: bool = True, - facets: Optional[FacetsWithEntryPoint] = None, + facets: FacetsWithEntryPoint | None = None, ) -> None: """Constructor. @@ -719,9 +718,9 @@ def __init__( self.circulation = circulation self.library: Library = library self.patron = patron - self.lanes_by_work: Dict[Work, List[Any]] = defaultdict(list) + self.lanes_by_work: dict[Work, list[Any]] = defaultdict(list) self.facet_view = facet_view - self._adobe_id_cache: Dict[str, Any] = {} + self._adobe_id_cache: dict[str, Any] = {} self._top_level_title = top_level_title self.identifies_patrons = library_identifies_patrons self.facets = facets or None @@ -729,7 +728,7 @@ def __init__( def top_level_title(self) -> str: return self._top_level_title - def permalink_for(self, identifier: Identifier) -> Tuple[str, str]: + def permalink_for(self, identifier: Identifier) -> tuple[str, str]: # TODO: Do not force OPDS types url = self.url_for( "permalink", @@ -741,7 +740,7 @@ def permalink_for(self, identifier: Identifier) -> Tuple[str, str]: return url, OPDSFeed.ENTRY_TYPE def groups_url( - self, lane: Optional[WorkList], facets: Optional[FacetsWithEntryPoint] = None + self, lane: WorkList | None, facets: FacetsWithEntryPoint | None = None ) -> str: lane_identifier = self._lane_identifier(lane) if facets: @@ -757,14 +756,14 @@ def groups_url( **kwargs, ) - def default_lane_url(self, facets: Optional[FacetsWithEntryPoint] = None) -> str: + def default_lane_url(self, facets: FacetsWithEntryPoint | None = None) -> str: return self.groups_url(None, facets=facets) def feed_url( # type: ignore [override] self, - lane: Optional[WorkList], - facets: Optional[FacetsWithEntryPoint] = None, - pagination: Optional[Pagination] = None, + lane: WorkList | None, + facets: FacetsWithEntryPoint | None = None, + pagination: Pagination | None = None, default_route: str = "feed", ) -> str: extra_kwargs = dict() @@ -774,10 +773,10 @@ def feed_url( # type: ignore [override] def search_url( self, - lane: Optional[WorkList], + lane: 
WorkList | None, query: str, - pagination: Optional[Pagination], - facets: Optional[FacetsWithEntryPoint] = None, + pagination: Pagination | None, + facets: FacetsWithEntryPoint | None = None, ) -> str: lane_identifier = self._lane_identifier(lane) kwargs = dict(q=query) @@ -794,8 +793,8 @@ def search_url( ) def group_uri( - self, work: Work, license_pool: Optional[LicensePool], identifier: Identifier - ) -> Tuple[Optional[str], str]: + self, work: Work, license_pool: LicensePool | None, identifier: Identifier - ) -> tuple[str | None, str]: if not work in self.lanes_by_work: return None, "" @@ -834,7 +833,7 @@ def group_uri( return self.lane_url(lane, self.facets), title def lane_url( - self, lane: Optional[WorkList], facets: Optional[FacetsWithEntryPoint] = None + self, lane: WorkList | None, facets: FacetsWithEntryPoint | None = None ) -> str: # If the lane has sublanes, the URL identifying the group will # take the user to another set of groups for the @@ -853,7 +852,7 @@ def lane_url( return url def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime.datetime] = None + self, entry: WorkEntry, updated: datetime.datetime | None = None ) -> None: super().annotate_work_entry(entry, updated=updated) @@ -965,7 +964,7 @@ def related_books_available(cls, work: Work, library: Library) -> bool: def language_and_audience_key_from_work( self, work: Work - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[str | None, str | None]: language_key = work.language audiences = None @@ -1150,7 +1149,7 @@ def annotate_feed(self, feed: FeedData) -> None: def add_configuration_links(self, feed: FeedData) -> None: _db = Session.object_session(self.library) - def _add_link(l: Dict[str, Any]) -> None: + def _add_link(l: dict[str, Any]) -> None: feed.add_link(**l) library = self.library @@ -1219,16 +1218,15 @@ def _add_link(l: Dict[str, Any]) -> None: def acquisition_links( # type: ignore [override] self, - active_license_pool: Optional[LicensePool], - active_loan: Optional[Loan], - active_hold: Optional[Hold], - active_fulfillment: Optional[Any], + active_license_pool: LicensePool | None, + active_loan: Loan | None, + active_hold: Hold | None, + active_fulfillment: Any | None, identifier: Identifier, - direct_fulfillment_delivery_mechanisms: Optional[ - List[LicensePoolDeliveryMechanism] - ] = None, - mock_api: Optional[Any] = None, - ) -> List[Acquisition]: + direct_fulfillment_delivery_mechanisms: None + | (list[LicensePoolDeliveryMechanism]) = None, + mock_api: Any | None = None, + ) -> list[Acquisition]: """Generate one or more <link> tags that can be used to borrow, reserve, or fulfill a book, depending on the state of the book and the current patron.
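`indirect_acquisition`, shown a few hunks up, folds a flat list of media types into a chain of nested `IndirectAcquisition` objects, outermost type first. A minimal usage sketch of that behavior, as far as it can be inferred from the loop in the diff:

```python
from core.feed.annotator.circulation import CirculationManagerAnnotator

# An Adobe DRM-wrapped EPUB: the client first acquires an ACSM file,
# which indirectly yields the EPUB itself.
chain = CirculationManagerAnnotator.indirect_acquisition(
    ["application/vnd.adobe.adept+xml", "application/epub+zip"]
)
assert chain is not None
assert chain.type == "application/vnd.adobe.adept+xml"   # outermost type
assert chain.children[0].type == "application/epub+zip"  # nested one level down
```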
@@ -1305,9 +1303,9 @@ def acquisition_links( # type: ignore [override] def revoke_link( self, active_license_pool: LicensePool, - active_loan: Optional[Loan], - active_hold: Optional[Hold], - ) -> Optional[Acquisition]: + active_loan: Loan | None, + active_hold: Hold | None, + ) -> Acquisition | None: if not self.identifies_patrons: return None url = self.url_for( @@ -1316,17 +1314,17 @@ def revoke_link( library_short_name=self.library.short_name, _external=True, ) - kw: Dict[str, Any] = dict(href=url, rel=OPDSFeed.REVOKE_LOAN_REL) + kw: dict[str, Any] = dict(href=url, rel=OPDSFeed.REVOKE_LOAN_REL) revoke_link_tag = Acquisition(**kw) return revoke_link_tag def borrow_link( self, active_license_pool: LicensePool, - borrow_mechanism: Optional[LicensePoolDeliveryMechanism], - fulfillment_mechanisms: List[LicensePoolDeliveryMechanism], - active_hold: Optional[Hold] = None, - ) -> Optional[Acquisition]: + borrow_mechanism: LicensePoolDeliveryMechanism | None, + fulfillment_mechanisms: list[LicensePoolDeliveryMechanism], + active_hold: Hold | None = None, + ) -> Acquisition | None: if not self.identifies_patrons: return None identifier = active_license_pool.identifier @@ -1354,7 +1352,7 @@ def borrow_link( is_hold=True if active_hold else False, ) - indirect_acquisitions: List[IndirectAcquisition] = [] + indirect_acquisitions: list[IndirectAcquisition] = [] for lpdm in fulfillment_mechanisms: # We have information about one or more delivery # mechanisms that will be available at the point of @@ -1384,10 +1382,10 @@ def borrow_link( def fulfill_link( self, license_pool: LicensePool, - active_loan: Optional[Loan], + active_loan: Loan | None, delivery_mechanism: DeliveryMechanism, rel: str = OPDSFeed.ACQUISITION_REL, - ) -> Optional[Acquisition]: + ) -> Acquisition | None: """Create a new fulfillment link. This link may include tags from the OPDS Extensions for DRM. @@ -1442,9 +1440,9 @@ def open_access_link( def drm_extension_tags( self, license_pool: LicensePool, - active_loan: Optional[Loan], - delivery_mechanism: Optional[DeliveryMechanism], - ) -> Dict[str, Any]: + active_loan: Loan | None, + delivery_mechanism: DeliveryMechanism | None, + ) -> dict[str, Any]: """Construct OPDS Extensions for DRM tags that explain how to register a device with the DRM server that manages this loan. :param delivery_mechanism: A DeliveryMechanism @@ -1471,7 +1469,7 @@ def drm_extension_tags( def adobe_id_tags( self, patron_identifier: str | Patron - ) -> Dict[str, FeedEntryType]: + ) -> dict[str, FeedEntryType]: """Construct tags using the DRM Extensions for OPDS standard that explain how to get an Adobe ID for this patron, and how to manage their list of device IDs. 
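The next hunk touches `adobe_id_tags`, which memoizes its result in `self._adobe_id_cache` and returns a deep copy (`copy.deepcopy(cached)`) so callers can mutate their copy without corrupting the cache. A minimal sketch of that pattern, with hypothetical names:

```python
import copy
from collections.abc import Callable
from typing import Any

_cache: dict[str, dict[str, Any]] = {}

def cached_tags(
    patron_identifier: str, compute: Callable[[], dict[str, Any]]
) -> dict[str, Any]:
    # Compute the tags once per patron identifier...
    if patron_identifier not in _cache:
        _cache[patron_identifier] = compute()
    # ...but always hand out a private copy, mirroring the deepcopy in the diff,
    # so per-feed mutation never leaks back into the cache.
    return copy.deepcopy(_cache[patron_identifier])
```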
@@ -1515,7 +1513,7 @@ def adobe_id_tags( cached = copy.deepcopy(cached) return cached - def lcp_key_retrieval_tags(self, active_loan: Loan) -> Dict[str, FeedEntryType]: + def lcp_key_retrieval_tags(self, active_loan: Loan) -> dict[str, FeedEntryType]: # In the case of LCP we have to include a patron's hashed passphrase # inside the acquisition link so client applications can use it to open the LCP license # without having to ask the user to enter their password diff --git a/core/feed/annotator/loan_and_hold.py b/core/feed/annotator/loan_and_hold.py index c395c5961..837b0a526 100644 --- a/core/feed/annotator/loan_and_hold.py +++ b/core/feed/annotator/loan_and_hold.py @@ -1,17 +1,16 @@ import copy from datetime import datetime -from typing import Any, Dict, List, Optional +from typing import Any from core.feed.annotator.circulation import LibraryAnnotator from core.feed.types import FeedData, Link, WorkEntry -from core.model.configuration import ExternalIntegration from core.model.constants import EditionConstants, LinkRelations from core.model.patron import Hold, Patron class LibraryLoanAndHoldAnnotator(LibraryAnnotator): @staticmethod - def choose_best_hold_for_work(list_of_holds: List[Hold]) -> Hold: + def choose_best_hold_for_work(list_of_holds: list[Hold]) -> Hold: # We don't want holds that are connected to license pools without any licenses owned. Also, we want the hold that # would result in the least wait time for the patron. @@ -45,7 +44,7 @@ def choose_best_hold_for_work(list_of_holds: List[Hold]) -> Hold: return best - def drm_device_registration_feed_tags(self, patron: Patron) -> Dict[str, Any]: + def drm_device_registration_feed_tags(self, patron: Patron) -> dict[str, Any]: """Return tags that provide information on DRM device deregistration independent of any particular loan. These tags will go under the <feed> tag. @@ -90,7 +89,7 @@ def annotate_feed(self, feed: FeedData) -> None: feed.metadata.drm_licensor = tags["drm_licensor"] def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime] = None + self, entry: WorkEntry, updated: datetime | None = None ) -> None: super().annotate_work_entry(entry, updated=updated) if not entry.computed: @@ -104,8 +103,7 @@ def annotate_work_entry( if ( edition.medium == EditionConstants.AUDIO_MEDIUM and active_license_pool - and active_license_pool.collection.protocol - == ExternalIntegration.OPDS_FOR_DISTRIBUTORS + and active_license_pool.should_track_playtime is True and work in self.active_loans_by_work ): entry.computed.other_links.append( diff --git a/core/feed/annotator/verbose.py b/core/feed/annotator/verbose.py index eabcb870a..45be52f71 100644 --- a/core/feed/annotator/verbose.py +++ b/core/feed/annotator/verbose.py @@ -1,6 +1,5 @@ from collections import defaultdict from datetime import datetime -from typing import Dict, List, Optional from sqlalchemy.orm import Session @@ -23,7 +22,7 @@ class VerboseAnnotator(Annotator): """ def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime] = None + self, entry: WorkEntry, updated: datetime | None = None ) -> None: super().annotate_work_entry(entry, updated=updated) self.add_ratings(entry) @@ -42,8 +41,8 @@ def add_ratings(cls, entry: WorkEntry) -> None: @classmethod def categories( - cls, work: Work, policy: Optional[PresentationCalculationPolicy] = None - ) -> Dict[str, List[Dict[str, str]]]: + cls, work: Work, policy: PresentationCalculationPolicy | None = None + ) -> dict[str, list[dict[str, str]]]: """Send out _all_ categories for the work.
(So long as the category type has a URI associated with it in @@ -85,7 +84,7 @@ def categories( return by_scheme @classmethod - def authors(cls, edition: Edition) -> Dict[str, List[Author]]: + def authors(cls, edition: Edition) -> dict[str, list[Author]]: """Create a detailed <author> tag for each author.""" return { "authors": [ diff --git a/core/feed/navigation.py b/core/feed/navigation.py index ae2111a9e..0b1da8865 100644 --- a/core/feed/navigation.py +++ b/core/feed/navigation.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Optional +from typing import Any from sqlalchemy.orm import Session from typing_extensions import Self @@ -23,8 +23,8 @@ def __init__( url: str, lane: WorkList, annotator: CirculationManagerAnnotator, - facets: Optional[Facets] = None, - pagination: Optional[Pagination] = None, + facets: Facets | None = None, + pagination: Pagination | None = None, ) -> None: self.lane = lane self.annotator = annotator @@ -40,7 +40,7 @@ def navigation( url: str, worklist: WorkList, annotator: CirculationManagerAnnotator, - facets: Optional[Facets] = None, + facets: Facets | None = None, ) -> Self: """The navigation feed with links to a given lane's sublanes.""" @@ -83,7 +83,7 @@ def add_entry( def as_response( self, - mime_types: Optional[MIMEAccept] = None, + mime_types: MIMEAccept | None = None, **kwargs: Any, ) -> OPDSFeedResponse: response = super().as_response(mime_types=mime_types, **kwargs) diff --git a/core/feed/opds.py b/core/feed/opds.py index b33e1a544..5daf0c931 100644 --- a/core/feed/opds.py +++ b/core/feed/opds.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, Dict, List, Optional, Type +from typing import Any from werkzeug.datastructures import MIMEAccept @@ -16,10 +16,10 @@ def get_serializer( - mime_types: Optional[MIMEAccept], + mime_types: MIMEAccept | None, ) -> SerializerInterface[Any]: # Ordering matters for poor matches (eg.
*/*), so we will keep OPDS1 first - serializers: Dict[str, Type[SerializerInterface[Any]]] = { + serializers: dict[str, type[SerializerInterface[Any]]] = { "application/atom+xml": OPDS1Serializer, "application/opds+json": OPDS2Serializer, } @@ -37,7 +37,7 @@ def __init__( self, title: str, url: str, - precomposed_entries: Optional[List[OPDSMessage]] = None, + precomposed_entries: list[OPDSMessage] | None = None, ) -> None: self.url = url self.title = title @@ -45,12 +45,12 @@ def __init__( self._feed = FeedData() self.log = logging.getLogger(self.__class__.__name__) - def add_link(self, href: str, rel: Optional[str] = None, **kwargs: Any) -> None: + def add_link(self, href: str, rel: str | None = None, **kwargs: Any) -> None: self._feed.add_link(href, rel=rel, **kwargs) def as_response( self, - mime_types: Optional[MIMEAccept] = None, + mime_types: MIMEAccept | None = None, **kwargs: Any, ) -> OPDSFeedResponse: """Serialize the feed using the serializer protocol""" @@ -67,7 +67,7 @@ def as_response( def entry_as_response( cls, entry: WorkEntry | OPDSMessage, - mime_types: Optional[MIMEAccept] = None, + mime_types: MIMEAccept | None = None, **response_kwargs: Any, ) -> OPDSEntryResponse: serializer = get_serializer(mime_types) diff --git a/core/feed/serializer/base.py b/core/feed/serializer/base.py index 5f0734578..7043a5c64 100644 --- a/core/feed/serializer/base.py +++ b/core/feed/serializer/base.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Generic, List, Optional, TypeVar +from typing import Generic, TypeVar from core.feed.types import FeedData, WorkEntryData from core.util.opds_writer import OPDSMessage @@ -15,7 +15,7 @@ def to_string(cls, data: T) -> str: @abstractmethod def serialize_feed( - self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None + self, feed: FeedData, precomposed_entries: list[OPDSMessage] | None = None ) -> str: ... 
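`get_serializer` (in `core/feed/opds.py` above) relies on werkzeug's `MIMEAccept` to pick a serializer, and on dict insertion order so that OPDS1 wins weak matches such as `*/*`. A small self-contained sketch of that negotiation, with placeholder strings standing in for the serializer classes:

```python
from werkzeug.datastructures import MIMEAccept

serializers = {
    "application/atom+xml": "OPDS1Serializer",   # placeholder for the real class
    "application/opds+json": "OPDS2Serializer",  # placeholder for the real class
}

# A wildcard Accept header matches everything equally well, so the first
# entry in the mapping (OPDS1) is chosen.
accept = MIMEAccept([("*/*", 1)])
best = accept.best_match(serializers.keys(), default="application/atom+xml")
assert serializers[best] == "OPDS1Serializer"

# An explicit preference for OPDS2 overrides the ordering.
accept = MIMEAccept([("application/opds+json", 1)])
best = accept.best_match(serializers.keys(), default="application/atom+xml")
assert serializers[best] == "OPDS2Serializer"
```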
diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index aa4c08c48..76d78c377 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -2,7 +2,7 @@ import datetime from functools import partial -from typing import Any, Dict, List, Optional, cast +from typing import Any, cast from lxml import etree @@ -59,21 +59,19 @@ def __init__(self) -> None: pass def _tag( - self, tag_name: str, *args: Any, mapping: Optional[Dict[str, str]] = None + self, tag_name: str, *args: Any, mapping: dict[str, str] | None = None ) -> etree._Element: if not mapping: mapping = TAG_MAPPING return self.E(mapping.get(tag_name, tag_name), *args) - def _attr_name( - self, attr_name: str, mapping: Optional[Dict[str, str]] = None - ) -> str: + def _attr_name(self, attr_name: str, mapping: dict[str, str] | None = None) -> str: if not mapping: mapping = ATTRIBUTE_MAPPING return mapping.get(attr_name, attr_name) def serialize_feed( - self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None + self, feed: FeedData, precomposed_entries: list[OPDSMessage] | None = None ) -> str: # First we do metadata serialized = self.E.feed() @@ -114,7 +112,7 @@ def serialize_feed( etree.indent(serialized) return self.to_string(serialized) - def _serialize_feed_metadata(self, metadata: FeedMetadata) -> List[etree._Element]: + def _serialize_feed_metadata(self, metadata: FeedMetadata) -> list[etree._Element]: tags = [] # Compulsory title tags.append(self._tag("title", metadata.title or "")) @@ -257,14 +255,14 @@ def serialize_work_entry(self, feed_entry: WorkEntryData) -> etree._Element: entry.append(self._serialize_author_tag("contributor", contributor)) for link in feed_entry.image_links: - entry.append(OPDSFeed.link(**link.dict())) + entry.append(OPDSFeed.link(**link.asdict())) for link in feed_entry.acquisition_links: element = self._serialize_acquistion_link(link) entry.append(element) for link in feed_entry.other_links: - entry.append(OPDSFeed.link(**link.dict())) + entry.append(OPDSFeed.link(**link.asdict())) return entry diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py index 91fe915cb..e63d3ec7e 100644 --- a/core/feed/serializer/opds2.py +++ b/core/feed/serializer/opds2.py @@ -1,6 +1,6 @@ import json from collections import defaultdict -from typing import Any, Dict, List, Optional +from typing import Any from core.feed.serializer.base import SerializerInterface from core.feed.types import ( @@ -32,14 +32,14 @@ } -class OPDS2Serializer(SerializerInterface[Dict[str, Any]]): +class OPDS2Serializer(SerializerInterface[dict[str, Any]]): def __init__(self) -> None: pass def serialize_feed( - self, feed: FeedData, precomposed_entries: Optional[List[Any]] = None + self, feed: FeedData, precomposed_entries: list[Any] | None = None ) -> str: - serialized: Dict[str, Any] = {"publications": []} + serialized: dict[str, Any] = {"publications": []} serialized["metadata"] = self._serialize_metadata(feed) for entry in feed.entries: @@ -51,20 +51,20 @@ def serialize_feed( return self.to_string(serialized) - def _serialize_metadata(self, feed: FeedData) -> Dict[str, Any]: + def _serialize_metadata(self, feed: FeedData) -> dict[str, Any]: fmeta = feed.metadata - metadata: Dict[str, Any] = {} + metadata: dict[str, Any] = {} if fmeta.title: metadata["title"] = fmeta.title if fmeta.items_per_page is not None: metadata["itemsPerPage"] = fmeta.items_per_page return metadata - def serialize_opds_message(self, entry: OPDSMessage) -> Dict[str, Any]: + def 
serialize_opds_message(self, entry: OPDSMessage) -> dict[str, Any]: return dict(urn=entry.urn, description=entry.message) - def serialize_work_entry(self, data: WorkEntryData) -> Dict[str, Any]: - metadata: Dict[str, Any] = {} + def serialize_work_entry(self, data: WorkEntryData) -> dict[str, Any]: + metadata: dict[str, Any] = {} if data.additionalType: metadata["@type"] = data.additionalType @@ -126,7 +126,7 @@ def serialize_work_entry(self, data: WorkEntryData) -> Dict[str, Any]: publication = {"metadata": metadata, "links": links, "images": images} return publication - def _serialize_link(self, link: Link) -> Dict[str, Any]: + def _serialize_link(self, link: Link) -> dict[str, Any]: serialized = {"href": link.href, "rel": link.rel} if link.type: serialized["type"] = link.type @@ -134,18 +134,18 @@ def _serialize_link(self, link: Link) -> Dict[str, Any]: serialized["title"] = link.title return serialized - def _serialize_acquisition_link(self, link: Acquisition) -> Dict[str, Any]: + def _serialize_acquisition_link(self, link: Acquisition) -> dict[str, Any]: item = self._serialize_link(link) - def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: - result: Dict[str, Any] = dict(type=indirect.type) + def _indirect(indirect: IndirectAcquisition) -> dict[str, Any]: + result: dict[str, Any] = dict(type=indirect.type) if indirect.children: result["child"] = [] for child in indirect.children: result["child"].append(_indirect(child)) return result - props: Dict[str, Any] = {} + props: dict[str, Any] = {} if link.availability_status: state = link.availability_status if link.is_loan: @@ -171,7 +171,9 @@ def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: if link.drm_licensor: props["licensor"] = { - "clientToken": getattr(link.drm_licensor, "clientToken"), + "clientToken": getattr( + getattr(link.drm_licensor, "clientToken"), "text" + ), "vendor": getattr(link.drm_licensor, "vendor"), } @@ -180,12 +182,12 @@ def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: return item - def _serialize_feed_links(self, feed: FeedData) -> Dict[str, Any]: - link_data: Dict[str, List[Dict[str, Any]]] = {"links": [], "facets": []} + def _serialize_feed_links(self, feed: FeedData) -> dict[str, Any]: + link_data: dict[str, list[dict[str, Any]]] = {"links": [], "facets": []} for link in feed.links: link_data["links"].append(self._serialize_link(link)) - facet_links: Dict[str, Any] = defaultdict(lambda: {"metadata": {}, "links": []}) + facet_links: dict[str, Any] = defaultdict(lambda: {"metadata": {}, "links": []}) for link in feed.facet_links: group = getattr(link, "facetGroup", None) if group: @@ -196,8 +198,8 @@ def _serialize_feed_links(self, feed: FeedData) -> Dict[str, Any]: return link_data - def _serialize_contributor(self, author: Author) -> Dict[str, Any]: - result: Dict[str, Any] = {"name": author.name} + def _serialize_contributor(self, author: Author) -> dict[str, Any]: + result: dict[str, Any] = {"name": author.name} if author.sort_name: result["sortAs"] = author.sort_name if author.link: @@ -211,5 +213,5 @@ def content_type(self) -> str: return "application/opds+json" @classmethod - def to_string(cls, data: Dict[str, Any]) -> str: + def to_string(cls, data: dict[str, Any]) -> str: return json.dumps(data, indent=2) diff --git a/core/feed/types.py b/core/feed/types.py index cdf5207bd..18cbaaaec 100644 --- a/core/feed/types.py +++ b/core/feed/types.py @@ -1,8 +1,9 @@ from __future__ import annotations +from collections.abc import Generator from dataclasses import dataclass, 
field from datetime import date, datetime -from typing import Any, Dict, Generator, List, Optional, Tuple, cast +from typing import Any, cast from typing_extensions import Self @@ -15,7 +16,7 @@ @dataclass class BaseModel: - def _vars(self) -> Generator[Tuple[str, Any], None, None]: + def _vars(self) -> Generator[tuple[str, Any], None, None]: """Yield attributes as a tuple""" _attrs = vars(self) for name, value in _attrs.items(): @@ -25,17 +26,17 @@ def _vars(self) -> Generator[Tuple[str, Any], None, None]: continue yield name, value - def dict(self) -> Dict[str, Any]: + def asdict(self) -> dict[str, Any]: """Dataclasses do not return undefined attributes via `asdict` so we must implement this ourselves""" attrs = {} for name, value in self: if isinstance(value, BaseModel): - attrs[name] = value.dict() + attrs[name] = value.asdict() else: attrs[name] = value return attrs - def __iter__(self) -> Generator[Tuple[str, Any], None, None]: + def __iter__(self) -> Generator[tuple[str, Any], None, None]: """Allow attribute iteration""" yield from self._vars() @@ -52,7 +53,7 @@ def get(self, name: str, *default: Any) -> Any: @dataclass class FeedEntryType(BaseModel): - text: Optional[str] = None + text: str | None = None @classmethod def create(cls, **kwargs: Any) -> Self: @@ -61,11 +62,11 @@ def create(cls, **kwargs: Any) -> Self: obj.add_attributes(kwargs) return obj - def add_attributes(self, attrs: Dict[str, Any]) -> None: + def add_attributes(self, attrs: dict[str, Any]) -> None: for name, data in attrs.items(): setattr(self, name, data) - def children(self) -> Generator[Tuple[str, FeedEntryType], None, None]: + def children(self) -> Generator[tuple[str, FeedEntryType], None, None]: """Yield all FeedEntryType attributes""" for name, value in self: if isinstance(value, self.__class__): @@ -75,24 +76,24 @@ def children(self) -> Generator[Tuple[str, FeedEntryType], None, None]: @dataclass class Link(FeedEntryType): - href: Optional[str] = None - rel: Optional[str] = None - type: Optional[str] = None + href: str | None = None + rel: str | None = None + type: str | None = None # Additional types - role: Optional[str] = None - title: Optional[str] = None + role: str | None = None + title: str | None = None - def dict(self) -> Dict[str, Any]: + def asdict(self) -> dict[str, Any]: """A dict without None values""" - d = super().dict() + d = super().asdict() santized = {} for k, v in d.items(): if v is not None: santized[k] = v return santized - def link_attribs(self) -> Dict[str, Any]: + def link_attribs(self) -> dict[str, Any]: d = dict(href=self.href) for key in ["rel", "type"]: if (value := getattr(self, key, None)) is not None: @@ -102,28 +103,28 @@ def link_attribs(self) -> Dict[str, Any]: @dataclass class IndirectAcquisition(BaseModel): - type: Optional[str] = None - children: List[IndirectAcquisition] = field(default_factory=list) + type: str | None = None + children: list[IndirectAcquisition] = field(default_factory=list) @dataclass class Acquisition(Link): - holds_position: Optional[str] = None - holds_total: Optional[str] = None + holds_position: str | None = None + holds_total: str | None = None - copies_available: Optional[str] = None - copies_total: Optional[str] = None + copies_available: str | None = None + copies_total: str | None = None - availability_status: Optional[str] = None - availability_since: Optional[str] = None - availability_until: Optional[str] = None + availability_status: str | None = None + availability_since: str | None = None + availability_until: str | None = None - 
rights: Optional[str] = None + rights: str | None = None - lcp_hashed_passphrase: Optional[FeedEntryType] = None - drm_licensor: Optional[FeedEntryType] = None + lcp_hashed_passphrase: FeedEntryType | None = None + drm_licensor: FeedEntryType | None = None - indirect_acquisitions: List[IndirectAcquisition] = field(default_factory=list) + indirect_acquisitions: list[IndirectAcquisition] = field(default_factory=list) # Signal if the acquisition is for a loan or a hold for the patron is_loan: bool = False @@ -132,47 +133,47 @@ class Acquisition(Link): @dataclass class Author(FeedEntryType): - name: Optional[str] = None - sort_name: Optional[str] = None - viaf: Optional[str] = None - role: Optional[str] = None - family_name: Optional[str] = None - wikipedia_name: Optional[str] = None - lc: Optional[str] = None - link: Optional[Link] = None + name: str | None = None + sort_name: str | None = None + viaf: str | None = None + role: str | None = None + family_name: str | None = None + wikipedia_name: str | None = None + lc: str | None = None + link: Link | None = None @dataclass class WorkEntryData(BaseModel): """All the metadata possible for a work. This is not a FeedEntryType because we want strict control.""" - additionalType: Optional[str] = None - identifier: Optional[str] = None - pwid: Optional[str] = None - issued: Optional[datetime | date] = None - duration: Optional[float] = None - - summary: Optional[FeedEntryType] = None - language: Optional[FeedEntryType] = None - publisher: Optional[FeedEntryType] = None - published: Optional[FeedEntryType] = None - updated: Optional[FeedEntryType] = None - title: Optional[FeedEntryType] = None - sort_title: Optional[FeedEntryType] = None - subtitle: Optional[FeedEntryType] = None - series: Optional[FeedEntryType] = None - imprint: Optional[FeedEntryType] = None - - authors: List[Author] = field(default_factory=list) - contributors: List[Author] = field(default_factory=list) - categories: List[FeedEntryType] = field(default_factory=list) - ratings: List[FeedEntryType] = field(default_factory=list) - distribution: Optional[FeedEntryType] = None + additionalType: str | None = None + identifier: str | None = None + pwid: str | None = None + issued: datetime | date | None = None + duration: float | None = None + + summary: FeedEntryType | None = None + language: FeedEntryType | None = None + publisher: FeedEntryType | None = None + published: FeedEntryType | None = None + updated: FeedEntryType | None = None + title: FeedEntryType | None = None + sort_title: FeedEntryType | None = None + subtitle: FeedEntryType | None = None + series: FeedEntryType | None = None + imprint: FeedEntryType | None = None + + authors: list[Author] = field(default_factory=list) + contributors: list[Author] = field(default_factory=list) + categories: list[FeedEntryType] = field(default_factory=list) + ratings: list[FeedEntryType] = field(default_factory=list) + distribution: FeedEntryType | None = None # Links - acquisition_links: List[Acquisition] = field(default_factory=list) - image_links: List[Link] = field(default_factory=list) - other_links: List[Link] = field(default_factory=list) + acquisition_links: list[Acquisition] = field(default_factory=list) + image_links: list[Link] = field(default_factory=list) + other_links: list[Link] = field(default_factory=list) @dataclass @@ -180,17 +181,17 @@ class WorkEntry(BaseModel): work: Work edition: Edition identifier: Identifier - license_pool: Optional[LicensePool] = None + license_pool: LicensePool | None = None # Actual, 
computed feed data - computed: Optional[WorkEntryData] = None + computed: WorkEntryData | None = None def __init__( self, - work: Optional[Work] = None, - edition: Optional[Edition] = None, - identifier: Optional[Identifier] = None, - license_pool: Optional[LicensePool] = None, + work: Work | None = None, + edition: Edition | None = None, + identifier: Identifier | None = None, + license_pool: LicensePool | None = None, ) -> None: if None in (work, edition, identifier): raise ValueError( @@ -204,13 +205,13 @@ def __init__( @dataclass class FeedMetadata(BaseModel): - title: Optional[str] = None - id: Optional[str] = None - updated: Optional[str] = None - items_per_page: Optional[int] = None - patron: Optional[FeedEntryType] = None - drm_licensor: Optional[FeedEntryType] = None - lcp_hashed_passphrase: Optional[FeedEntryType] = None + title: str | None = None + id: str | None = None + updated: str | None = None + items_per_page: int | None = None + patron: FeedEntryType | None = None + drm_licensor: FeedEntryType | None = None + lcp_hashed_passphrase: FeedEntryType | None = None class DataEntryTypes: @@ -221,21 +222,21 @@ class DataEntryTypes: class DataEntry(FeedEntryType): """Other kinds of information, like entries of a navigation feed""" - type: Optional[str] = None - title: Optional[str] = None - id: Optional[str] = None - links: List[Link] = field(default_factory=list) + type: str | None = None + title: str | None = None + id: str | None = None + links: list[Link] = field(default_factory=list) @dataclass class FeedData(BaseModel): - links: List[Link] = field(default_factory=list) - breadcrumbs: List[Link] = field(default_factory=list) - facet_links: List[Link] = field(default_factory=list) - entries: List[WorkEntry] = field(default_factory=list) - data_entries: List[DataEntry] = field(default_factory=list) + links: list[Link] = field(default_factory=list) + breadcrumbs: list[Link] = field(default_factory=list) + facet_links: list[Link] = field(default_factory=list) + entries: list[WorkEntry] = field(default_factory=list) + data_entries: list[DataEntry] = field(default_factory=list) metadata: FeedMetadata = field(default_factory=lambda: FeedMetadata()) - entrypoint: Optional[str] = None + entrypoint: str | None = None class Config: arbitrary_types_allowed = True diff --git a/core/feed/util.py b/core/feed/util.py index 5519f0a5b..808201a0f 100644 --- a/core/feed/util.py +++ b/core/feed/util.py @@ -1,5 +1,4 @@ import datetime -from typing import Union import pytz @@ -7,7 +6,7 @@ TIME_FORMAT_NAIVE = "%Y-%m-%dT%H:%M:%SZ" -def strftime(date: Union[datetime.datetime, datetime.date]) -> str: +def strftime(date: datetime.datetime | datetime.date) -> str: """ Format a date for the OPDS feeds. 
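`strftime` accepts either a `datetime` or a `date`; judging from `TIME_FORMAT_NAIVE` above, naive values are rendered with a literal trailing `Z` (i.e. treated as UTC). A minimal stdlib-only sketch under that assumption:

```python
import datetime

# Assumed behavior for a naive datetime, per TIME_FORMAT_NAIVE in core/feed/util.py:
TIME_FORMAT_NAIVE = "%Y-%m-%dT%H:%M:%SZ"
print(datetime.datetime(2024, 1, 2, 3, 4, 5).strftime(TIME_FORMAT_NAIVE))
# -> 2024-01-02T03:04:05Z
```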
diff --git a/core/integration/base.py b/core/integration/base.py index db17e20ae..606648120 100644 --- a/core/integration/base.py +++ b/core/integration/base.py @@ -1,7 +1,8 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Dict, Generic, Mapping, Protocol, Type, TypeVar +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar from sqlalchemy.orm import Session from sqlalchemy.orm.attributes import Mapped, flag_modified @@ -13,15 +14,15 @@ class IntegrationConfigurationProtocol(Protocol): - settings_dict: Mapped[Dict[str, Any]] + settings_dict: Mapped[dict[str, Any]] T = TypeVar("T", bound=BaseSettings) def integration_settings_load( - settings_cls: Type[T], - integration: IntegrationConfigurationProtocol, + settings_cls: type[T], + integration: IntegrationConfigurationProtocol | dict[str, Any], ) -> T: """ Load the settings object for an integration from the database. @@ -31,17 +32,20 @@ def integration_settings_load( when round tripping from the database (such as enum) and construct() doesn't do that. :param settings_cls: The settings class that the settings should be loaded into. - :param integration: The integration to load the settings from. This should be a - SQLAlchemy model with a settings_dict JSONB column. + :param integration: The integration to load the settings from or a dict that should + be loaded into the model. If it is an integration, it should be a SQLAlchemy model + with a settings_dict JSONB column. :return: An instance of the settings class loaded with the settings from the database. """ - settings_dict = integration.settings_dict + settings_dict = ( + integration if isinstance(integration, dict) else integration.settings_dict + ) return settings_cls(**settings_dict) def integration_settings_update( - settings_cls: Type[BaseSettings], + settings_cls: type[BaseSettings], integration: IntegrationConfigurationProtocol, new_settings: BaseSettings | Mapping[str, Any], merge: bool = False, @@ -72,6 +76,7 @@ def integration_settings_update( SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) +ChildSettingsType = TypeVar("ChildSettingsType", bound=BaseSettings, covariant=True) class HasIntegrationConfiguration(Generic[SettingsType], ABC): @@ -89,7 +94,7 @@ def description(cls) -> str: @classmethod @abstractmethod - def settings_class(cls) -> Type[SettingsType]: + def settings_class(cls) -> type[SettingsType]: """Get the settings for this integration""" ... @@ -135,7 +140,7 @@ class HasLibraryIntegrationConfiguration( ): @classmethod @abstractmethod - def library_settings_class(cls) -> Type[LibrarySettingsType]: + def library_settings_class(cls) -> type[LibrarySettingsType]: """Get the library settings for this integration""" ... @@ -167,9 +172,58 @@ def library_settings_update( ) -class HasChildIntegrationConfiguration(HasIntegrationConfiguration[SettingsType], ABC): +class HasChildIntegrationConfiguration( + Generic[SettingsType, ChildSettingsType], + HasIntegrationConfiguration[SettingsType], + ABC, +): @classmethod @abstractmethod - def child_settings_class(cls) -> Type[BaseSettings]: + def child_settings_class(cls) -> type[ChildSettingsType]: """Get the child settings class""" ... 
+ + @classmethod + def child_settings_load(cls, child: IntegrationConfiguration) -> ChildSettingsType: + """ + Load the child settings object for this integration from the database. + """ + return integration_settings_load(cls.child_settings_class(), child) + + @classmethod + def settings_load( + cls, + integration: IntegrationConfiguration, + parent: IntegrationConfiguration | None = None, + ) -> SettingsType: + """ + Load the full settings object for this integration from the database. + + If a parent is provided, the child settings will be merged with the parent settings, with the child + settings taking precedence. + """ + if parent is None: + return super().settings_load(integration) + else: + parent_settings = super().settings_load(parent) + child_settings = cls.child_settings_load(integration) + + merged_settings = parent_settings.dict() + merged_settings.update(child_settings.dict()) + return integration_settings_load(cls.settings_class(), merged_settings) + + @classmethod + def child_settings_update( + cls, + integration: IntegrationConfiguration, + new_settings: BaseSettings | Mapping[str, Any], + merge: bool = False, + ) -> None: + """ + Update the settings for this library integration in the database. + + See the documentation for `integration_settings_update` for more details. + """ + integration_settings_update( + cls.child_settings_class(), integration, new_settings, merge + ) diff --git a/core/integration/goals.py b/core/integration/goals.py index b7326f2ce..99db3d2d6 100644 --- a/core/integration/goals.py +++ b/core/integration/goals.py @@ -9,3 +9,4 @@ class Goals(Enum): PATRON_AUTH_GOAL = "patron_auth" LICENSE_GOAL = "licenses" DISCOVERY_GOAL = "discovery" + CATALOG_GOAL = "catalog" diff --git a/core/integration/registry.py b/core/integration/registry.py index ac54fa39c..a0b55e572 100644 --- a/core/integration/registry.py +++ b/core/integration/registry.py @@ -1,19 +1,8 @@ from __future__ import annotations from collections import defaultdict -from typing import ( - Dict, - Generic, - Iterator, - List, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, - overload, -) +from collections.abc import Iterator +from typing import Generic, TypeVar, overload from core.integration.goals import Goals @@ -26,10 +15,10 @@ class IntegrationRegistryException(ValueError): class IntegrationRegistry(Generic[T]): - def __init__(self, goal: Goals, integrations: Optional[Dict[str, Type[T]]] = None): + def __init__(self, goal: Goals, integrations: dict[str, type[T]] | None = None): """Initialize a new IntegrationRegistry.""" - self._lookup: Dict[str, Type[T]] = {} - self._reverse_lookup: Dict[Type[T], List[str]] = defaultdict(list) + self._lookup: dict[str, type[T]] = {} + self._reverse_lookup: dict[type[T], list[str]] = defaultdict(list) self.goal = goal if integrations: @@ -38,11 +27,11 @@ def __init__(self, goal: Goals, integrations: Optional[Dict[str, Type[T]]] = Non def register( self, - integration: Type[T], + integration: type[T], *, - canonical: Optional[str] = None, - aliases: Optional[List[str]] = None, - ) -> Type[T]: + canonical: str | None = None, + aliases: list[str] | None = None, + ) -> type[T]: """ Register an integration class. @@ -72,29 +61,29 @@ def register( return integration @overload - def get(self, protocol: str, default: None = ...) -> Type[T] | None: + def get(self, protocol: str, default: None = ...) -> type[T] | None: ... @overload - def get(self, protocol: str, default: V) -> Type[T] | V: + def get(self, protocol: str, default: V) -> type[T] | V: ... 
- def get(self, protocol: str, default: V | None = None) -> Type[T] | V | None: + def get(self, protocol: str, default: V | None = None) -> type[T] | V | None: """Look up an integration class by protocol.""" if protocol not in self._lookup: return default return self[protocol] @overload - def get_protocol(self, integration: Type[T], default: None = ...) -> str | None: + def get_protocol(self, integration: type[T], default: None = ...) -> str | None: ... @overload - def get_protocol(self, integration: Type[T], default: V) -> str | V: + def get_protocol(self, integration: type[T], default: V) -> str | V: ... def get_protocol( - self, integration: Type[T], default: V | None = None + self, integration: type[T], default: V | None = None ) -> str | V | None: """Look up the canonical protocol for an integration class.""" names = self.get_protocols(integration, default) @@ -104,24 +93,24 @@ def get_protocol( @overload def get_protocols( - self, integration: Type[T], default: None = ... - ) -> List[str] | None: + self, integration: type[T], default: None = ... + ) -> list[str] | None: ... @overload - def get_protocols(self, integration: Type[T], default: V) -> List[str] | V: + def get_protocols(self, integration: type[T], default: V) -> list[str] | V: ... def get_protocols( - self, integration: Type[T], default: V | None = None - ) -> List[str] | V | None: + self, integration: type[T], default: V | None = None + ) -> list[str] | V | None: """Look up all protocols for an integration class.""" if integration not in self._reverse_lookup: return default return self._reverse_lookup[integration] @property - def integrations(self) -> Set[Type[T]]: + def integrations(self) -> set[type[T]]: """Return a set of all registered canonical protocols.""" return set(self._reverse_lookup.keys()) @@ -137,11 +126,11 @@ def update(self, other: IntegrationRegistry[T]) -> None: assert isinstance(names, list) self.register(integration, canonical=names[0], aliases=names[1:]) - def __iter__(self) -> Iterator[Tuple[str, Type[T]]]: + def __iter__(self) -> Iterator[tuple[str, type[T]]]: for integration, names in self._reverse_lookup.items(): yield names[0], integration - def __getitem__(self, protocol: str) -> Type[T]: + def __getitem__(self, protocol: str) -> type[T]: """Look up an integration class by protocol, using the [] operator.""" return self._lookup[protocol] @@ -156,15 +145,13 @@ def __contains__(self, name: str) -> bool: def __repr__(self) -> str: return f"<IntegrationRegistry: {self._lookup}>" - def __add__( - self, other: IntegrationRegistry[V] - ) -> IntegrationRegistry[Union[T, V]]: + def __add__(self, other: IntegrationRegistry[V]) -> IntegrationRegistry[T | V]: if not isinstance(other, IntegrationRegistry): raise TypeError( f"unsupported operand type(s) for +: 'IntegrationRegistry' and '{type(other).__name__}'" ) - new: IntegrationRegistry[Union[T, V]] = IntegrationRegistry(self.goal) + new: IntegrationRegistry[T | V] = IntegrationRegistry(self.goal) new.update(self) new.update(other) return new diff --git a/core/integration/settings.py b/core/integration/settings.py index ec4eeb807..1b42efac2 100644 --- a/core/integration/settings.py +++ b/core/integration/settings.py @@ -1,18 +1,9 @@ from __future__ import annotations +from collections.abc import Callable, Mapping from dataclasses import dataclass from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Mapping, - Optional, - Tuple, - Union, -) +from typing import TYPE_CHECKING, Any, Union from pydantic import ( BaseModel, @@ -74,29 +65,29 @@ def
FormField( default: Any = Undefined, *, form: ConfigurationFormItem = None, # type: ignore[assignment] - default_factory: Optional[NoArgAnyCallable] = None, - alias: Optional[str] = None, - title: Optional[str] = None, - description: Optional[str] = None, - exclude: Union[AbstractSetIntStr, MappingIntStrAny, Any] = None, - include: Union[AbstractSetIntStr, MappingIntStrAny, Any] = None, - const: Optional[bool] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - multiple_of: Optional[float] = None, - allow_inf_nan: Optional[bool] = None, - max_digits: Optional[int] = None, - decimal_places: Optional[int] = None, - min_items: Optional[int] = None, - max_items: Optional[int] = None, - unique_items: Optional[bool] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, + default_factory: NoArgAnyCallable | None = None, + alias: str | None = None, + title: str | None = None, + description: str | None = None, + exclude: AbstractSetIntStr | MappingIntStrAny | Any = None, + include: AbstractSetIntStr | MappingIntStrAny | Any = None, + const: bool | None = None, + gt: float | None = None, + ge: float | None = None, + lt: float | None = None, + le: float | None = None, + multiple_of: float | None = None, + allow_inf_nan: bool | None = None, + max_digits: int | None = None, + decimal_places: int | None = None, + min_items: int | None = None, + max_items: int | None = None, + unique_items: bool | None = None, + min_length: int | None = None, + max_length: int | None = None, allow_mutation: bool = True, - regex: Optional[str] = None, - discriminator: Optional[str] = None, + regex: str | None = None, + discriminator: str | None = None, repr: bool = True, **extra: Any, ) -> Any: @@ -155,6 +146,9 @@ class ConfigurationFormItemType(Enum): IMAGE = "image" +ConfigurationFormOptionsType = Mapping[Union[Enum, str, None], str] + + @dataclass(frozen=True) class ConfigurationFormItem: """ @@ -182,9 +176,9 @@ class ConfigurationFormItem: # When the type is SELECT, LIST, or MENU, the options are used to populate the # field in the admin interface. This can either be a callable that returns a # dictionary of options or a dictionary of options. - options: Callable[[Session], Dict[Enum | str, str]] | Mapping[ - Enum | str, str - ] | None = None + options: Callable[ + [Session], ConfigurationFormOptionsType + ] | ConfigurationFormOptionsType | None = None # Required is usually determined by the Pydantic model, but can be overridden # here, in the case where a field would not be required in the model, but is @@ -198,6 +192,8 @@ class ConfigurationFormItem: @staticmethod def get_form_value(value: Any) -> Any: + if value is None: + return "" if isinstance(value, Enum): return value.value if isinstance(value, bool): @@ -208,13 +204,13 @@ def get_form_value(value: Any) -> Any: def to_dict( self, db: Session, key: str, required: bool = False, default: Any = None - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """ Convert the ConfigurationFormItem to a dictionary The dictionary is in the format expected by the admin interface. 
""" - form_entry: Dict[str, Any] = { + form_entry: dict[str, Any] = { "label": self.label, "key": key, "required": required or self.required, @@ -260,7 +256,7 @@ class MySettings(BaseSettings): """ @root_validator(pre=True) - def extra_args(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def extra_args(cls, values: dict[str, Any]) -> dict[str, Any]: # We log any extra arguments that are passed to the model, but # we don't raise an error, these arguments may be old configuration # settings that have not been cleaned up by a migration yet. @@ -305,8 +301,19 @@ class Config: # not the alias. allow_population_by_field_name = True + # If your settings class needs additional form fields that are not + # defined on the model, you can add them here. This is useful if you + # need to add a custom form field, but don't want the data in the field + # to be stored on the model in the database. For example, if you want + # to add a custom form field that allows the user to upload an image, but + # want to store that image data outside the settings model. + # + # The key for the dictionary should be the field name, and the value + # should be a ConfigurationFormItem object that defines the form field. + _additional_form_fields: dict[str, ConfigurationFormItem] = {} + @classmethod - def configuration_form(cls, db: Session) -> List[Dict[str, Any]]: + def configuration_form(cls, db: Session) -> list[dict[str, Any]]: """Get the configuration dictionary for this class""" config = [] for field in cls.__fields__.values(): @@ -327,10 +334,18 @@ def configuration_form(cls, db: Session) -> List[Dict[str, Any]]: config.sort(key=lambda x: x[0]) return [item[1] for item in config] - def dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: + def dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: """Override the dict method to remove the default values""" + if "exclude_defaults" not in kwargs: kwargs["exclude_defaults"] = True + + # Allow us to exclude extra fields that are not defined on the model + if "exclude_extra" in kwargs: + exclude_extra = kwargs.pop("exclude_extra") + if exclude_extra: + kwargs["exclude"] = self.__fields_set__ - self.__fields__.keys() + return super().dict(*args, **kwargs) @classmethod @@ -347,17 +362,6 @@ def get_form_field_label(cls, field_name: str) -> str: else: return field_name - # If your settings class needs additional form fields that are not - # defined on the model, you can add them here. This is useful if you - # need to add a custom form field, but don't want the data in the field - # to be stored on the model in the database. For example, if you want - # to add a custom form field that allows the user to upload an image, but - # want to store that image data outside the settings model. - # - # The key for the dictionary should be the field name, and the value - # should be a ConfigurationFormItem object that defines the form field. 
- _additional_form_fields: Dict[str, ConfigurationFormItem] = {} - def __init__(self, **data: Any): """ Override the init method to return our custom ProblemError diff --git a/core/jobs/integration_test.py b/core/jobs/integration_test.py index b4fd8a7c9..6128ca9e5 100644 --- a/core/jobs/integration_test.py +++ b/core/jobs/integration_test.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from json import JSONDecodeError from ssl import get_server_certificate -from typing import Any, List, cast +from typing import Any, cast from urllib.parse import urlparse import pytz @@ -97,7 +97,7 @@ def arg_parser(cls): # pragma: no cover def _read_config( self, filepath: str, key_file: str | None = None, raw: bool = False - ) -> List | bytes: + ) -> list | bytes: """Read the config yml from a source. The file should be a yml with a list of IntegrationTestDetails as the content. :param filepath: The path to the file, could be an URL or a file on the local directory @@ -157,7 +157,7 @@ def do_run(self) -> None: self._encrypt(args.config, args.key_file, args.encrypt_file) return - data = cast(List[dict], self._read_config(args.config, key_file=args.key_file)) + data = cast(list[dict], self._read_config(args.config, key_file=args.key_file)) for datapoint in data: test = IntegrationTestDetails(**datapoint) diff --git a/core/jobs/patron_activity_sync.py b/core/jobs/patron_activity_sync.py index 5bd168ce8..2cdf71f1f 100644 --- a/core/jobs/patron_activity_sync.py +++ b/core/jobs/patron_activity_sync.py @@ -1,5 +1,4 @@ from datetime import timedelta -from typing import List, Optional from sqlalchemy import or_ from sqlalchemy.orm import Query @@ -16,7 +15,7 @@ class PatronActivitySyncNotificationScript(PatronSweepMonitor): and notify said patron devices to re-sync their data""" STALE_ACTIVITY_SYNC_DAYS = 2 - SERVICE_NAME: Optional[str] = "Patron Activity Sync Notification" + SERVICE_NAME: str | None = "Patron Activity Sync Notification" def item_query(self) -> Query: expired_sync = utc_now() - timedelta(days=self.STALE_ACTIVITY_SYNC_DAYS) @@ -36,5 +35,5 @@ def item_query(self) -> Query: ) return query - def process_items(self, items: List[Patron]) -> None: + def process_items(self, items: list[Patron]) -> None: PushNotifications.send_activity_sync_message(items) diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py index c9f72bc87..c63d7bde9 100644 --- a/core/jobs/playtime_entries.py +++ b/core/jobs/playtime_entries.py @@ -6,15 +6,17 @@ from collections import defaultdict from datetime import datetime, timedelta from tempfile import TemporaryFile -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast import dateutil.parser import pytz +from sqlalchemy.orm import Session from sqlalchemy.sql.functions import sum from core.config import Configuration from core.model import get_one from core.model.edition import Edition +from core.model.identifier import Identifier, RecursiveEquivalencyCache from core.model.time_tracking import PlaytimeEntry, PlaytimeSummary from core.util.datetime_helpers import previous_months, utc_now from core.util.email import EmailManager @@ -76,11 +78,11 @@ class PlaytimeEntriesEmailReportsScript(Script): @classmethod def arg_parser(cls): - # The default `start` and `until` dates encompass the previous three months. + # The default `start` and `until` dates encompass the previous month. # We convert them to strings here so that they are handled the same way # as non-default dates specified as arguments. 
default_start, default_until = ( - date.isoformat() for date in previous_months(number_of_months=3) + date.isoformat() for date in previous_months(number_of_months=1) ) parser = argparse.ArgumentParser( @@ -154,7 +156,15 @@ def do_run(self): # Write the data as a CSV writer = csv.writer(temp) writer.writerow( - ["date", "urn", "collection", "library", "title", "total seconds"] + [ + "date", + "urn", + "isbn", + "collection", + "library", + "title", + "total seconds", + ] ) for ( @@ -164,15 +174,25 @@ def do_run(self): identifier_id, total, ) in self._fetch_report_records(start=start, until=until): - edition = None + edition: Edition | None = None + identifier: Identifier | None = None if identifier_id: edition = get_one( self._db, Edition, primary_identifier_id=identifier_id ) + # Use the identifier from the edition where available. + # Otherwise, we'll have to look it up. + identifier = ( + edition.primary_identifier + if edition + else get_one(self._db, Identifier, id=identifier_id) + ) + isbn = self._isbn_for_identifier(identifier) title = edition and edition.title row = ( report_date_label, urn, + isbn, collection_name, library_name, title, @@ -218,3 +238,39 @@ def _fetch_report_records(self, start: datetime, until: datetime) -> Query: PlaytimeSummary.identifier_id, ) ) + + @staticmethod + def _isbn_for_identifier( + identifier: Identifier | None, + /, + *, + default_value: str = "", + ) -> str: + """Find the strongest ISBN match for the given identifier. + + :param identifier: The identifier to match. + :param default_value: The default value to return if the identifier is missing or a match is not found. + """ + if identifier is None: + return default_value + + if identifier.type == Identifier.ISBN: + return cast(str, identifier.identifier) + + # If our identifier is not an ISBN itself, we'll use our Recursive Equivalency + # mechanism to find the next best one that is, if available. 
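+        # (RecursiveEquivalencyCache is assumed here to be a precomputed
+        # table of transitive identifier equivalencies keyed by
+        # parent_identifier_id, so the lookup below is a subquery plus a
+        # type filter rather than a graph walk at report time.)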
+ db = Session.object_session(identifier) + eq_subquery = db.query(RecursiveEquivalencyCache.identifier_id).filter( + RecursiveEquivalencyCache.parent_identifier_id == identifier.id + ) + equivalent_identifiers = ( + db.query(Identifier) + .filter(Identifier.id.in_(eq_subquery)) + .filter(Identifier.type == Identifier.ISBN) + ) + + isbn = next( + map(lambda id_: id_.identifier, equivalent_identifiers), + None, + ) + return isbn or default_value diff --git a/core/lane.py b/core/lane.py index 59c0f9c65..4d04fcd3f 100644 --- a/core/lane.py +++ b/core/lane.py @@ -4,7 +4,7 @@ import logging import time from collections import defaultdict -from typing import TYPE_CHECKING, Any, List, Optional +from typing import Any from urllib.parse import quote_plus from flask_babel import lazy_gettext as _ @@ -48,6 +48,7 @@ DataSource, Edition, Genre, + IntegrationConfiguration, Library, LicensePool, Work, @@ -57,11 +58,7 @@ tuple_to_numericrange, ) from core.model.before_flush_decorator import Listener -from core.model.configuration import ( - ConfigurationAttributeValue, - ConfigurationSetting, - ExternalIntegration, -) +from core.model.configuration import ConfigurationAttributeValue, ExternalIntegration from core.model.constants import EditionConstants from core.model.hybrid import hybrid_property from core.model.listeners import site_configuration_has_changed @@ -72,9 +69,6 @@ from core.util.opds_writer import OPDSFeed from core.util.problem_detail import ProblemDetail -if TYPE_CHECKING: - from core.model import CachedMARCFile # noqa: autoflake - class BaseFacets(FacetConstants): """Basic faceting class that doesn't modify a search filter at all. @@ -711,7 +705,7 @@ def modify_search_filter(self, filter): self.collection_name and self.collection_name != self.COLLECTION_NAME_ALL ): - collection = get_one(_db, Collection, name=self.collection_name) + collection = Collection.by_name(_db, self.collection_name) if collection: filter.collection_ids = [collection.id] @@ -1328,7 +1322,7 @@ class WorkList: # If a certain type of Worklist should always have its OPDS feeds # cached under a specific type, define that type as # CACHED_FEED_TYPE. - CACHED_FEED_TYPE: Optional[str] = None + CACHED_FEED_TYPE: str | None = None # By default, a WorkList is always visible. 
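+    # (On Lane, the database-backed subclass, visibility is instead backed
+    # by the `_visible` column that appears later in this diff.)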
@property @@ -2301,22 +2295,21 @@ def _restrict_query_for_no_hold_collections( # Modify the query to not show holds on collections # that don't allow it # This query looks like a prime candidate for some in-memory caching - restricted_collections = ( - _db.query(Collection.id) + restricted_collections = _db.execute( + select(Collection.id) .join( - ConfigurationSetting, - Collection.external_integration_id - == ConfigurationSetting.external_integration_id, + IntegrationConfiguration, + Collection.integration_configuration_id == IntegrationConfiguration.id, ) - .filter( - Collection.id.in_(self.collection_ids), - ConfigurationSetting.library_id == self.library_id, - ConfigurationSetting.key == ExternalIntegration.DISPLAY_RESERVES, - ConfigurationSetting.value == ConfigurationAttributeValue.NOVALUE.value, + .where( + IntegrationConfiguration.settings_dict.contains( + { + ExternalIntegration.DISPLAY_RESERVES: ConfigurationAttributeValue.NOVALUE.value + } + ) ) - .all() - ) - restricted_collection_ids = (r[0] for r in restricted_collections) + ).all() + restricted_collection_ids = (r.id for r in restricted_collections) # If a licensepool is from a collection that restricts holds # and has no available copies, then we don't want to see it @@ -2618,7 +2611,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): size_by_entrypoint = Column(JSON, nullable=True) # A lane may have one parent lane and many sublanes. - sublanes: Mapped[List[Lane]] = relationship( + sublanes: Mapped[list[Lane]] = relationship( "Lane", backref=backref("parent", remote_side=[id]), ) @@ -2626,7 +2619,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): # A lane may have multiple associated LaneGenres. For most lanes, # this is how the contents of the lanes are defined. genres = association_proxy("lane_genres", "genre", creator=LaneGenre.from_genre) - lane_genres: Mapped[List[LaneGenre]] = relationship( + lane_genres: Mapped[list[LaneGenre]] = relationship( "LaneGenre", foreign_keys="LaneGenre.lane_id", backref="lane", @@ -2685,7 +2678,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): ) # Only the books on these specific CustomLists will be shown. - customlists: Mapped[List[CustomList]] = relationship( + customlists: Mapped[list[CustomList]] = relationship( "CustomList", secondary=lambda: lanes_customlists, backref="lane" # type: ignore ) @@ -2720,13 +2713,6 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): # admin interface can see all the lanes, visible or not. _visible = Column("visible", Boolean, default=True, nullable=False) - # A Lane may have many CachedMARCFiles. 
- cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( - "CachedMARCFile", - backref="lane", - cascade="all, delete-orphan", - ) - __table_args__ = (UniqueConstraint("parent_id", "display_name"),) def __init__(self, *args, **kwargs): diff --git a/core/lcp/credential.py b/core/lcp/credential.py index f88224521..3ba1a4296 100644 --- a/core/lcp/credential.py +++ b/core/lcp/credential.py @@ -1,6 +1,6 @@ import logging from enum import Enum -from typing import Any, Optional +from typing import Any from sqlalchemy.orm import Session @@ -67,7 +67,7 @@ def _get_or_create_persistent_token( data_source_type: Any, credential_type: Any, commit: bool, - value: Optional[str] = None, + value: str | None = None, ) -> Any: """Gets or creates a new persistent token diff --git a/core/marc.py b/core/marc.py index 928c65e69..d7f7aa34f 100644 --- a/core/marc.py +++ b/core/marc.py @@ -1,38 +1,52 @@ +from __future__ import annotations + import re +import urllib.parse +from collections.abc import Mapping +from datetime import datetime from io import BytesIO -from typing import Optional +from uuid import UUID, uuid4 -from flask_babel import lazy_gettext as _ +import pytz +from pydantic import NonNegativeInt from pymarc import Field, Record, Subfield +from sqlalchemy import select +from sqlalchemy.engine import ScalarResult from sqlalchemy.orm.session import Session from core.classifier import Classifier -from core.config import CannotLoadConfiguration -from core.external_search import ExternalSearchIndex, SortKeyPagination -from core.lane import BaseFacets, Lane +from core.integration.base import HasLibraryIntegrationConfiguration +from core.integration.settings import ( + BaseSettings, + ConfigurationFormItem, + ConfigurationFormItemType, + FormField, +) from core.model import ( - CachedMARCFile, + Collection, DeliveryMechanism, Edition, - ExternalIntegration, Identifier, + Library, + LicensePool, + MarcFile, Representation, Work, - get_one_or_create, + create, ) -from core.service.storage.s3 import MultipartS3ContextManager, S3Service +from core.service.storage.s3 import S3Service from core.util import LanguageCodes from core.util.datetime_helpers import utc_now +from core.util.log import LoggerMixin +from core.util.uuid import uuid_encode -class Annotator: +class Annotator(LoggerMixin): """The Annotator knows how to add information about a Work to a MARC record.""" - marc_cache_field = Work.marc_record.name - # From https://www.loc.gov/standards/valuelist/marctarget.html - AUDIENCE_TERMS = { + AUDIENCE_TERMS: Mapping[str, str] = { Classifier.AUDIENCE_CHILDREN: "Juvenile", Classifier.AUDIENCE_YOUNG_ADULT: "Adolescent", Classifier.AUDIENCE_ADULTS_ONLY: "Adult", @@ -42,7 +56,7 @@ class Annotator: # TODO: Add remaining formats. Maybe there's a better place to # store this so it's easier to keep up-to-date. # There doesn't seem to be any particular vocabulary for this. 
- FORMAT_TERMS = { + FORMAT_TERMS: Mapping[tuple[str | None, str | None], str] = { (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM): "EPUB eBook", ( Representation.EPUB_MEDIA_TYPE, @@ -52,38 +66,90 @@ class Annotator: (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM): "Adobe PDF eBook", } + def __init__( + self, + cm_url: str, + library_short_name: str, + web_client_urls: list[str], + organization_code: str | None, + include_summary: bool, + include_genres: bool, + ) -> None: + self.cm_url = cm_url + self.library_short_name = library_short_name + self.web_client_urls = web_client_urls + self.organization_code = organization_code + self.include_summary = include_summary + self.include_genres = include_genres + def annotate_work_record( self, - work, - active_license_pool, - edition, - identifier, - record, - integration=None, - updated=None, - ): + revised: bool, + work: Work, + active_license_pool: LicensePool, + edition: Edition, + identifier: Identifier, + ) -> Record: """Add metadata from this work to a MARC record. - :work: The Work whose record is being annotated. - :active_license_pool: Of all the LicensePools associated with this + :param revised: Whether this record is being revised. + :param work: The Work whose record is being annotated. + :param active_license_pool: Of all the LicensePools associated with this Work, the client has expressed interest in this one. - :edition: The Edition to use when associating bibliographic + :param edition: The Edition to use when associating bibliographic metadata with this entry. - :identifier: Of all the Identifiers associated with this + :param identifier: Of all the Identifiers associated with this Work, the client has expressed interest in this one. - :param record: A MARCRecord object to be annotated. + + :return: A pymarc Record object. """ + record = Record(leader=self.leader(revised), force_utf8=True) + self.add_control_fields(record, identifier, active_license_pool, edition) + self.add_isbn(record, identifier) + + # TODO: The 240 and 130 fields are for translated works, so they can be grouped even + # though they have different titles. We do not group editions of the same work in + # different languages, so we can't use those yet. + + self.add_title(record, edition) + self.add_contributors(record, edition) + self.add_publisher(record, edition) + self.add_physical_description(record, edition) + self.add_audience(record, work) + self.add_series(record, edition) + self.add_system_details(record) + self.add_ebooks_subject(record) self.add_distributor(record, active_license_pool) self.add_formats(record, active_license_pool) + if self.organization_code: + self.add_marc_organization_code(record, self.organization_code) + + if self.include_summary: + self.add_summary(record, work) + + if self.include_genres: + self.add_genres(record, work) + + self.add_web_client_urls( + record, + identifier, + self.library_short_name, + self.cm_url, + self.web_client_urls, + ) + + return record + @classmethod - def leader(cls, work): + def leader(cls, revised: bool) -> str: # The record length is automatically updated once fields are added. initial_record_length = "00000" - record_status = "n" # New record - if getattr(work, cls.marc_cache_field): + if revised: record_status = "c" # Corrected or revised + else: + record_status = "n" # New record # Distributors consistently seem to use type "a" - language material - for # ebooks, though there is also type "m" for computer files. 
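An aside on the reworked `Annotator` above: it is now configured once per library, and `annotate_work_record` returns a fresh pymarc `Record` instead of mutating a cached one. A minimal usage sketch under the signatures in this diff; the URLs are illustrative values only, and `work` is assumed to be a `Work` already loaded from the database with an active license pool:

```python
from core.marc import Annotator

annotator = Annotator(
    cm_url="https://cm.example.org",                  # illustrative value
    library_short_name="main",                        # illustrative value
    web_client_urls=["https://catalog.example.org"],  # illustrative value
    organization_code=None,  # no 003 field
    include_summary=True,    # emit the 520 field
    include_genres=False,
)
pool = work.active_license_pool()
record = annotator.annotate_work_record(
    revised=False,  # leader status "n" (new) rather than "c" (revised)
    work=work,
    active_license_pool=pool,
    edition=pool.presentation_edition,
    identifier=pool.identifier,
)
print(record.leader, record["245"])
```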
@@ -98,7 +164,9 @@ def leader(cls, work): return leader @classmethod - def add_control_fields(cls, record, identifier, pool, edition): + def add_control_fields( + cls, record: Record, identifier: Identifier, pool: LicensePool, edition: Edition + ) -> None: # Unique identifier for this record. record.add_field(Field(tag="001", data=identifier.urn)) @@ -145,11 +213,11 @@ def add_control_fields(cls, record, identifier, pool, edition): record.add_field(Field(tag="008", data=data)) @classmethod - def add_marc_organization_code(cls, record, marc_org): + def add_marc_organization_code(cls, record: Record, marc_org: str) -> None: record.add_field(Field(tag="003", data=marc_org)) @classmethod - def add_isbn(cls, record, identifier): + def add_isbn(cls, record: Record, identifier: Identifier) -> None: # Add the ISBN if we have one. isbn = None if identifier.type == Identifier.ISBN: @@ -164,7 +232,7 @@ def add_isbn(cls, record, identifier): .order_by(Identifier.id) .first() ) - if isbn: + if isbn and isbn.identifier: record.add_field( Field( tag="020", @@ -176,18 +244,33 @@ def add_isbn(cls, record, identifier): ) @classmethod - def add_title(cls, record, edition): + def add_title(cls, record: Record, edition: Edition) -> None: # Non-filing characters are used to indicate when the beginning of a title # should not be used in sorting. This code tries to identify them by comparing # the title and the sort_title. non_filing_characters = 0 - if edition.title != edition.sort_title and ("," in edition.sort_title): + if ( + edition.title != edition.sort_title + and edition.sort_title is not None + and ("," in edition.sort_title) + ): stemmed = edition.sort_title[: edition.sort_title.rindex(",")] - non_filing_characters = edition.title.index(stemmed) - # MARC only supports up to 9 non-filing characters, but if we got more - # something is probably wrong anyway. - if non_filing_characters > 9: - non_filing_characters = 0 + if edition.title is None: + cls.logger().warning( + "Edition %s has a sort title, but no title.", edition.id + ) + non_filing_characters = 0 + else: + non_filing_characters = edition.title.index(stemmed) + # MARC only supports up to 9 non-filing characters, but if we got more + # something is probably wrong anyway. + if non_filing_characters > 9: + cls.logger().warning( + "Edition %s has %s non-filing characters, but MARC only supports up to 9.", + edition.id, + non_filing_characters, + ) + non_filing_characters = 0 subfields = [Subfield("a", str(edition.title or ""))] if edition.subtitle: @@ -197,19 +280,17 @@ def add_title(cls, record, edition): record.add_field( Field( tag="245", - indicators=["0", non_filing_characters], + indicators=["0", str(non_filing_characters)], subfields=subfields, ) ) @classmethod - def add_contributors(cls, record, edition): + def add_contributors(cls, record: Record, edition: Edition) -> None: """Create contributor fields for this edition. TODO: Use canonical names from LoC. """ - contibutor_fields = [] - # If there's one author, use the 100 field. 
if edition.sort_author and len(edition.contributions) == 1: record.add_field( @@ -225,19 +306,20 @@ def add_contributors(cls, record, edition): if len(edition.contributions) > 1: for contribution in edition.contributions: contributor = contribution.contributor - record.add_field( - Field( - tag="700", - indicators=["1", " "], - subfields=[ - Subfield("a", str(contributor.sort_name)), - Subfield("e", contribution.role), - ], + if contributor.sort_name and contribution.role: + record.add_field( + Field( + tag="700", + indicators=["1", " "], + subfields=[ + Subfield("a", str(contributor.sort_name)), + Subfield("e", contribution.role), + ], + ) ) - ) @classmethod - def add_publisher(cls, record, edition): + def add_publisher(cls, record: Record, edition: Edition) -> None: if edition.publisher: publication_date = edition.issued or edition.published year = "" @@ -256,7 +338,7 @@ def add_publisher(cls, record, edition): ) @classmethod - def add_distributor(cls, record, pool): + def add_distributor(cls, record: Record, pool: LicensePool) -> None: # Distributor record.add_field( Field( @@ -267,7 +349,7 @@ def add_distributor(cls, record, pool): ) @classmethod - def add_physical_description(cls, record, edition): + def add_physical_description(cls, record: Record, edition: Edition) -> None: # These 3xx fields are for a physical description of the item. if edition.medium == Edition.BOOK_MEDIUM: record.add_field( @@ -376,8 +458,9 @@ def add_physical_description(cls, record, edition): ) @classmethod - def add_audience(cls, record, work): - audience = cls.AUDIENCE_TERMS.get(work.audience, "General") + def add_audience(cls, record: Record, work: Work) -> None: + work_audience = work.audience or Classifier.AUDIENCE_ADULT + audience = cls.AUDIENCE_TERMS.get(work_audience, "General") record.add_field( Field( tag="385", @@ -390,7 +473,7 @@ def add_audience(cls, record, work): ) @classmethod - def add_series(cls, record, edition): + def add_series(cls, record: Record, edition: Edition) -> None: if edition.series: subfields = [Subfield("a", str(edition.series))] if edition.series_position: @@ -404,7 +487,7 @@ def add_series(cls, record, edition): ) @classmethod - def add_system_details(cls, record): + def add_system_details(cls, record: Record) -> None: record.add_field( Field( tag="538", @@ -414,10 +497,8 @@ def add_system_details(cls, record): ) @classmethod - def add_formats(cls, record, pool): - formats = [] + def add_formats(cls, record: Record, pool: LicensePool) -> None: for lpdm in pool.delivery_mechanisms: - format = None dm = lpdm.delivery_mechanism format = cls.FORMAT_TERMS.get((dm.content_type, dm.drm_scheme)) if format: @@ -432,7 +513,7 @@ def add_formats(cls, record, pool): ) @classmethod - def add_summary(cls, record, work): + def add_summary(cls, record: Record, work: Work) -> None: summary = work.summary_text if summary: stripped = re.sub("<[^>]+?>", " ", summary) @@ -445,9 +526,8 @@ def add_summary(cls, record, work): ) @classmethod - def add_simplified_genres(cls, record, work): + def add_genres(cls, record: Record, work: Work) -> None: """Create subject fields for this work.""" - genres = [] genres = work.genres for genre in genres: @@ -463,7 +543,7 @@ def add_simplified_genres(cls, record, work): ) @classmethod - def add_ebooks_subject(cls, record): + def add_ebooks_subject(cls, record: Record) -> None: # This is a general subject that can be added to all records. 
record.add_field( Field( @@ -475,128 +555,144 @@ def add_ebooks_subject(cls, record): ) ) + @classmethod + def add_web_client_urls( + cls, + record: Record, + identifier: Identifier, + library_short_name: str, + cm_url: str, + web_client_urls: list[str], + ) -> None: + qualified_identifier = urllib.parse.quote( + f"{identifier.type}/{identifier.identifier}", safe="" + ) -class MARCExporterFacets(BaseFacets): - """A faceting object used to configure the search engine so that - it only works updated since a certain time. - """ - - def __init__(self, start_time): - self.start_time = start_time - - def modify_search_filter(self, filter): - filter.order = self.SORT_ORDER_TO_OPENSEARCH_FIELD_NAME[self.ORDER_LAST_UPDATE] - filter.order_ascending = True - filter.updated_after = self.start_time - - -class MARCExporter: - """Turn a work into a record for a MARC file.""" - - NAME = ExternalIntegration.MARC_EXPORT + for web_client_base_url in web_client_urls: + link = "{}/{}/works/{}".format( + cm_url, + library_short_name, + qualified_identifier, + ) + encoded_link = urllib.parse.quote(link, safe="") + url = f"{web_client_base_url}/book/{encoded_link}" + record.add_field( + Field( + tag="856", + indicators=["4", "0"], + subfields=[Subfield(code="u", value=url)], + ) + ) - DESCRIPTION = _( - "Export metadata into MARC files that can be imported into an ILS manually." - ) +class MarcExporterSettings(BaseSettings): # This setting (in days) controls how often MARC files should be # automatically updated. Since the crontab in docker isn't easily # configurable, we can run a script daily but check this to decide # whether to do anything. - UPDATE_FREQUENCY = "marc_update_frequency" - DEFAULT_UPDATE_FREQUENCY = 30 + update_frequency: NonNegativeInt = FormField( + 30, + form=ConfigurationFormItem( + label="Update frequency (in days)", + type=ConfigurationFormItemType.NUMBER, + required=True, + ), + alias="marc_update_frequency", + ) + +class MarcExporterLibrarySettings(BaseSettings): # MARC organization codes are assigned by the # Library of Congress and can be found here: # http://www.loc.gov/marc/organizations/org-search.php - MARC_ORGANIZATION_CODE = "marc_organization_code" + organization_code: str | None = FormField( + None, + form=ConfigurationFormItem( + label="The MARC organization code for this library (003 field).", + description="MARC organization codes are assigned by the Library of Congress.", + type=ConfigurationFormItemType.TEXT, + ), + alias="marc_organization_code", + ) + + web_client_url: str | None = FormField( + None, + form=ConfigurationFormItem( + label="The base URL for the web catalog for this library, for the 856 field.", + description="If using a library registry that provides a web catalog, this can be left blank.", + type=ConfigurationFormItemType.TEXT, + ), + alias="marc_web_client_url", + ) + + include_summary: bool = FormField( + False, + form=ConfigurationFormItem( + label="Include summaries in MARC records (520 field)", + type=ConfigurationFormItemType.SELECT, + options={"false": "Do not include summaries", "true": "Include summaries"}, + ), + ) + + include_genres: bool = FormField( + False, + form=ConfigurationFormItem( + label="Include Palace Collection Manager genres in MARC records (650 fields)", + type=ConfigurationFormItemType.SELECT, + options={ + "false": "Do not include Palace Collection Manager genres", + "true": "Include Palace Collection Manager genres", + }, + ), + alias="include_simplified_genres", + ) - WEB_CLIENT_URL = "marc_web_client_url" - INCLUDE_SUMMARY = 
"include_summary" - INCLUDE_SIMPLIFIED_GENRES = "include_simplified_genres" + +class MARCExporter( + HasLibraryIntegrationConfiguration[ + MarcExporterSettings, MarcExporterLibrarySettings + ], + LoggerMixin, +): + """Turn a work into a record for a MARC file.""" # The minimum size each piece of a multipart upload should be MINIMUM_UPLOAD_BATCH_SIZE_BYTES = 5 * 1024 * 1024 # 5MB - LIBRARY_SETTINGS = [ - { - "key": UPDATE_FREQUENCY, - "label": _("Update frequency (in days)"), - "description": _( - "The circulation manager will wait this number of days between generating MARC files." - ), - "type": "number", - "default": DEFAULT_UPDATE_FREQUENCY, - }, - { - "key": MARC_ORGANIZATION_CODE, - "label": _("The MARC organization code for this library (003 field)."), - "description": _( - "MARC organization codes are assigned by the Library of Congress." - ), - }, - { - "key": WEB_CLIENT_URL, - "label": _( - "The base URL for the web catalog for this library, for the 856 field." - ), - "description": _( - "If using a library registry that provides a web catalog, this can be left blank." - ), - }, - { - "key": INCLUDE_SUMMARY, - "label": _("Include summaries in MARC records (520 field)"), - "type": "select", - "options": [ - {"key": "false", "label": _("Do not include summaries")}, - {"key": "true", "label": _("Include summaries")}, - ], - "default": "false", - }, - { - "key": INCLUDE_SIMPLIFIED_GENRES, - "label": _( - "Include Palace Collection Manager genres in MARC records (650 fields)" - ), - "type": "select", - "options": [ - { - "key": "false", - "label": _("Do not include Palace Collection Manager genres"), - }, - {"key": "true", "label": _("Include Palace Collection Manager genres")}, - ], - "default": "false", - }, - ] + def __init__( + self, + _db: Session, + storage_service: S3Service, + ): + self._db = _db + self.storage_service = storage_service @classmethod - def from_config(cls, library): - _db = Session.object_session(library) - integration = ExternalIntegration.lookup( - _db, - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - library=library, + def label(cls) -> str: + return "MARC Export" + + @classmethod + def description(cls) -> str: + return ( + "Export metadata into MARC files that can be imported into an ILS manually." 
) - if not integration: - raise CannotLoadConfiguration( - "No MARC export service is configured for this library" - ) - return cls(_db, library, integration) - def __init__(self, _db, library, integration): - self._db = _db - self.library = library - self.integration = integration + @classmethod + def settings_class(cls) -> type[MarcExporterSettings]: + return MarcExporterSettings @classmethod - def create_record(cls, work, annotator, force_create=False, integration=None): - """Build a complete MARC record for a given work.""" - if callable(annotator): - annotator = annotator() + def library_settings_class(cls) -> type[MarcExporterLibrarySettings]: + return MarcExporterLibrarySettings + @classmethod + def create_record( + cls, + revised: bool, + work: Work, + annotator: Annotator, + ) -> Record | None: + """Build a complete MARC record for a given work.""" pool = work.active_license_pool() if not pool: return None @@ -604,151 +700,127 @@ def create_record(cls, work, annotator, force_create=False, integration=None): edition = pool.presentation_edition identifier = pool.identifier - _db = Session.object_session(work) - - record = None - existing_record = getattr(work, annotator.marc_cache_field) - if existing_record and not force_create: - record = Record(data=existing_record.encode("utf-8"), force_utf8=True) - - if not record: - record = Record(leader=annotator.leader(work), force_utf8=True) - annotator.add_control_fields(record, identifier, pool, edition) - annotator.add_isbn(record, identifier) - - # TODO: The 240 and 130 fields are for translated works, so they can be grouped even - # though they have different titles. We do not group editions of the same work in - # different languages, so we can't use those yet. - - annotator.add_title(record, edition) - annotator.add_contributors(record, edition) - annotator.add_publisher(record, edition) - annotator.add_physical_description(record, edition) - annotator.add_audience(record, work) - annotator.add_series(record, edition) - annotator.add_system_details(record) - annotator.add_ebooks_subject(record) - - data = record.as_marc() - setattr(work, annotator.marc_cache_field, data.decode("utf8")) - - # Add additional fields that should not be cached. - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration - ) - return record + return annotator.annotate_work_record(revised, work, pool, edition, identifier) - def _file_key(self, library, lane, end_time, start_time=None): - """The path to the hosted MARC file for the given library, lane, - and date range.""" - root = library.short_name - if start_time: - time_part = str(start_time) + "-" + str(end_time) - else: - time_part = str(end_time) - parts = [root, time_part, lane.display_name] - return "/".join(parts) + ".mrc" + @staticmethod + def _date_to_string(date: datetime) -> str: + return date.astimezone(pytz.UTC).strftime("%Y-%m-%d") - def records( + def _file_key( self, - lane, - annotator, - storage_service: Optional[S3Service], - start_time=None, - force_refresh=False, - search_engine=None, - query_batch_size=500, - ): - """ - Create and export a MARC file for the books in a lane. - - :param lane: The Lane to export books from. - :param annotator: The Annotator to use when creating MARC records. - :param storage_service: The storage service integration to use for MARC files. - :param start_time: Only include records that were created or modified after this time. - :param force_refresh: Create new records even when cached records are available. 
- :param query_batch_size: Number of works to retrieve with a single Opensearch query. - """ + uuid: UUID, + library: Library, + collection: Collection, + creation_time: datetime, + since_time: datetime | None = None, + ) -> str: + """The path to the hosted MARC file for the given library, collection, + and date range.""" + root = "marc" + short_name = str(library.short_name) + creation = self._date_to_string(creation_time) + + if since_time: + file_type = f"delta.{self._date_to_string(since_time)}.{creation}" + else: + file_type = f"full.{creation}" - # We store the content, if it's not empty. If it's empty, we create a CachedMARCFile - # and Representation, but don't actually store it. - if storage_service is None: - raise Exception("No storage service is configured") + uuid_encoded = uuid_encode(uuid) + collection_name = collection.name.replace(" ", "_") + filename = f"{collection_name}.{file_type}.{uuid_encoded}.mrc" + parts = [root, short_name, filename] + return "/".join(parts) - search_engine = search_engine or ExternalSearchIndex(self._db) + def query_works( + self, + collection: Collection, + since_time: datetime | None, + creation_time: datetime, + batch_size: int, + ) -> ScalarResult: + query = ( + select(Work) + .join(LicensePool) + .join(Collection) + .where( + Collection.id == collection.id, + Work.last_update_time <= creation_time, + ) + ) - # End time is before we start the query, because if any records are changed - # during the processing we may not catch them, and they should be handled - # again on the next run. - end_time = utc_now() + if since_time is not None: + query = query.where(Work.last_update_time >= since_time) - facets = MARCExporterFacets(start_time=start_time) - pagination = SortKeyPagination(size=query_batch_size) + return self._db.execute(query).unique().yield_per(batch_size).scalars() - key = self._file_key(self.library, lane, end_time, start_time) + def records( + self, + library: Library, + collection: Collection, + annotator: Annotator, + *, + creation_time: datetime, + since_time: datetime | None = None, + batch_size: int = 500, + ) -> None: + """ + Create and export a MARC file for the books in a collection. + """ + uuid = uuid4() + key = self._file_key(uuid, library, collection, creation_time, since_time) - with storage_service.multipart( + with self.storage_service.multipart( key, content_type=Representation.MARC_MEDIA_TYPE, ) as upload: this_batch = BytesIO() - while pagination is not None: - # Retrieve one 'page' of works from the search index. - works = lane.works( - self._db, - pagination=pagination, - facets=facets, - search_engine=search_engine, + + works = self.query_works(collection, since_time, creation_time, batch_size) + for work in works: + # Create a record for each work and add it to the MARC file in progress. + record = self.create_record( + since_time is not None, + work, + annotator, ) - for work in works: - # Create a record for each work and add it to the - # MARC file in progress. - record = self.create_record( - work, annotator, force_refresh, self.integration - ) - if record: - record_bytes = record.as_marc() - this_batch.write(record_bytes) - if ( - this_batch.getbuffer().nbytes - >= self.MINIMUM_UPLOAD_BATCH_SIZE_BYTES - ): - # We've reached or exceeded the upload threshold. - # Upload one part of the multipart document. 
- self._upload_batch(this_batch, upload) - this_batch = BytesIO() - pagination = pagination.next_page + if record: + record_bytes = record.as_marc() + this_batch.write(record_bytes) + if ( + this_batch.getbuffer().nbytes + >= self.MINIMUM_UPLOAD_BATCH_SIZE_BYTES + ): + # We've reached or exceeded the upload threshold. + # Upload one part of the multipart document. + upload.upload_part(this_batch.getvalue()) + this_batch.seek(0) + this_batch.truncate() # Upload the final part of the multi-document, if # necessary. - self._upload_batch(this_batch, upload) # type: ignore[unreachable] + if this_batch.getbuffer().nbytes > 0: + upload.upload_part(this_batch.getvalue()) - representation, ignore = get_one_or_create( - self._db, - Representation, - url=upload.url, - media_type=Representation.MARC_MEDIA_TYPE, - ) - representation.fetched_at = end_time - if not upload.exception: - cached, is_new = get_one_or_create( + if upload.complete: + create( self._db, - CachedMARCFile, - library=self.library, - lane=(lane if isinstance(lane, Lane) else None), - start_time=start_time, - create_method_kwargs=dict(representation=representation), + MarcFile, + id=uuid, + library=library, + collection=collection, + created=creation_time, + since=since_time, + key=key, ) - if not is_new: - cached.representation = representation - cached.end_time = end_time - representation.set_as_mirrored(upload.url) else: - representation.mirror_exception = str(upload.exception) - - def _upload_batch(self, output: BytesIO, upload: MultipartS3ContextManager): - "Upload a batch of MARC records as one part of a multi-part upload." - content = output.getvalue() - if content: - upload.upload_part(content) - output.close() + if upload.exception: + # Log the exception and move on to the next file. We will try again next script run. + self.log.error( + f"Failed to upload MARC file for {library.short_name}/{collection.name}: {upload.exception}", + exc_info=upload.exception, + ) + else: + # There were no records to upload. This is not an error, but we should log it. + self.log.info( + f"No MARC records to upload for {library.short_name}/{collection.name}." + ) diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 57cabc141..9368a522d 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -10,7 +10,6 @@ import datetime import logging from collections import defaultdict -from typing import List, Optional from dateutil.parser import parse from dependency_injector.wiring import Provide, inject @@ -526,14 +525,14 @@ class LicenseData(LicenseFunctions): def __init__( self, identifier: str, - checkout_url: Optional[str], + checkout_url: str | None, status_url: str, status: LicenseStatus, checkouts_available: int, - expires: Optional[datetime.datetime] = None, - checkouts_left: Optional[int] = None, - terms_concurrency: Optional[int] = None, - content_types: Optional[List[str]] = None, + expires: datetime.datetime | None = None, + checkouts_left: int | None = None, + terms_concurrency: int | None = None, + content_types: list[str] | None = None, ): self.identifier = identifier self.checkout_url = checkout_url @@ -697,6 +696,7 @@ def __init__( links=None, licenses=None, last_checked=None, + should_track_playtime=False, ): """Constructor. @@ -747,6 +747,9 @@ def __init__( # instead of directly using the values that are given to CirculationData. 
self.licenses = licenses + # Whether the license should contain a playtime tracking link + self.should_track_playtime = should_track_playtime + @property def links(self): return self.__links @@ -877,6 +880,7 @@ def license_pool(self, _db, collection): license_pool.open_access = self.has_open_access_link license_pool.availability_time = self.last_checked license_pool.last_checked = self.last_checked + license_pool.should_track_playtime = self.should_track_playtime return license_pool, is_new @@ -969,6 +973,7 @@ def apply( # with the book reflect the formats in self.formats. old_lpdms = new_lpdms = [] if pool: + pool.should_track_playtime = self.should_track_playtime old_lpdms = list(pool.delivery_mechanisms) # Before setting and unsetting delivery mechanisms, which may diff --git a/core/migration/migrate_external_integration.py b/core/migration/migrate_external_integration.py index 56e82a5a7..fdc0be846 100644 --- a/core/migration/migrate_external_integration.py +++ b/core/migration/migrate_external_integration.py @@ -1,9 +1,13 @@ import json from collections import defaultdict -from typing import Dict, Tuple, Type, TypeVar +from typing import Any, TypeVar from sqlalchemy.engine import Connection, CursorResult, Row +from core.integration.base import ( + HasIntegrationConfiguration, + HasLibraryIntegrationConfiguration, +) from core.integration.settings import ( BaseSettings, ConfigurationFormItemType, @@ -15,7 +19,7 @@ def _validate_and_load_settings( - settings_class: Type[T], settings_dict: Dict[str, str] + settings_class: type[T], settings_dict: dict[str, str] ) -> T: aliases = { f.alias: f.name @@ -43,14 +47,14 @@ def _validate_and_load_settings( def get_configuration_settings( connection: Connection, integration: Row, -) -> Tuple[Dict, Dict, str]: +) -> tuple[dict[str, str], dict[str, dict[str, str]], str]: settings = connection.execute( "select cs.library_id, cs.key, cs.value from configurationsettings cs " "where cs.external_integration_id = (%s)", (integration.id,), ) settings_dict = {} - library_settings: Dict[str, Dict[str, str]] = defaultdict(dict) + library_settings: dict[str, dict[str, str]] = defaultdict(dict) self_test_results = json_serializer({}) for setting in settings: if not setting.value: @@ -68,39 +72,43 @@ def get_configuration_settings( def _migrate_external_integration( connection: Connection, - integration: Row, - protocol_class: Type, + name: str, + protocol: str, + protocol_class: type[HasIntegrationConfiguration[BaseSettings]], goal: str, - settings_dict: Dict, + settings_dict: dict[str, Any], self_test_results: str, - name=None, + context: dict[str, Any] | None = None, ) -> int: # Load and validate the settings before storing them in the database. 
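+    # (Validating with the new settings_class here means a bad legacy value
+    # fails the migration immediately, instead of surfacing later when the
+    # stored settings are first read.)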
settings_class = protocol_class.settings_class() settings_obj = _validate_and_load_settings(settings_class, settings_dict) integration_configuration = connection.execute( "insert into integration_configurations " - "(protocol, goal, name, settings, self_test_results) " - "values (%s, %s, %s, %s, %s)" + "(protocol, goal, name, settings, context, self_test_results) " + "values (%s, %s, %s, %s, %s, %s)" "returning id", ( - integration.protocol, + protocol, goal, - name or integration.name, + name, json_serializer(settings_obj.dict()), + json_serializer(context or {}), self_test_results, ), ).fetchone() assert integration_configuration is not None - return integration_configuration[0] + return integration_configuration[0] # type: ignore[no-any-return] def _migrate_library_settings( connection: Connection, integration_id: int, library_id: int, - library_settings: Dict[str, str], - protocol_class: Type, + library_settings: dict[str, str], + protocol_class: type[ + HasLibraryIntegrationConfiguration[BaseSettings, BaseSettings] + ], ) -> None: library_settings_class = protocol_class.library_settings_class() library_settings_obj = _validate_and_load_settings( diff --git a/core/migration/util.py b/core/migration/util.py index 0085ce6a9..dc02ee49b 100644 --- a/core/migration/util.py +++ b/core/migration/util.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, List +import logging +from typing import Any import sqlalchemy as sa @@ -10,8 +11,8 @@ def pg_update_enum( table: str, column: str, enum_name: str, - old_values: List[str], - new_values: List[str], + old_values: list[str], + new_values: list[str], ) -> None: """ Alembic migration helper function to update an enum type. @@ -64,3 +65,15 @@ def drop_enum(op: Any, enum_name: str, checkfirst: bool = True) -> None: Alembic migration helper function to drop an enum type. """ sa.Enum(name=enum_name).drop(op.get_bind(), checkfirst=checkfirst) + + +def migration_logger(revision: str) -> logging.Logger: + """ + Create a logger for a migration revision. + + This logger will be used to log messages during the migration. + """ + log = logging.getLogger(f"palace.migration.{revision}") + log.setLevel(logging.INFO) + log.disabled = False + return log diff --git a/core/model/__init__.py b/core/model/__init__.py index 38888ca4a..57333ca83 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -3,7 +3,8 @@ import json import logging import os -from typing import Any, Generator, List, Literal, Tuple, Type, TypeVar +from collections.abc import Generator +from typing import Any, List, Literal, Tuple, Type, TypeVar, Union from contextlib2 import contextmanager from psycopg2.extensions import adapt as sqlescape @@ -11,7 +12,7 @@ from pydantic.json import pydantic_encoder from sqlalchemy import create_engine from sqlalchemy.engine import Connection -from sqlalchemy.exc import IntegrityError +from sqlalchemy.exc import DatabaseError, IntegrityError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound @@ -54,6 +55,13 @@ def pg_advisory_lock( connection.execute(text(f"SELECT pg_advisory_lock({lock_id});")) try: yield + except IntegrityError: + # If there was an IntegrityError, and we are in a transaction, + # we need to roll it back before we are able to release the lock. 
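+        # (Background: PostgreSQL aborts the current transaction on such an
+        # error and rejects further statements until a rollback, so the
+        # pg_advisory_unlock() in the finally block below would otherwise
+        # fail. Session-level advisory locks survive the rollback itself.)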
+ transaction = connection.get_transaction() + if transaction is not None: + transaction.rollback() + raise finally: # Close the lock connection.execute(text(f"SELECT pg_advisory_unlock({lock_id});")) @@ -78,8 +86,8 @@ def flush(db): def create( - db: Session, model: Type[T], create_method="", create_method_kwargs=None, **kwargs -) -> Tuple[T, Literal[True]]: + db: Session, model: type[T], create_method="", create_method_kwargs=None, **kwargs +) -> tuple[T, Literal[True]]: kwargs.update(create_method_kwargs or {}) created = getattr(model, create_method, model)(**kwargs) db.add(created) @@ -88,7 +96,7 @@ def create( def get_one( - db: Session, model: Type[T], on_multiple="error", constraint=None, **kwargs + db: Session, model: type[T], on_multiple="error", constraint=None, **kwargs ) -> T | None: """Gets an object from the database based on its attributes. @@ -124,8 +132,8 @@ def get_one( def get_one_or_create( - db: Session, model: Type[T], create_method="", create_method_kwargs=None, **kwargs -) -> Tuple[T, bool]: + db: Session, model: type[T], create_method="", create_method_kwargs=None, **kwargs +) -> tuple[T, bool]: one = get_one(db, model, **kwargs) if one: return one, False @@ -204,13 +212,13 @@ class PresentationCalculationPolicy: def __init__( self, + *, choose_edition=True, set_edition_metadata=True, classify=True, choose_summary=True, calculate_quality=True, choose_cover=True, - regenerate_marc_record=False, update_search_index=False, verbose=True, equivalent_identifier_levels=DEFAULT_LEVELS, @@ -231,8 +239,6 @@ def __init__( quality of the Work? :param choose_cover: Should we reconsider which of the available cover images is the best? - :param regenerate_marc_record: Should we regenerate the MARC record - for this Work? :param update_search_index: Should we reindex this Work's entry in the search index? :param verbose: Should we print out information about the work we're @@ -265,11 +271,6 @@ def __init__( self.calculate_quality = calculate_quality self.choose_cover = choose_cover - # Regenerate MARC records, except that they will - # never be generated unless a MARC organization code is set - # in a sitewide configuration setting. - self.regenerate_marc_record = regenerate_marc_record - # Similarly for update_search_index. self.update_search_index = update_search_index @@ -285,7 +286,6 @@ def recalculate_everything(cls): everything, even when it doesn't seem necessary. 
""" return PresentationCalculationPolicy( - regenerate_marc_record=True, update_search_index=True, ) @@ -346,8 +346,8 @@ def engine(cls, url=None): @classmethod def setup_event_listener( - cls, session: Union[Session, sessionmaker] - ) -> Union[Session, sessionmaker]: + cls, session: Session | sessionmaker + ) -> Session | sessionmaker: event.listen(session, "before_flush", Listener.before_flush_event_listener) return session @@ -483,7 +483,7 @@ def __init__( self.bulk_method = bulk_method self.bulk_method_kwargs = bulk_method_kwargs or {} self.batch_size = batch_size - self._objects: List[Base] = [] + self._objects: list[Base] = [] def __enter__(self): return self @@ -515,7 +515,6 @@ def _bulk_operation(self): SAMLFederation, ) from core.model.admin import Admin, AdminRole -from core.model.cachedfeed import CachedMARCFile from core.model.circulationevent import CirculationEvent from core.model.classification import Classification, Genre, Subject from core.model.collection import ( @@ -554,6 +553,7 @@ def _bulk_operation(self): RightsStatus, ) from core.model.listeners import * +from core.model.marcfile import MarcFile from core.model.measurement import Measurement from core.model.patron import ( Annotation, diff --git a/core/model/admin.py b/core/model/admin.py index c9db3cce5..c286cbf05 100644 --- a/core/model/admin.py +++ b/core/model/admin.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING import bcrypt from flask_babel import lazy_gettext as _ @@ -40,7 +40,7 @@ class Admin(Base, HasSessionCache): password_hashed = Column(Unicode, index=True) # An Admin may have many roles. - roles: Mapped[List[AdminRole]] = relationship( + roles: Mapped[list[AdminRole]] = relationship( "AdminRole", backref="admin", cascade="all, delete-orphan", uselist=True ) diff --git a/core/model/announcements.py b/core/model/announcements.py index fae3c6dda..c263daec1 100644 --- a/core/model/announcements.py +++ b/core/model/announcements.py @@ -3,7 +3,7 @@ import dataclasses import datetime import uuid -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING from sqlalchemy import Column, Date, ForeignKey, Integer, Unicode, select from sqlalchemy.dialects.postgresql import UUID @@ -52,7 +52,7 @@ def library_announcements(cls, library: Library) -> Select: @classmethod def authentication_document_announcements( cls, library: Library - ) -> List[Dict[str, str]]: + ) -> list[dict[str, str]]: db = Session.object_session(library) today_local = datetime.date.today() query = ( @@ -69,7 +69,7 @@ def authentication_document_announcements( @classmethod def from_data( - cls, db: Session, data: AnnouncementData, library: Optional[Library] = None + cls, db: Session, data: AnnouncementData, library: Library | None = None ) -> Announcement: created, _ = create( db, @@ -86,9 +86,9 @@ def from_data( def sync( cls, db: Session, - existing: List[Announcement], - new: Dict[uuid.UUID, AnnouncementData], - library: Optional[Library] = None, + existing: list[Announcement], + new: dict[uuid.UUID, AnnouncementData], + library: Library | None = None, ) -> None: """ Synchronize the existing announcements with the new announcements, creating any new announcements @@ -140,9 +140,9 @@ class AnnouncementData: content: str start: datetime.date finish: datetime.date - id: Optional[uuid.UUID] = None + id: uuid.UUID | None = None - def as_dict(self) -> Dict[str, str]: + def as_dict(self) -> dict[str, str]: date_format = "%Y-%m-%d" 
return_dict = { "content": self.content, diff --git a/core/model/before_flush_decorator.py b/core/model/before_flush_decorator.py index 46b296f5b..4a2c706c8 100644 --- a/core/model/before_flush_decorator.py +++ b/core/model/before_flush_decorator.py @@ -1,20 +1,14 @@ from __future__ import annotations -import sys +from collections.abc import Callable from copy import copy from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, ParamSpec from sqlalchemy.orm import Session from sqlalchemy.orm.unitofwork import UOWTransaction -# TODO: Remove this when we drop support for Python 3.9 -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - if TYPE_CHECKING: from core.model import Base @@ -39,7 +33,7 @@ class Listeners: """ # Tuple of models that the listener is registered for. - models: Tuple[Type[Base], ...] + models: tuple[type[Base], ...] # State that the listener is registered for. state: ListenerState # If True, the listener will only be called once. @@ -50,11 +44,11 @@ class Listeners: one_shot_triggered: bool = False def __init__(self): - self._listeners: List[BeforeFlushListener.Listeners] = [] + self._listeners: list[BeforeFlushListener.Listeners] = [] def before_flush( self, - model: Type[Base] | Tuple[Type[Base], ...], + model: type[Base] | tuple[type[Base], ...], state: ListenerState = ListenerState.any, one_shot: bool = False, ) -> Callable[[Callable[P, None]], Callable[P, None]]: @@ -91,8 +85,8 @@ def _invoke_listeners( cls, listening_for: ListenerState, session: Session, - listeners: List[BeforeFlushListener.Listeners], - instance_filter: Optional[Callable[[Session, Base], bool]] = None, + listeners: list[BeforeFlushListener.Listeners], + instance_filter: Callable[[Session, Base], bool] | None = None, ) -> None: """ Invoke the listeners for the given state. @@ -136,8 +130,8 @@ def _invoke_listeners( def before_flush_event_listener( self, session: Session, - _flush_context: Optional[UOWTransaction] = None, - _instances: Optional[List[object]] = None, + _flush_context: UOWTransaction | None = None, + _instances: list[object] | None = None, ) -> None: """ SQLAlchemy event listener that is called before a flush. This is where we invoke the listeners that have been diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py deleted file mode 100644 index a6603caa8..000000000 --- a/core/model/cachedfeed.py +++ /dev/null @@ -1,36 +0,0 @@ -# Cached Marc Files -from __future__ import annotations - -from typing import TYPE_CHECKING - -from sqlalchemy import Column, DateTime, ForeignKey, Integer -from sqlalchemy.orm import Mapped, relationship - -from core.model import Base - -if TYPE_CHECKING: - from core.model import Representation - - -class CachedMARCFile(Base): - """A record that a MARC file has been created and cached for a particular lane.""" - - __tablename__ = "cachedmarcfiles" - id = Column(Integer, primary_key=True) - - # Every MARC file is associated with a library and a lane. If the - # lane is null, the file is for the top-level WorkList. - library_id = Column(Integer, ForeignKey("libraries.id"), nullable=False, index=True) - - lane_id = Column(Integer, ForeignKey("lanes.id"), nullable=True, index=True) - - # The representation for this file stores the URL where it was mirrored. 
- representation_id = Column( - Integer, ForeignKey("representations.id"), nullable=False - ) - representation: Mapped[Representation] = relationship( - "Representation", back_populates="marc_file" - ) - - start_time = Column(DateTime(timezone=True), nullable=True, index=True) - end_time = Column(DateTime(timezone=True), nullable=True, index=True) diff --git a/core/model/classification.py b/core/model/classification.py index d4d406716..4e94fbe23 100644 --- a/core/model/classification.py +++ b/core/model/classification.py @@ -2,7 +2,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from sqlalchemy import ( Boolean, @@ -146,7 +146,7 @@ class Subject(Base): checked = Column(Boolean, default=False, index=True) # One Subject may participate in many Classifications. - classifications: Mapped[List[Classification]] = relationship( + classifications: Mapped[list[Classification]] = relationship( "Classification", back_populates="subject" ) @@ -350,11 +350,11 @@ class Classification(Base): __tablename__ = "classifications" id = Column(Integer, primary_key=True) identifier_id = Column(Integer, ForeignKey("identifiers.id"), index=True) - identifier: Mapped[Optional[Identifier]] + identifier: Mapped[Identifier | None] subject_id = Column(Integer, ForeignKey("subjects.id"), index=True) subject: Mapped[Subject] = relationship("Subject", back_populates="classifications") data_source_id = Column(Integer, ForeignKey("datasources.id"), index=True) - data_source: Mapped[Optional[DataSource]] + data_source: Mapped[DataSource | None] # How much weight the data source gives to this classification. weight = Column(Integer) @@ -485,12 +485,12 @@ class Genre(Base, HasSessionCache): name = Column(Unicode, unique=True, index=True) # One Genre may have affinity with many Subjects. - subjects: Mapped[List[Subject]] = relationship("Subject", backref="genre") + subjects: Mapped[list[Subject]] = relationship("Subject", backref="genre") # One Genre may participate in many WorkGenre assignments. 
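+    # (The association proxy below surfaces each related Work by reading
+    # through its WorkGenre row.)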
works = association_proxy("work_genres", "work") - work_genres: Mapped[List[WorkGenre]] = relationship( + work_genres: Mapped[list[WorkGenre]] = relationship( "WorkGenre", backref="genre", cascade="all, delete-orphan" ) diff --git a/core/model/collection.py b/core/model/collection.py index 1e32de626..b9876ba4e 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -1,7 +1,7 @@ from __future__ import annotations -from abc import ABCMeta, abstractmethod -from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, TypeVar +from collections.abc import Generator +from typing import TYPE_CHECKING, Any, TypeVar from sqlalchemy import ( Boolean, @@ -9,17 +9,17 @@ ForeignKey, Integer, Table, - Unicode, UniqueConstraint, exists, + select, ) -from sqlalchemy.orm import Mapped, Query, backref, mapper, relationship -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.ext.associationproxy import association_proxy +from sqlalchemy.orm import Mapped, Query, mapper, relationship from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ from core.integration.goals import Goals -from core.model import Base, create, get_one_or_create +from core.model import Base, create from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.constants import EditionConstants from core.model.coverage import CoverageRecord @@ -28,10 +28,7 @@ from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property from core.model.identifier import Identifier -from core.model.integration import ( - IntegrationConfiguration, - IntegrationLibraryConfiguration, -) +from core.model.integration import IntegrationConfiguration from core.model.library import Library from core.model.licensing import LicensePool, LicensePoolDeliveryMechanism from core.model.work import Work @@ -51,34 +48,20 @@ class Collection(Base, HasSessionCache): __tablename__ = "collections" id = Column(Integer, primary_key=True, nullable=False) - name = Column(Unicode, unique=True, nullable=False, index=True) - DATA_SOURCE_NAME_SETTING = "data_source" - # For use in forms that edit Collections. - EXTERNAL_ACCOUNT_ID_KEY = "external_account_id" - - # How does the provider of this collection distinguish it from - # other collections it provides? On the other side this is usually - # called a "library ID". - external_account_id = Column(Unicode, nullable=True) - # How do we connect to the provider of this collection? Any url, # authentication information, or additional configuration goes # into the external integration, as does the 'protocol', which # designates the integration technique we will use to actually get # the metadata and licenses. Each Collection has a distinct - # ExternalIntegration. - external_integration_id = Column( - Integer, ForeignKey("externalintegrations.id"), unique=True, index=True - ) - _external_integration: ExternalIntegration - + # integration configuration. integration_configuration_id = Column( Integer, - ForeignKey("integration_configurations.id", ondelete="SET NULL"), + ForeignKey("integration_configurations.id"), unique=True, index=True, + nullable=False, ) integration_configuration: Mapped[IntegrationConfiguration] = relationship( "IntegrationConfiguration", @@ -94,31 +77,30 @@ class Collection(Base, HasSessionCache): # secret as the Overdrive collection, but it has a distinct # external_account_id. 
parent_id = Column(Integer, ForeignKey("collections.id"), index=True) - # SQLAlchemy will create a Collection-typed field called "parent". - parent: Collection - - # When deleting a collection, this flag is set to True so that the deletion - # script can take care of deleting it in the background. This is - # useful for deleting large collections which can timeout when deleting. - marked_for_deletion = Column(Boolean, default=False) + parent: Collection = relationship( + "Collection", remote_side=[id], back_populates="children" + ) # A collection may have many child collections. For example, # An Overdrive collection may have many children corresponding # to Overdrive Advantage collections. - children: Mapped[List[Collection]] = relationship( - "Collection", backref=backref("parent", remote_side=[id]), uselist=True + children: Mapped[list[Collection]] = relationship( + "Collection", back_populates="parent", uselist=True ) + # When deleting a collection, this flag is set to True so that the deletion + # script can take care of deleting it in the background. This is + # useful for deleting large collections which can timeout when deleting. + marked_for_deletion = Column(Boolean, default=False) + # A Collection can provide books to many Libraries. - libraries: Mapped[List[Library]] = relationship( - "Library", - secondary=lambda: collections_libraries, - backref="collections", - uselist=True, + # https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html#composite-association-proxies + libraries: Mapped[list[Library]] = association_proxy( + "integration_configuration", "libraries" ) # A Collection can include many LicensePools. - licensepools: Mapped[List[LicensePool]] = relationship( + licensepools: Mapped[list[LicensePool]] = relationship( "LicensePool", back_populates="collection", cascade="all, delete-orphan", @@ -126,23 +108,23 @@ class Collection(Base, HasSessionCache): ) # A Collection can have many associated Credentials. - credentials: Mapped[List[Credential]] = relationship( + credentials: Mapped[list[Credential]] = relationship( "Credential", back_populates="collection", cascade="delete" ) # A Collection can be monitored by many Monitors, each of which # will have its own Timestamp. - timestamps: Mapped[List[Timestamp]] = relationship( + timestamps: Mapped[list[Timestamp]] = relationship( "Timestamp", back_populates="collection" ) - catalog: Mapped[List[Identifier]] = relationship( + catalog: Mapped[list[Identifier]] = relationship( "Identifier", secondary=lambda: collections_identifiers, backref="collections" ) # A Collection can be associated with multiple CoverageRecords # for Identifiers in its catalog. - coverage_records: Mapped[List[CoverageRecord]] = relationship( + coverage_records: Mapped[list[CoverageRecord]] = relationship( "CoverageRecord", backref="collection", cascade="all" ) @@ -151,10 +133,12 @@ class Collection(Base, HasSessionCache): # also be added to the list. Admins can remove items from the # the list and they won't be added back, so the list doesn't # necessarily match the collection. - customlists: Mapped[List[CustomList]] = relationship( + customlists: Mapped[list[CustomList]] = relationship( "CustomList", secondary=lambda: collections_customlists, backref="collections" ) + export_marc_records = Column(Boolean, default=False, nullable=False) + # Most data sources offer different catalogs to different # libraries. Data sources in this list offer the same catalog to # every library. 
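A note on the `libraries` relationship above: it is now an association proxy, so a collection's library membership lives on its integration configuration rather than on the old `collections_libraries` join table. A minimal sketch of the implied usage, assuming the `IntegrationConfiguration.libraries` relationship that this diff references but does not define, with `collection` and `library` already loaded from the session:

```python
# Membership is written through the integration configuration...
collection.integration_configuration.libraries.append(library)

# ...and the proxy on Collection reads the same underlying list.
assert library in collection.libraries
```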
@@ -163,13 +147,13 @@ class Collection(Base, HasSessionCache):
     def __repr__(self) -> str:
         return f'<Collection "{self.name}"/"{self.protocol}" ID={self.id}>'

-    def cache_key(self) -> Tuple[str | None, str | None]:
-        return self.name, self.external_integration.protocol
+    def cache_key(self) -> tuple[str | None, str | None]:
+        return self.name, self.integration_configuration.protocol

     @classmethod
     def by_name_and_protocol(
         cls, _db: Session, name: str, protocol: str
-    ) -> Tuple[Collection, bool]:
+    ) -> tuple[Collection, bool]:
         """Find or create a Collection with the given name and the given
         protocol.

@@ -179,15 +163,15 @@ def by_name_and_protocol(
         """
         key = (name, protocol)

-        def lookup_hook() -> Tuple[Collection, bool]:
+        def lookup_hook() -> tuple[Collection, bool]:
             return cls._by_name_and_protocol(_db, key)

         return cls.by_cache_key(_db, key, lookup_hook)

     @classmethod
     def _by_name_and_protocol(
-        cls, _db: Session, cache_key: Tuple[str, str]
-    ) -> Tuple[Collection, bool]:
+        cls, _db: Session, cache_key: tuple[str, str]
+    ) -> tuple[Collection, bool]:
         """Find or create a Collection with the given name and the given
         protocol.

@@ -198,25 +182,55 @@ def _by_name_and_protocol(
         """
         name, protocol = cache_key

-        qu = cls.by_protocol(_db, protocol)
-        qu = qu.filter(Collection.name == name)
-        try:
-            collection = qu.one()
+        query = select(IntegrationConfiguration).where(
+            IntegrationConfiguration.name == name
+        )
+        integration_or_none = _db.execute(query).scalar_one_or_none()
+        if integration_or_none is None:
+            integration, _ = create(
+                _db,
+                IntegrationConfiguration,
+                protocol=protocol,
+                goal=Goals.LICENSE_GOAL,
+                name=name,
+            )
+        else:
+            integration = integration_or_none
+
+        if integration.goal != Goals.LICENSE_GOAL:
+            raise ValueError(
+                f'Integration "{name}" does not have goal "{Goals.LICENSE_GOAL.name}".'
+            )
+        if integration.protocol != protocol:
+            raise ValueError(
+                f'Integration "{name}" does not use protocol "{protocol}".'
+            )
+
+        if integration.collection is not None:
+            collection = integration.collection
             is_new = False
-        except NoResultFound as e:
-            # Make a new Collection.
-            collection, is_new = get_one_or_create(_db, Collection, name=name)
-            if not is_new and collection.protocol != protocol:
-                # The collection already exists, it just uses a different
-                # protocol than the one we asked about.
-                raise ValueError(
-                    f'Collection "{name}" does not use protocol "{protocol}".'
-                )
-            integration = collection.create_external_integration(protocol=protocol)
-            collection.external_integration.protocol = protocol
-            collection.create_integration_configuration(protocol)
+        else:
+            collection, _ = create(  # type: ignore[unreachable]
+                _db,
+                Collection,
+                integration_configuration=integration,
+            )
+            is_new = True

         return collection, is_new

+    @classmethod
+    def by_name(cls, _db: Session, name: str) -> Collection | None:
+        """Find a Collection by name."""
+        return _db.execute(
+            select(Collection)
+            .join(IntegrationConfiguration)
+            .where(
+                IntegrationConfiguration.name == name,
+                IntegrationConfiguration.goal == Goals.LICENSE_GOAL,
+            )
+        ).scalar_one_or_none()
+
     @classmethod
     def by_protocol(cls, _db: Session, protocol: str | None) -> Query[Collection]:
         """Query collections that get their licenses through the given protocol.
@@ -241,35 +255,17 @@ def by_protocol(cls, _db: Session, protocol: str | None) -> Query[Collection]:
         return qu

-    @classmethod
-    def by_datasource(
-        cls, _db: Session, data_source: DataSource | str
-    ) -> Query[Collection]:
-        """Query collections that are associated with the given DataSource.
-
-        Collections marked for deletion are not included.
-        """
-        data_source_name = (
-            data_source.name if isinstance(data_source, DataSource) else data_source
-        )
-
-        qu = (
-            _db.query(cls)
-            .join(
-                IntegrationConfiguration,
-                cls.integration_configuration_id == IntegrationConfiguration.id,
-            )
-            .filter(
-                IntegrationConfiguration.settings_dict[
-                    Collection.DATA_SOURCE_NAME_SETTING
-                ].astext
-                == data_source_name
-            )
-            .filter(Collection.marked_for_deletion == False)
-        )
-        return qu
+    @property
+    def name(self) -> str:
+        """What is the name of this collection?"""
+        if self.integration_configuration is None:
+            raise ValueError("Collection has no integration configuration.")
+        name = self.integration_configuration.name
+        if not name:
+            raise ValueError("Collection has no name.")
+        return name

-    @hybrid_property
+    @property
     def protocol(self) -> str:
         """What protocol do we need to use to get licenses for this
         collection?
@@ -296,23 +292,6 @@ def protocol(self, new_protocol: str) -> None:
             for child in self.children:
                 child.protocol = new_protocol

-    @hybrid_property
-    def primary_identifier_source(self) -> str | None:
-        """Identify if should try to use another identifier than <id>"""
-        return self.integration_configuration.settings_dict.get(
-            ExternalIntegration.PRIMARY_IDENTIFIER_SOURCE
-        )
-
-    @primary_identifier_source.setter
-    def primary_identifier_source(self, new_primary_identifier_source: str) -> None:
-        """Modify the primary identifier source in use by this Collection."""
-        self.integration_configuration.settings_dict = (
-            self.integration_configuration.settings_dict.copy()
-        )
-        self.integration_configuration.settings_dict[
-            ExternalIntegration.PRIMARY_IDENTIFIER_SOURCE
-        ] = new_primary_identifier_source
-
     # For collections that can control the duration of the loans they
     # create, the durations are stored in these settings and new loans are
     # expected to be created using these settings. For collections
@@ -347,16 +326,14 @@ def default_loan_period_setting(
         self,
         library: Library,
         medium: str = EditionConstants.BOOK_MEDIUM,
-    ) -> Optional[int]:
+    ) -> int | None:
         """Until we hear otherwise from the license provider, we assume
         that someone who borrows a non-open-access item from this
         collection has it for this number of days.
         """
         key = self.loan_period_key(medium)

-        if library.id is None:
-            return None
-        config = self.integration_configuration.for_library(library.id)
+        config = self.integration_configuration.for_library(library)
         if config is None:
             return None

@@ -413,78 +390,6 @@ def default_audience(self, new_value: str) -> None:
         """
         self._set_settings(**{self.DEFAULT_AUDIENCE_KEY: str(new_value)})

-    def create_external_integration(self, protocol: str) -> ExternalIntegration:
-        """Create an ExternalIntegration for this Collection.
-
-        To be used immediately after creating a new Collection,
-        e.g. in by_name_and_protocol, from_metadata_identifier, and
-        various test methods that create mock Collections.
-
-        If an external integration already exists, return it instead
-        of creating another one.
-
-        :param protocol: The protocol known to be in use when getting
-            licenses for this collection.
- """ - _db = Session.object_session(self) - goal = ExternalIntegration.LICENSE_GOAL - external_integration, is_new = get_one_or_create( - _db, - ExternalIntegration, - id=self.external_integration_id, - create_method_kwargs=dict(protocol=protocol, goal=goal), - ) - if external_integration.protocol != protocol: - raise ValueError( - "Located ExternalIntegration, but its protocol (%s) does not match desired protocol (%s)." - % (external_integration.protocol, protocol) - ) - self.external_integration_id = external_integration.id - return external_integration - - def create_integration_configuration( - self, protocol: str - ) -> IntegrationConfiguration: - _db = Session.object_session(self) - goal = Goals.LICENSE_GOAL - if self.integration_configuration_id: - integration = self.integration_configuration - else: - integration, is_new = create( - _db, - IntegrationConfiguration, - protocol=protocol, - goal=goal, - name=self.name, - ) - if integration.protocol != protocol: - raise ValueError( - "Located ExternalIntegration, but its protocol (%s) does not match desired protocol (%s)." - % (integration.protocol, protocol) - ) - self.integration_configuration_id = integration.id - # Immediately accessing the relationship fills out the data - return self.integration_configuration - - @property - def external_integration(self) -> ExternalIntegration: - """Find the external integration for this Collection, assuming - it already exists. - - This is generally a safe assumption since by_name_and_protocol and - from_metadata_identifier both create ExternalIntegrations for the - Collections they create. - """ - # We don't enforce this on the database level because it is - # legitimate for a newly created Collection to have no - # ExternalIntegration. But by the time it's being used for real, - # it needs to have one. - if not self.external_integration_id: - raise ValueError( - "No known external integration for collection %s" % self.name - ) - return self._external_integration - @hybrid_property def data_source(self) -> DataSource | None: """Find the data source associated with this Collection. @@ -545,47 +450,6 @@ def parents(self) -> Generator[Collection, None, None]: yield parent yield from parent.parents - def disassociate_library(self, library: Library) -> None: - """Disassociate a Library from this Collection and delete any relevant - ConfigurationSettings. - """ - if library is None or library not in self.libraries: - # No-op. 
- return - - _db = Session.object_session(self) - if self.external_integration_id: - qu = ( - _db.query(ConfigurationSetting) - .filter(ConfigurationSetting.library == library) - .filter( - ConfigurationSetting.external_integration - == self.external_integration - ) - ) - qu.delete() - else: - raise ValueError( - "No known external integration for collection %s" % self.name - ) - if self.integration_configuration_id: - qu = ( - _db.query(IntegrationLibraryConfiguration) - .filter(IntegrationLibraryConfiguration.library_id == library.id) - .filter( - IntegrationLibraryConfiguration.parent_id - == self.integration_configuration_id - ) - ) - qu.delete() - else: - raise ValueError( - "No known integration library configuration for collection %s" - % self.name - ) - - self.libraries.remove(library) - @property def pools_with_no_delivery_mechanisms(self) -> Query[LicensePool]: """Find all LicensePools in this Collection that have no delivery @@ -597,7 +461,7 @@ def pools_with_no_delivery_mechanisms(self) -> Query[LicensePool]: qu = LicensePool.with_no_delivery_mechanisms(_db) return qu.filter(LicensePool.collection == self) # type: ignore[no-any-return] - def explain(self, include_secrets: bool = False) -> List[str]: + def explain(self, include_secrets: bool = False) -> list[str]: """Create a series of human-readable strings to explain a collection's settings. @@ -616,8 +480,6 @@ def explain(self, include_secrets: bool = False) -> List[str]: lines.append('Protocol: "%s"' % integration.protocol) for library in self.libraries: lines.append('Used by library: "%s"' % library.short_name) - if self.external_account_id: - lines.append('External account ID: "%s"' % self.external_account_id) for name in sorted(integration.settings_dict): value = integration.settings_dict[name] if ( @@ -630,7 +492,7 @@ def explain(self, include_secrets: bool = False) -> List[str]: def restrict_to_ready_deliverable_works( cls, query: Query[T], - collection_ids: List[int] | None = None, + collection_ids: list[int] | None = None, show_suppressed: bool = False, allow_holds: bool = True, ) -> Query[T]: @@ -723,8 +585,7 @@ def delete(self, search_index: ExternalSearchIndex | None = None) -> None: _db = Session.object_session(self) # Disassociate all libraries from this collection. - for library in self.libraries: - self.disassociate_library(library) + self.libraries.clear() # Delete all the license pools. This should be the only part # of the application where LicensePools are permanently @@ -742,33 +603,11 @@ def delete(self, search_index: ExternalSearchIndex | None = None) -> None: _db.delete(pool) - # Delete the ExternalIntegration associated with this - # Collection, assuming it wasn't deleted already. - if self.external_integration: - _db.delete(self.external_integration) - # Now delete the Collection itself. 
_db.delete(self) _db.commit() -collections_libraries: Table = Table( - "collections_libraries", - Base.metadata, - Column( - "collection_id", - Integer, - ForeignKey("collections.id"), - index=True, - nullable=False, - ), - Column( - "library_id", Integer, ForeignKey("libraries.id"), index=True, nullable=False - ), - UniqueConstraint("collection_id", "library_id"), -) - - collections_identifiers: Table = Table( "collections_identifiers", Base.metadata, @@ -830,19 +669,3 @@ class CollectionMissing(Exception): ), UniqueConstraint("collection_id", "customlist_id"), ) - - -class HasExternalIntegrationPerCollection(metaclass=ABCMeta): - """Interface allowing to get access to an external integration""" - - @abstractmethod - def collection_external_integration( - self, collection: Optional[Collection] - ) -> ExternalIntegration: - """Returns an external integration associated with the collection - - :param collection: Collection - - :return: External integration associated with the collection - """ - raise NotImplementedError() diff --git a/core/model/configuration.py b/core/model/configuration.py index c68bca272..079b5222b 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -3,9 +3,8 @@ # ExternalIntegration, ExternalIntegrationLink, ConfigurationSetting import json import logging -from abc import ABCMeta, abstractmethod from enum import Enum -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from sqlalchemy import Column, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship @@ -42,10 +41,6 @@ class ExternalIntegration(Base): # to this are defined in the circulation manager. PATRON_AUTH_GOAL = "patron_auth" - # These integrations are associated with external services such - # as Overdrive which provide access to books. - LICENSE_GOAL = "licenses" - # These integrations are associated with external services such as # the metadata wrangler, which provide information about books, # but not the books themselves. @@ -67,10 +62,6 @@ class ExternalIntegration(Base): # collect logs of server-side events. LOGGING_GOAL = "logging" - # These integrations are associated with external services that - # a library uses to manage its catalog. - CATALOG_GOAL = "ils_catalog" - # Supported protocols for ExternalIntegrations with LICENSE_GOAL. OPDS_IMPORT = "OPDS Import" OPDS2_IMPORT = "OPDS 2.0 Import" @@ -125,9 +116,6 @@ class ExternalIntegration(Base): # Integrations with ANALYTICS_GOAL GOOGLE_ANALYTICS = "Google Analytics" - # Integrations with CATALOG_GOAL - MARC_EXPORT = "MARC Export" - # Keys for common configuration settings # If there is a special URL to use for access to this API, @@ -174,22 +162,14 @@ class ExternalIntegration(Base): # Any additional configuration information goes into # ConfigurationSettings. 
- settings: Mapped[List[ConfigurationSetting]] = relationship( + settings: Mapped[list[ConfigurationSetting]] = relationship( "ConfigurationSetting", back_populates="external_integration", cascade="all, delete", uselist=True, ) - # Any number of Collections may designate an ExternalIntegration - # as the source of their configuration - collections: Mapped[List[Collection]] = relationship( - "Collection", - backref="_external_integration", - foreign_keys="Collection.external_integration_id", - ) - - libraries: Mapped[List[Library]] = relationship( + libraries: Mapped[list[Library]] = relationship( "Library", back_populates="integrations", secondary=lambda: externalintegrations_libraries, @@ -331,24 +311,6 @@ def password(self): def password(self, new_password): return self.set_setting(self.PASSWORD, new_password) - @hybrid_property - def custom_accept_header(self): - return self.setting(self.CUSTOM_ACCEPT_HEADER).value - - @custom_accept_header.setter - def custom_accept_header(self, new_custom_accept_header): - return self.set_setting(self.CUSTOM_ACCEPT_HEADER, new_custom_accept_header) - - @hybrid_property - def primary_identifier_source(self): - return self.setting(self.PRIMARY_IDENTIFIER_SOURCE).value - - @primary_identifier_source.setter - def primary_identifier_source(self, new_primary_identifier_source): - return self.set_setting( - self.PRIMARY_IDENTIFIER_SOURCE, new_primary_identifier_source - ) - def explain(self, library=None, include_secrets=False): """Create a series of human-readable strings to explain an ExternalIntegration's settings. @@ -679,20 +641,6 @@ def excluded_audio_data_sources(cls, _db): return value -class HasExternalIntegration(metaclass=ABCMeta): - """Interface allowing to get access to an external integration""" - - @abstractmethod - def external_integration(self, db: Session) -> Optional[ExternalIntegration]: - """Returns an external integration associated with this object - - :param db: Database session - - :return: External integration associated with this object - """ - raise NotImplementedError() - - class ConfigurationAttributeValue(Enum): """Enumeration of common configuration attribute values""" diff --git a/core/model/contributor.py b/core/model/contributor.py index 00e2ebf93..1f73c519f 100644 --- a/core/model/contributor.py +++ b/core/model/contributor.py @@ -3,7 +3,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, List, Set +from typing import TYPE_CHECKING from sqlalchemy import Column, ForeignKey, Integer, Unicode, UniqueConstraint from sqlalchemy.dialects.postgresql import ARRAY, JSON @@ -52,9 +52,9 @@ class Contributor(Base): # provided by a publisher. biography = Column(Unicode) - extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + extra: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) - contributions: Mapped[List[Contribution]] = relationship( + contributions: Mapped[list[Contribution]] = relationship( "Contribution", back_populates="contributor", uselist=True ) @@ -93,7 +93,7 @@ class Contributor(Base): COPYRIGHT_HOLDER_ROLE = "Copyright holder" TRANSCRIBER_ROLE = "Transcriber" DESIGNER_ROLE = "Designer" - AUTHOR_ROLES: Set[str] = {PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE} + AUTHOR_ROLES: set[str] = {PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE} # Map our recognized roles to MARC relators. 
# https://www.loc.gov/marc/relators/relaterm.html diff --git a/core/model/coverage.py b/core/model/coverage.py index e1204fe9e..e3f422a1e 100644 --- a/core/model/coverage.py +++ b/core/model/coverage.py @@ -1,7 +1,7 @@ # BaseCoverageRecord, Timestamp, CoverageRecord, WorkCoverageRecord from __future__ import annotations -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from sqlalchemy import ( Column, @@ -614,7 +614,6 @@ class WorkCoverageRecord(Base, BaseCoverageRecord): CLASSIFY_OPERATION = "classify" SUMMARY_OPERATION = "summary" QUALITY_OPERATION = "quality" - GENERATE_MARC_OPERATION = "generate-marc" UPDATE_SEARCH_INDEX_OPERATION = "update-search-index" id = Column(Integer, primary_key=True) @@ -791,7 +790,7 @@ class EquivalencyCoverageRecord(Base, BaseCoverageRecord): def bulk_add( cls, _db, - equivalents: List[Equivalency], + equivalents: list[Equivalency], operation: str, status=BaseCoverageRecord.REGISTERED, batch_size=100, diff --git a/core/model/customlist.py b/core/model/customlist.py index bf44e00d5..5c2e8fa22 100644 --- a/core/model/customlist.py +++ b/core/model/customlist.py @@ -3,7 +3,7 @@ import logging from functools import total_ordering -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from sqlalchemy import ( Boolean, @@ -62,12 +62,12 @@ class CustomList(Base): # cached when the list contents change. size = Column(Integer, nullable=False, default=0) - entries: Mapped[List[CustomListEntry]] = relationship( + entries: Mapped[list[CustomListEntry]] = relationship( "CustomListEntry", backref="customlist", uselist=True ) # List sharing mechanisms - shared_locally_with_libraries: Mapped[List[Library]] = relationship( + shared_locally_with_libraries: Mapped[list[Library]] = relationship( "Library", secondary=lambda: customlist_sharedlibrary, back_populates="shared_custom_lists", @@ -81,7 +81,7 @@ class CustomList(Base): auto_update_status: Mapped[str] = Column(auto_update_status_enum, default=INIT) # type: ignore[assignment] # Typing specific - collections: List[Collection] + collections: list[Collection] library: Library __table_args__ = ( diff --git a/core/model/datasource.py b/core/model/datasource.py index 665fdddd4..085c18c46 100644 --- a/core/model/datasource.py +++ b/core/model/datasource.py @@ -2,7 +2,7 @@ from __future__ import annotations from collections import defaultdict -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING from urllib.parse import quote, unquote from sqlalchemy import Boolean, Column, Integer, String @@ -42,68 +42,68 @@ class DataSource(Base, HasSessionCache, DataSourceConstants): name = Column(String, unique=True, index=True) offers_licenses = Column(Boolean, default=False) primary_identifier_type = Column(String, index=True) - extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + extra: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) # One DataSource can generate many Editions. - editions: Mapped[List[Edition]] = relationship( + editions: Mapped[list[Edition]] = relationship( "Edition", back_populates="data_source", uselist=True ) # One DataSource can generate many CoverageRecords. - coverage_records: Mapped[List[CoverageRecord]] = relationship( + coverage_records: Mapped[list[CoverageRecord]] = relationship( "CoverageRecord", back_populates="data_source" ) # One DataSource can generate many IDEquivalencies. 
- id_equivalencies: Mapped[List[Equivalency]] = relationship( + id_equivalencies: Mapped[list[Equivalency]] = relationship( "Equivalency", backref="data_source" ) # One DataSource can grant access to many LicensePools. - license_pools: Mapped[List[LicensePool]] = relationship( + license_pools: Mapped[list[LicensePool]] = relationship( "LicensePool", back_populates="data_source", overlaps="delivery_mechanisms" ) # One DataSource can provide many Hyperlinks. - links: Mapped[List[Hyperlink]] = relationship("Hyperlink", backref="data_source") + links: Mapped[list[Hyperlink]] = relationship("Hyperlink", backref="data_source") # One DataSource can provide many Resources. - resources: Mapped[List[Resource]] = relationship("Resource", backref="data_source") + resources: Mapped[list[Resource]] = relationship("Resource", backref="data_source") # One DataSource can generate many Measurements. - measurements: Mapped[List[Measurement]] = relationship( + measurements: Mapped[list[Measurement]] = relationship( "Measurement", back_populates="data_source" ) # One DataSource can provide many Classifications. - classifications: Mapped[List[Classification]] = relationship( + classifications: Mapped[list[Classification]] = relationship( "Classification", backref="data_source" ) # One DataSource can have many associated Credentials. - credentials: Mapped[List[Credential]] = relationship( + credentials: Mapped[list[Credential]] = relationship( "Credential", back_populates="data_source" ) # One DataSource can generate many CustomLists. - custom_lists: Mapped[List[CustomList]] = relationship( + custom_lists: Mapped[list[CustomList]] = relationship( "CustomList", back_populates="data_source" ) # One DataSource can provide many LicensePoolDeliveryMechanisms. - delivery_mechanisms: Mapped[List[LicensePoolDeliveryMechanism]] = relationship( + delivery_mechanisms: Mapped[list[LicensePoolDeliveryMechanism]] = relationship( "LicensePoolDeliveryMechanism", backref="data_source", foreign_keys=lambda: [LicensePoolDeliveryMechanism.data_source_id], ) - license_lanes: Mapped[List[Lane]] = relationship( + license_lanes: Mapped[list[Lane]] = relationship( "Lane", back_populates="license_datasource", foreign_keys="Lane.license_datasource_id", ) - list_lanes: Mapped[List[Lane]] = relationship( + list_lanes: Mapped[list[Lane]] = relationship( "Lane", back_populates="_list_datasource", foreign_keys="Lane._list_datasource_id", diff --git a/core/model/devicetokens.py b/core/model/devicetokens.py index b324b6561..faab4b93e 100644 --- a/core/model/devicetokens.py +++ b/core/model/devicetokens.py @@ -1,5 +1,4 @@ import sys -from typing import Union from sqlalchemy import Column, Enum, ForeignKey, Index, Integer, Unicode from sqlalchemy.exc import IntegrityError @@ -55,7 +54,7 @@ def create( db, token_type: str, device_token: str, - patron: Union[Patron, int], + patron: Patron | int, ) -> Self: """Create a DeviceToken while ensuring sql issues are managed. Raises InvalidTokenTypeError, DuplicateDeviceTokenError""" diff --git a/core/model/edition.py b/core/model/edition.py index af435a769..a83f8f7ea 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -3,7 +3,7 @@ import logging from collections import defaultdict -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING from sqlalchemy import ( Column, @@ -93,12 +93,12 @@ class Edition(Base, EditionConstants): ) # An Edition may show up in many CustomListEntries. 
- custom_list_entries: Mapped[List[CustomListEntry]] = relationship( + custom_list_entries: Mapped[list[CustomListEntry]] = relationship( "CustomListEntry", backref="edition" ) # An Edition may be the presentation edition for many LicensePools. - is_presentation_for: Mapped[List[LicensePool]] = relationship( + is_presentation_for: Mapped[list[LicensePool]] = relationship( "LicensePool", backref="presentation_edition" ) @@ -117,7 +117,7 @@ class Edition(Base, EditionConstants): author = Column(Unicode, index=True) sort_author = Column(Unicode, index=True) - contributions: Mapped[List[Contribution]] = relationship( + contributions: Mapped[list[Contribution]] = relationship( "Contribution", back_populates="edition", uselist=True ) @@ -151,7 +151,7 @@ class Edition(Base, EditionConstants): cover_thumbnail_url = Column(Unicode) # Information kept in here probably won't be used. - extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + extra: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) def __repr__(self): id_repr = repr(self.primary_identifier) diff --git a/core/model/formats.py b/core/model/formats.py index ee12b8d80..e49d47518 100644 --- a/core/model/formats.py +++ b/core/model/formats.py @@ -1,5 +1,5 @@ import sys -from typing import List, Mapping, Optional +from collections.abc import Mapping from flask_babel import lazy_gettext as _ @@ -26,14 +26,14 @@ class FormatPriorities: _prioritized_drm_schemes: Mapping[str, int] _prioritized_content_types: Mapping[str, int] - _hidden_content_types: List[str] + _hidden_content_types: list[str] _deprioritize_lcp_non_epubs: bool def __init__( self, - prioritized_drm_schemes: List[str], - prioritized_content_types: List[str], - hidden_content_types: List[str], + prioritized_drm_schemes: list[str], + prioritized_content_types: list[str], + hidden_content_types: list[str], deprioritize_lcp_non_epubs: bool, ): """ @@ -58,7 +58,7 @@ def __init__( def prioritize_for_pool( self, pool: LicensePool - ) -> List[LicensePoolDeliveryMechanism]: + ) -> list[LicensePoolDeliveryMechanism]: """ Filter and prioritize the delivery mechanisms in the given pool. :param pool: The license pool @@ -67,8 +67,8 @@ def prioritize_for_pool( return self.prioritize_mechanisms(pool.delivery_mechanisms) def prioritize_mechanisms( - self, mechanisms: List[LicensePoolDeliveryMechanism] - ) -> List[LicensePoolDeliveryMechanism]: + self, mechanisms: list[LicensePoolDeliveryMechanism] + ) -> list[LicensePoolDeliveryMechanism]: """ Filter and prioritize the delivery mechanisms in the given pool. :param mechanisms: The list of delivery mechanisms @@ -76,7 +76,7 @@ def prioritize_mechanisms( """ # First, filter out all hidden content types. - mechanisms_filtered: List[LicensePoolDeliveryMechanism] = [] + mechanisms_filtered: list[LicensePoolDeliveryMechanism] = [] for delivery in mechanisms: delivery_mechanism = delivery.delivery_mechanism if delivery_mechanism: @@ -115,7 +115,7 @@ def prioritize_mechanisms( @staticmethod def _artificial_lcp_content_priority( - drm_scheme: Optional[str], content_type: Optional[str] + drm_scheme: str | None, content_type: str | None ) -> int: """A comparison function that arbitrarily deflates the priority of LCP content. 
The comparison function treats all other DRM mechanisms and content types as equal.""" @@ -127,7 +127,7 @@ def _artificial_lcp_content_priority( else: return 0 - def _drm_scheme_priority(self, drm_scheme: Optional[str]) -> int: + def _drm_scheme_priority(self, drm_scheme: str | None) -> int: """Determine the priority of a DRM scheme. A lack of DRM is always prioritized over having DRM, and prioritized schemes are always higher priority than non-prioritized schemes.""" @@ -143,7 +143,7 @@ def _content_type_priority(self, content_type: str) -> int: class FormatPrioritiesSettings(BaseSettings): - prioritized_drm_schemes: Optional[list] = FormField( + prioritized_drm_schemes: list | None = FormField( default=[], form=ConfigurationFormItem( label=_("Prioritized DRM schemes"), @@ -165,7 +165,7 @@ class FormatPrioritiesSettings(BaseSettings): ), ) - prioritized_content_types: Optional[list] = FormField( + prioritized_content_types: list | None = FormField( default=[], form=ConfigurationFormItem( label=_("Prioritized content types"), @@ -187,7 +187,7 @@ class FormatPrioritiesSettings(BaseSettings): ), ) - deprioritize_lcp_non_epubs: Optional[str] = FormField( + deprioritize_lcp_non_epubs: str | None = FormField( default="false", form=ConfigurationFormItem( label=_("De-prioritize LCP non-EPUBs"), diff --git a/core/model/hassessioncache.py b/core/model/hassessioncache.py index f17fa48bf..cd91f7750 100644 --- a/core/model/hassessioncache.py +++ b/core/model/hassessioncache.py @@ -5,8 +5,9 @@ import sys from abc import abstractmethod from collections import namedtuple +from collections.abc import Callable, Hashable from types import SimpleNamespace -from typing import Callable, Hashable, TypeVar +from typing import TypeVar from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Mapped, Session diff --git a/core/model/identifier.py b/core/model/identifier.py index 35245e373..7a687255c 100644 --- a/core/model/identifier.py +++ b/core/model/identifier.py @@ -7,7 +7,7 @@ from abc import ABCMeta, abstractmethod from collections import defaultdict from functools import total_ordering -from typing import TYPE_CHECKING, List, overload +from typing import TYPE_CHECKING, overload from urllib.parse import quote, unquote import isbnlib @@ -242,7 +242,7 @@ class Identifier(Base, IdentifierConstants): type = Column(String(64), index=True) identifier = Column(String, index=True) - equivalencies: Mapped[List[Equivalency]] = relationship( + equivalencies: Mapped[list[Equivalency]] = relationship( "Equivalency", foreign_keys="Equivalency.input_id", back_populates="input", @@ -250,7 +250,7 @@ class Identifier(Base, IdentifierConstants): uselist=True, ) - inbound_equivalencies: Mapped[List[Equivalency]] = relationship( + inbound_equivalencies: Mapped[list[Equivalency]] = relationship( "Equivalency", foreign_keys="Equivalency.output_id", back_populates="output", @@ -259,7 +259,7 @@ class Identifier(Base, IdentifierConstants): ) # One Identifier may have many associated CoverageRecords. - coverage_records: Mapped[List[CoverageRecord]] = relationship( + coverage_records: Mapped[list[CoverageRecord]] = relationship( "CoverageRecord", back_populates="identifier" ) @@ -273,13 +273,13 @@ def __repr__(self): # One Identifier may serve as the primary identifier for # several Editions. 
- primarily_identifies: Mapped[List[Edition]] = relationship( + primarily_identifies: Mapped[list[Edition]] = relationship( "Edition", backref="primary_identifier" ) # One Identifier may serve as the identifier for many # LicensePools, through different Collections. - licensed_through: Mapped[List[LicensePool]] = relationship( + licensed_through: Mapped[list[LicensePool]] = relationship( "LicensePool", back_populates="identifier", lazy="joined", @@ -287,27 +287,27 @@ def __repr__(self): ) # One Identifier may have many Links. - links: Mapped[List[Hyperlink]] = relationship( + links: Mapped[list[Hyperlink]] = relationship( "Hyperlink", backref="identifier", uselist=True ) # One Identifier may be the subject of many Measurements. - measurements: Mapped[List[Measurement]] = relationship( + measurements: Mapped[list[Measurement]] = relationship( "Measurement", backref="identifier" ) # One Identifier may participate in many Classifications. - classifications: Mapped[List[Classification]] = relationship( + classifications: Mapped[list[Classification]] = relationship( "Classification", backref="identifier" ) # One identifier may participate in many Annotations. - annotations: Mapped[List[Annotation]] = relationship( + annotations: Mapped[list[Annotation]] = relationship( "Annotation", backref="identifier" ) # One Identifier can have many LicensePoolDeliveryMechanisms. - delivery_mechanisms: Mapped[List[LicensePoolDeliveryMechanism]] = relationship( + delivery_mechanisms: Mapped[list[LicensePoolDeliveryMechanism]] = relationship( "LicensePoolDeliveryMechanism", backref="identifier", foreign_keys=lambda: [LicensePoolDeliveryMechanism.identifier_id], diff --git a/core/model/integration.py b/core/model/integration.py index ebac448cb..755b58fe4 100644 --- a/core/model/integration.py +++ b/core/model/integration.py @@ -1,15 +1,17 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Literal, overload +from typing import TYPE_CHECKING, Any from sqlalchemy import Column from sqlalchemy import Enum as SQLAlchemyEnum -from sqlalchemy import ForeignKey, Integer, Unicode +from sqlalchemy import ForeignKey, Index, Integer, Unicode, select from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.orm import Mapped, Query, Session, relationship +from sqlalchemy.orm.attributes import flag_modified from core.integration.goals import Goals -from core.model import Base, get_one_or_create +from core.model import Base if TYPE_CHECKING: from core.model import Collection, Library @@ -45,58 +47,75 @@ class IntegrationConfiguration(Base): name = Column(Unicode, nullable=False, unique=True) # The configuration settings for this integration. Stored as json. - settings_dict: Mapped[Dict[str, Any]] = Column( + settings_dict: Mapped[dict[str, Any]] = Column( "settings", JSONB, nullable=False, default=dict ) + # Integration specific context data. Stored as json. This is used to + # store configuration data that is not user supplied for a particular + # integration. + context: Mapped[dict[str, Any]] = Column(JSONB, nullable=False, default=dict) + + __table_args__ = ( + Index( + "ix_integration_configurations_settings_dict", + settings_dict, + postgresql_using="gin", + ), + ) + + def context_update(self, new_context: dict[str, Any]) -> None: + """Update the context for this integration""" + self.context.update(new_context) + flag_modified(self, "context") + # Self test results, stored as json. 
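# --- Illustrative aside: why context_update() above pairs an in-place dict
# --- mutation with flag_modified(). Without a mutation-tracking wrapper such
# --- as MutableDict, SQLAlchemy only notices attribute *assignment*, so
# --- editing a JSON dict in place leaves the attribute clean and no UPDATE is
# --- emitted. A minimal runnable sketch with a stand-in model:
from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

SketchBase = declarative_base()


class SketchRow(SketchBase):
    __tablename__ = "sketch_rows"
    id = Column(Integer, primary_key=True)
    context = Column(JSON, nullable=False, default=dict)


engine = create_engine("sqlite://")
SketchBase.metadata.create_all(engine)
with Session(engine) as session:
    row = SketchRow(context={})
    session.add(row)
    session.commit()

    row.context["token"] = "abc"   # in-place mutation: not tracked on its own
    flag_modified(row, "context")  # explicitly mark the attribute as dirty
    session.commit()               # the UPDATE is emitted now
# --- End of aside.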
 self_test_results = Column(JSONB, nullable=False, default=dict)

     library_configurations: Mapped[
-        List[IntegrationLibraryConfiguration]
+        list[IntegrationLibraryConfiguration]
     ] = relationship(
         "IntegrationLibraryConfiguration",
         back_populates="parent",
         uselist=True,
-        cascade="all, delete",
+        cascade="all, delete-orphan",
         passive_deletes=True,
     )

-    collection: Mapped[Collection] = relationship("Collection", uselist=False)
+    collection: Mapped[Collection] = relationship(
+        "Collection", back_populates="integration_configuration", uselist=False
+    )

-    @overload
-    def for_library(
-        self, library_id: int, create: Literal[True]
-    ) -> IntegrationLibraryConfiguration:
-        ...
+    # https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html#simplifying-association-objects
+    libraries: Mapped[list[Library]] = association_proxy(
+        "library_configurations",
+        "library",
+        creator=lambda library: IntegrationLibraryConfiguration(library=library),
+    )

-    @overload
     def for_library(
-        self, library_id: int | None, create: bool = False
+        self, library: int | Library | None
     ) -> IntegrationLibraryConfiguration | None:
-        ...
+        """Fetch the library configuration for a specific library"""
+        from core.model import Library

-    def for_library(
-        self, library_id: int | None, create: bool = False
-    ) -> IntegrationLibraryConfiguration | None:
-        """Fetch the library configuration specifically by library_id"""
-        if library_id is None:
+        if library is None:
             return None

-        for config in self.library_configurations:
-            if config.library_id == library_id:
-                return config
-        if create:
-            session = Session.object_session(self)
-            config, _ = get_one_or_create(
-                session,
-                IntegrationLibraryConfiguration,
-                parent_id=self.id,
-                library_id=library_id,
+        db = Session.object_session(self)
+        if isinstance(library, Library):
+            if library.id is None:
+                return None
+            library_id = library.id
+        else:
+            library_id = library
+
+        return db.execute(
+            select(IntegrationLibraryConfiguration).where(
+                IntegrationLibraryConfiguration.library_id == library_id,
+                IntegrationLibraryConfiguration.parent_id == self.id,
             )
-            session.refresh(self)
-            return config
-        return None
+        ).scalar_one_or_none()

     def __repr__(self) -> str:
         return f"<IntegrationConfiguration: {self.name} {self.protocol}>"

@@ -112,6 +131,10 @@ class IntegrationLibraryConfiguration(Base):
     It stores the configuration settings for each external integration
     in a single json row in the database. These settings are then
     serialized using Pydantic to a Python object.
+
+    This is a many-to-many relationship between IntegrationConfiguration and
+    Library. Implementing the Association Object pattern:
+    https://docs.sqlalchemy.org/en/14/orm/basic_relationships.html#association-object
     """

     __tablename__ = "integration_library_configurations"
@@ -128,8 +151,7 @@ class IntegrationLibraryConfiguration(Base):
         "IntegrationConfiguration", back_populates="library_configurations"
     )

-    # The library this integration is associated with. This is optional
-    # and is only used for integrations that are specific to a library.
+    # The library this integration is associated with.
     library_id = Column(
         Integer,
         ForeignKey("libraries.id", ondelete="CASCADE"),
@@ -139,7 +161,7 @@ class IntegrationLibraryConfiguration(Base):
     library: Mapped[Library] = relationship("Library")

     # The configuration settings for this integration. Stored as json.
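# --- Illustrative aside: hypothetical usage of the two changes above. The
# --- `integration` and `library` names are placeholders, not values from this
# --- patch.
#
# Appending a Library to the association proxy routes through the `creator`
# lambda, which wraps it in an IntegrationLibraryConfiguration row:
#
#     integration.libraries.append(library)
#     # roughly equivalent to:
#     # integration.library_configurations.append(
#     #     IntegrationLibraryConfiguration(library=library)
#     # )
#
# And the reworked for_library() now accepts a Library object or a raw id,
# returning the matching configuration or None instead of creating one:
#
#     config = integration.for_library(library)     # by Library object
#     config = integration.for_library(library.id)  # by id
#     if config is None:
#         ...  # this library is not linked to the integration
# --- End of aside.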
-    settings_dict: Mapped[Dict[str, Any]] = Column(
+    settings_dict: Mapped[dict[str, Any]] = Column(
         "settings", JSONB, nullable=False, default=dict
     )
diff --git a/core/model/library.py b/core/model/library.py
index 9f6693912..32c268da5 100644
--- a/core/model/library.py
+++ b/core/model/library.py
@@ -3,17 +3,8 @@
 import logging
 from collections import Counter
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    Generator,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
-)
+from collections.abc import Generator, Sequence
+from typing import TYPE_CHECKING, Any

 from Crypto.PublicKey import RSA
 from expiringdict import ExpiringDict
@@ -26,6 +17,7 @@
     Table,
     Unicode,
     UniqueConstraint,
+    select,
 )
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.orm import Mapped, Query, relationship
@@ -36,6 +28,7 @@
 from core.entrypoint import EntryPoint
 from core.facets import FacetConstants
 from core.integration.base import integration_settings_load, integration_settings_update
+from core.integration.goals import Goals
 from core.model import Base, get_one
 from core.model.announcements import Announcement
 from core.model.customlist import customlist_sharedlibrary
@@ -47,9 +40,8 @@

 if TYPE_CHECKING:
     from core.lane import Lane
-    from core.model import (  # noqa: autoflake
+    from core.model import (
         AdminRole,
-        CachedMARCFile,
         CirculationEvent,
         Collection,
         ConfigurationSetting,
@@ -99,30 +91,23 @@ class Library(Base, HasSessionCache):
     library_registry_shared_secret = Column(Unicode, unique=True)

     # A library may have many Patrons.
-    patrons: Mapped[List[Patron]] = relationship(
+    patrons: Mapped[list[Patron]] = relationship(
         "Patron", back_populates="library", cascade="all, delete-orphan"
     )

     # A Library may have many admin roles.
-    adminroles: Mapped[List[AdminRole]] = relationship(
+    adminroles: Mapped[list[AdminRole]] = relationship(
         "AdminRole", back_populates="library", cascade="all, delete-orphan"
     )

-    # A Library may have many CachedMARCFiles.
-    cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship(
-        "CachedMARCFile",
-        backref="library",
-        cascade="all, delete-orphan",
-    )
-
     # A Library may have many CustomLists.
-    custom_lists: Mapped[List[CustomList]] = relationship(
+    custom_lists: Mapped[list[CustomList]] = relationship(
         "CustomList", backref="library", uselist=True
     )

     # Lists shared with this library
     # shared_custom_lists: "CustomList"
-    shared_custom_lists: Mapped[List[CustomList]] = relationship(
+    shared_custom_lists: Mapped[list[CustomList]] = relationship(
         "CustomList",
         secondary=lambda: customlist_sharedlibrary,
         back_populates="shared_locally_with_libraries",
@@ -130,7 +115,7 @@ class Library(Base, HasSessionCache):
     )

     # A Library may have many ExternalIntegrations.
-    integrations: Mapped[List[ExternalIntegration]] = relationship(
+    integrations: Mapped[list[ExternalIntegration]] = relationship(
         "ExternalIntegration",
         secondary=lambda: externalintegrations_libraries,
         back_populates="libraries",
@@ -139,21 +124,21 @@
     # This parameter is deprecated, and will be removed once all of our integrations
     # are updated to use IntegrationSettings. New code shouldn't use it.
     # TODO: Remove this column.
-    external_integration_settings: Mapped[List[ConfigurationSetting]] = relationship(
+    external_integration_settings: Mapped[list[ConfigurationSetting]] = relationship(
         "ConfigurationSetting",
         back_populates="library",
         cascade="all, delete",
     )

     # Any additional configuration information is stored as JSON on this column.
-    settings_dict: Dict[str, Any] = Column(JSONB, nullable=False, default=dict)
+    settings_dict: dict[str, Any] = Column(JSONB, nullable=False, default=dict)

     # A Library may have many CirculationEvents
-    circulation_events: Mapped[List[CirculationEvent]] = relationship(
+    circulation_events: Mapped[list[CirculationEvent]] = relationship(
         "CirculationEvent", backref="library", cascade="all, delete-orphan"
     )

-    library_announcements: Mapped[List[Announcement]] = relationship(
+    library_announcements: Mapped[list[Announcement]] = relationship(
         "Announcement",
         back_populates="library",
         cascade="all, delete-orphan",
@@ -162,12 +147,12 @@
     # A class-wide cache mapping library ID to the calculated value
     # used for Library.has_root_lane. This is invalidated whenever
     # Lane configuration changes, and it will also expire on its own.
-    _has_root_lane_cache: Dict[Union[int, None], bool] = ExpiringDict(
+    _has_root_lane_cache: dict[int | None, bool] = ExpiringDict(
         max_len=1000, max_age_seconds=3600
     )

     # A Library can have many lanes
-    lanes: Mapped[List[Lane]] = relationship(
+    lanes: Mapped[list[Lane]] = relationship(
         "Lane",
         back_populates="library",
         foreign_keys="Lane.library_id",
@@ -189,11 +174,28 @@ class Library(Base, HasSessionCache):
         uselist=False,
     )

-    # Typing specific
-    collections: List[Collection]
+    @property
+    def collections(self) -> Sequence[Collection]:
+        """Get the collections for this library"""
+        from core.model import (
+            Collection,
+            IntegrationConfiguration,
+            IntegrationLibraryConfiguration,
+        )
+
+        _db = Session.object_session(self)
+        return _db.scalars(
+            select(Collection)
+            .join(IntegrationConfiguration)
+            .join(IntegrationLibraryConfiguration)
+            .where(
+                IntegrationConfiguration.goal == Goals.LICENSE_GOAL,
+                IntegrationLibraryConfiguration.library_id == self.id,
+            )
+        ).all()

     # Cache of the library's loaded settings object
-    _settings: Optional[LibrarySettings]
+    _settings: LibrarySettings | None

     def __repr__(self) -> str:
         return (
@@ -201,14 +203,14 @@ def __repr__(self) -> str:
             % (self.name, self.short_name, self.uuid, self.library_registry_short_name)
         )

-    def cache_key(self) -> Optional[str]:
+    def cache_key(self) -> str | None:
         return self.short_name

     @classmethod
-    def lookup(cls, _db: Session, short_name: Optional[str]) -> Optional[Library]:
+    def lookup(cls, _db: Session, short_name: str | None) -> Library | None:
         """Look up a library by short name."""

-        def _lookup() -> Tuple[Optional[Library], bool]:
+        def _lookup() -> tuple[Library | None, bool]:
             library = get_one(_db, Library, short_name=short_name)
             return library, False

@@ -216,13 +218,13 @@ def _lookup() -> Tuple[Optional[Library], bool]:
         return library

     @classmethod
-    def default(cls, _db: Session) -> Optional[Library]:
+    def default(cls, _db: Session) -> Library | None:
         """Find the default Library."""
         # If for some reason there are multiple default libraries in
         # the database, they're not actually interchangeable, but
         # raising an error here might make it impossible to fix the
         # problem.
- defaults: List[Library] = ( + defaults: list[Library] = ( _db.query(Library) .filter(Library._is_default == True) .order_by(Library.id.asc()) @@ -257,7 +259,7 @@ def default(cls, _db: Session) -> Optional[Library]: return default_library # type: ignore[no-any-return] @classmethod - def generate_keypair(cls) -> Tuple[str, bytes]: + def generate_keypair(cls) -> tuple[str, bytes]: """Generate a public / private keypair for a library.""" private_key = RSA.generate(2048) public_key = private_key.public_key() @@ -266,12 +268,12 @@ def generate_keypair(cls) -> Tuple[str, bytes]: return public_key_str, private_key_bytes @hybrid_property - def library_registry_short_name(self) -> Optional[str]: + def library_registry_short_name(self) -> str | None: """Gets library_registry_short_name from database""" return self._library_registry_short_name @library_registry_short_name.setter - def library_registry_short_name(self, value: Optional[str]) -> None: + def library_registry_short_name(self, value: str | None) -> None: """Uppercase the library registry short name on the way in.""" if value: value = value.upper() @@ -308,7 +310,7 @@ def all_collections(self) -> Generator[Collection, None, None]: yield from collection.parents @property - def entrypoints(self) -> Generator[Optional[Type[EntryPoint]], None, None]: + def entrypoints(self) -> Generator[type[EntryPoint] | None, None, None]: """The EntryPoints enabled for this library.""" values = self.settings.enabled_entry_points for v in values: @@ -316,7 +318,7 @@ def entrypoints(self) -> Generator[Optional[Type[EntryPoint]], None, None]: if cls: yield cls - def enabled_facets(self, group_name: str) -> List[str]: + def enabled_facets(self, group_name: str) -> list[str]: """Look up the enabled facets for a given facet group.""" if group_name == FacetConstants.DISTRIBUTOR_FACETS_GROUP_NAME: enabled = [] @@ -368,7 +370,7 @@ def has_root_lanes(self) -> bool: def restrict_to_ready_deliverable_works( self, query: Query[Work], - collection_ids: Optional[List[int]] = None, + collection_ids: list[int] | None = None, show_suppressed: bool = False, ) -> Query[Work]: """Restrict a query to show only presentation-ready works present in @@ -430,7 +432,7 @@ def default_facet(self, group_name: str) -> str: """Look up the default facet for a given facet group.""" return getattr(self.settings, "facets_default_" + group_name) # type: ignore[no-any-return] - def explain(self, include_secrets: bool = False) -> List[str]: + def explain(self, include_secrets: bool = False) -> list[str]: """Create a series of human-readable strings to explain a library's settings. @@ -476,7 +478,7 @@ def explain(self, include_secrets: bool = False) -> List[str]: return lines @property - def is_default(self) -> Optional[bool]: + def is_default(self) -> bool | None: return self._is_default @is_default.setter diff --git a/core/model/licensing.py b/core/model/licensing.py index 31b2f8fb7..9fbde77a0 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -5,7 +5,7 @@ import datetime import logging from enum import Enum as PythonEnum -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple, overload +from typing import TYPE_CHECKING, Literal, overload from sqlalchemy import Boolean, Column, DateTime from sqlalchemy import Enum as AlchemyEnum @@ -140,13 +140,22 @@ class License(Base, LicenseFunctions): ) # One License can have many Loans. 
- loans: Mapped[List[Loan]] = relationship( + loans: Mapped[list[Loan]] = relationship( "Loan", back_populates="license", cascade="all, delete-orphan" ) __table_args__ = (UniqueConstraint("identifier", "license_pool_id"),) - def loan_to(self, patron: Patron, **kwargs) -> Tuple[Loan, bool]: + @property + def is_available_for_borrowing(self) -> bool: + """Can this license currently be used to borrow a book?""" + return ( + not self.is_inactive + and self.checkouts_available is not None + and self.checkouts_available > 0 + ) + + def loan_to(self, patron: Patron, **kwargs) -> tuple[Loan, bool]: loan, is_new = self.license_pool.loan_to(patron, **kwargs) loan.license = self return loan, is_new @@ -215,7 +224,7 @@ class LicensePool(Base): # If the source provides information about individual licenses, the # LicensePool may have many Licenses. - licenses: Mapped[List[License]] = relationship( + licenses: Mapped[list[License]] = relationship( "License", back_populates="license_pool", cascade="all, delete-orphan", @@ -223,17 +232,17 @@ class LicensePool(Base): ) # One LicensePool can have many Loans. - loans: Mapped[List[Loan]] = relationship( + loans: Mapped[list[Loan]] = relationship( "Loan", back_populates="license_pool", cascade="all, delete-orphan" ) # One LicensePool can have many Holds. - holds: Mapped[List[Hold]] = relationship( + holds: Mapped[list[Hold]] = relationship( "Hold", back_populates="license_pool", cascade="all, delete-orphan" ) # One LicensePool can have many CirculationEvents - circulation_events: Mapped[List[CirculationEvent]] = relationship( + circulation_events: Mapped[list[CirculationEvent]] = relationship( "CirculationEvent", backref="license_pool", cascade="all, delete-orphan" ) @@ -261,6 +270,7 @@ class LicensePool(Base): licenses_available: int = Column(Integer, default=0, index=True) licenses_reserved: int = Column(Integer, default=0) patrons_in_hold_queue = Column(Integer, default=0) + should_track_playtime = Column(Boolean, default=False, nullable=False) # This lets us cache the work of figuring out the best open access # link for this LicensePool. @@ -272,7 +282,7 @@ class LicensePool(Base): UniqueConstraint("identifier_id", "data_source_id", "collection_id"), ) - delivery_mechanisms: Mapped[List[LicensePoolDeliveryMechanism]] = relationship( + delivery_mechanisms: Mapped[list[LicensePoolDeliveryMechanism]] = relationship( "LicensePoolDeliveryMechanism", primaryjoin="and_(LicensePool.data_source_id==LicensePoolDeliveryMechanism.data_source_id, LicensePool.identifier_id==LicensePoolDeliveryMechanism.identifier_id)", foreign_keys=(data_source_id, identifier_id), @@ -332,7 +342,7 @@ def for_foreign_id( foreign_id, rights_status=None, collection=None, - ) -> Tuple[LicensePool, bool]: + ) -> tuple[LicensePool, bool]: ... @classmethod @@ -346,7 +356,7 @@ def for_foreign_id( rights_status, collection, autocreate: Literal[False], - ) -> Tuple[LicensePool | None, bool]: + ) -> tuple[LicensePool | None, bool]: ... 
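# --- Illustrative aside: a framework-free sketch of the borrowing check the
# --- new License.is_available_for_borrowing property performs above (the
# --- dataclass below is a stand-in, not the ORM model).
from dataclasses import dataclass


@dataclass
class LicenseSketch:
    is_inactive: bool
    checkouts_available: int | None

    @property
    def is_available_for_borrowing(self) -> bool:
        # Available only if the license is active and at least one
        # checkout remains.
        return (
            not self.is_inactive
            and self.checkouts_available is not None
            and self.checkouts_available > 0
        )


assert LicenseSketch(False, 2).is_available_for_borrowing
assert not LicenseSketch(True, 2).is_available_for_borrowing     # inactive
assert not LicenseSketch(False, 0).is_available_for_borrowing    # exhausted
assert not LicenseSketch(False, None).is_available_for_borrowing  # unknown count
# --- End of aside.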
@classmethod @@ -359,7 +369,7 @@ def for_foreign_id( rights_status=None, collection=None, autocreate=True, - ) -> Tuple[LicensePool | None, bool]: + ) -> tuple[LicensePool | None, bool]: """Find or create a LicensePool for the given foreign ID.""" from core.model.collection import CollectionMissing from core.model.datasource import DataSource @@ -1017,7 +1027,7 @@ def loan_to( end=None, fulfillment=None, external_identifier=None, - ) -> Tuple[Loan, bool]: + ) -> tuple[Loan, bool]: _db = Session.object_session(patron) kwargs = dict(start=start or utc_now(), end=end) loan, is_new = get_one_or_create( @@ -1080,22 +1090,9 @@ def best_available_license(self) -> License | None: The worst option would be pay-per-use, but we don't yet support any distributors that offer that model. """ - best: Optional[License] = None - now = utc_now() - - for license in self.licenses: - if license.is_inactive: - continue - - active_loan_count = len( - [l for l in license.loans if not l.end or l.end > now] - ) - checkouts_available = ( - license.checkouts_available if license.checkouts_available else 0 - ) - if active_loan_count >= checkouts_available: - continue + best: License | None = None + for license in (l for l in self.licenses if l.is_available_for_borrowing): if ( not best or (license.is_time_limited and not best.is_time_limited) @@ -1479,7 +1476,7 @@ class LicensePoolDeliveryMechanism(Base): ) # One LicensePoolDeliveryMechanism may fulfill many Loans. - fulfills: Mapped[List[Loan]] = relationship("Loan", back_populates="fulfillment") + fulfills: Mapped[list[Loan]] = relationship("Loan", back_populates="fulfillment") # One LicensePoolDeliveryMechanism may be associated with one RightsStatus. rightsstatus_id = Column(Integer, ForeignKey("rightsstatus.id"), index=True) @@ -1748,7 +1745,7 @@ class DeliveryMechanism(Base, HasSessionCache): default_client_can_fulfill_lookup.add((_media_type, BEARER_TOKEN)) license_pool_delivery_mechanisms: Mapped[ - List[LicensePoolDeliveryMechanism] + list[LicensePoolDeliveryMechanism] ] = relationship( "LicensePoolDeliveryMechanism", back_populates="delivery_mechanism", @@ -2002,11 +1999,11 @@ class RightsStatus(Base): # One RightsStatus may apply to many LicensePoolDeliveryMechanisms. licensepooldeliverymechanisms: Mapped[ - List[LicensePoolDeliveryMechanism] + list[LicensePoolDeliveryMechanism] ] = relationship("LicensePoolDeliveryMechanism", backref="rights_status") # One RightsStatus may apply to many Resources. - resources: Mapped[List[Resource]] = relationship( + resources: Mapped[list[Resource]] = relationship( "Resource", backref="rights_status" ) diff --git a/core/model/listeners.py b/core/model/listeners.py index 6148dc999..9e12d34f3 100644 --- a/core/model/listeners.py +++ b/core/model/listeners.py @@ -2,7 +2,6 @@ import datetime from threading import RLock -from typing import Union from sqlalchemy import event, text from sqlalchemy.orm import Session @@ -108,8 +107,6 @@ def _site_configuration_has_changed(_db, cooldown=1): # catch most that slip through the cracks. 
@event.listens_for(Collection.children, "append") @event.listens_for(Collection.children, "remove") -@event.listens_for(Collection.libraries, "append") -@event.listens_for(Collection.libraries, "remove") @event.listens_for(ExternalIntegration.settings, "append") @event.listens_for(ExternalIntegration.settings, "remove") @event.listens_for(Library.integrations, "append") @@ -210,9 +207,7 @@ def recursive_equivalence_on_identifier_create( @Listener.before_flush((Work, LicensePool), ListenerState.new) -def add_work_to_customlists( - session: Session, instance: Union[Work, LicensePool] -) -> None: +def add_work_to_customlists(session: Session, instance: Work | LicensePool) -> None: """Whenever a Work or LicensePool is created we must add it to the custom lists for its collection""" add_work_to_customlists_for_collection(instance) diff --git a/core/model/marcfile.py b/core/model/marcfile.py new file mode 100644 index 000000000..2658670ad --- /dev/null +++ b/core/model/marcfile.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import uuid +from typing import TYPE_CHECKING + +from sqlalchemy import Column, DateTime, ForeignKey, Integer, Unicode +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import Mapped, relationship + +from core.model import Base + +if TYPE_CHECKING: + from core.model import Collection, Library + + +class MarcFile(Base): + """A record that a MARC file has been created and cached for a particular library and collection.""" + + __tablename__ = "marcfiles" + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + + # The library should never be null in normal operation, but if a library is deleted, we don't want to lose the + # record of the MARC file, so we set the library to null. + # TODO: We need a job to clean up these records. + library_id = Column( + Integer, + ForeignKey("libraries.id", ondelete="SET NULL"), + nullable=True, + index=True, + ) + library: Mapped[Library] = relationship( + "Library", + ) + + # The collection should never be null in normal operation, but similar to the library, if a collection is deleted, + # we don't want to lose the record of the MARC file, so we set the collection to null. + # TODO: We need a job to clean up these records. + collection_id = Column( + Integer, + ForeignKey("collections.id", ondelete="SET NULL"), + nullable=True, + index=True, + ) + collection: Mapped[Collection] = relationship( + "Collection", + ) + + # The key in s3 used to store the file. + key = Column(Unicode, nullable=False) + + # The creation date of the file. + created = Column(DateTime(timezone=True), nullable=False, index=True) + + # If the file is a delta, the date of the previous file. If the file is a full file, null. + since = Column(DateTime(timezone=True), nullable=True) diff --git a/core/model/patron.py b/core/model/patron.py index 188a68b46..4a0f3220f 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -4,7 +4,7 @@ import datetime import logging import uuid -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from psycopg2.extras import NumericRange from sqlalchemy import ( @@ -153,15 +153,15 @@ class Patron(Base): # be an explicit decision of the ILS integration code. 
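# --- Illustrative aside: a hypothetical query against the new MarcFile model
# --- above, finding the most recent full export cached for one library and
# --- collection. `session`, `library`, and `collection` are placeholders.
#
#     from sqlalchemy import select
#     from core.model.marcfile import MarcFile
#
#     latest_full = session.execute(
#         select(MarcFile)
#         .where(
#             MarcFile.library_id == library.id,
#             MarcFile.collection_id == collection.id,
#             MarcFile.since.is_(None),  # full files only; deltas set `since`
#         )
#         .order_by(MarcFile.created.desc())
#         .limit(1)
#     ).scalar_one_or_none()
# --- End of aside.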
cached_neighborhood = Column(Unicode, default=None, index=True) - loans: Mapped[List[Loan]] = relationship( + loans: Mapped[list[Loan]] = relationship( "Loan", backref="patron", cascade="delete", uselist=True ) - loan_checkouts: Mapped[List[LoanCheckout]] = relationship( + loan_checkouts: Mapped[list[LoanCheckout]] = relationship( "LoanCheckout", back_populates="patron", cascade="delete", uselist=True ) - holds: Mapped[List[Hold]] = relationship( + holds: Mapped[list[Hold]] = relationship( "Hold", back_populates="patron", cascade="delete", @@ -169,7 +169,7 @@ class Patron(Base): order_by="Hold.id", ) - annotations: Mapped[List[Annotation]] = relationship( + annotations: Mapped[list[Annotation]] = relationship( "Annotation", backref="patron", order_by="desc(Annotation.timestamp)", @@ -177,7 +177,7 @@ class Patron(Base): ) # One Patron can have many associated Credentials. - credentials: Mapped[List[Credential]] = relationship( + credentials: Mapped[list[Credential]] = relationship( "Credential", back_populates="patron", cascade="delete" ) @@ -550,7 +550,7 @@ class Loan(Base, LoanAndHoldMixin): license: Mapped[License] = relationship("License", back_populates="loans") fulfillment_id = Column(Integer, ForeignKey("licensepooldeliveries.id")) - fulfillment: Mapped[Optional[LicensePoolDeliveryMechanism]] = relationship( + fulfillment: Mapped[LicensePoolDeliveryMechanism | None] = relationship( "LicensePoolDeliveryMechanism", back_populates="fulfills" ) start = Column(DateTime(timezone=True), index=True) @@ -755,9 +755,6 @@ def get_one_or_create(self, _db, patron, *args, **kwargs): """Find or create an Annotation, but only if the patron has annotation sync turned on. """ - if not patron.synchronize_annotations: - raise ValueError("Patron has opted out of synchronizing annotations.") - return get_one_or_create(_db, Annotation, patron=patron, *args, **kwargs) def set_inactive(self): diff --git a/core/model/resource.py b/core/model/resource.py index 4aef497dd..0a5b1931e 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -10,7 +10,6 @@ import traceback from hashlib import md5 from io import BytesIO -from typing import TYPE_CHECKING, Dict, List, Tuple from urllib.parse import quote, urlparse, urlsplit import requests @@ -42,9 +41,6 @@ from core.util.datetime_helpers import utc_now from core.util.http import HTTP -if TYPE_CHECKING: - from core.model import CachedMARCFile - class Resource(Base): """An external resource that may be mirrored locally. @@ -69,7 +65,7 @@ class Resource(Base): # Many Editions may choose this resource (as opposed to other # resources linked to them with rel="image") as their cover image. - cover_editions: Mapped[List[Edition]] = relationship( + cover_editions: Mapped[list[Edition]] = relationship( "Edition", backref="cover", foreign_keys=[Edition.cover_id] ) @@ -77,21 +73,21 @@ class Resource(Base): # linked to them with rel="description") as their summary. from core.model.work import Work - summary_works: Mapped[List[Work]] = relationship( + summary_works: Mapped[list[Work]] = relationship( "Work", backref="summary", foreign_keys=[Work.summary_id] ) # Many LicensePools (but probably one at most) may use this # resource in a delivery mechanism. 
licensepooldeliverymechanisms: Mapped[ - List[LicensePoolDeliveryMechanism] + list[LicensePoolDeliveryMechanism] ] = relationship( "LicensePoolDeliveryMechanism", back_populates="resource", foreign_keys=[LicensePoolDeliveryMechanism.resource_id], ) - links: Mapped[List[Hyperlink]] = relationship("Hyperlink", backref="resource") + links: Mapped[list[Hyperlink]] = relationship("Hyperlink", backref="resource") # The DataSource that is the controlling authority for this Resource. data_source_id = Column(Integer, ForeignKey("datasources.id"), index=True) @@ -106,7 +102,7 @@ class Resource(Base): rights_explanation = Column(Unicode) # A Resource may be transformed into many derivatives. - transformations: Mapped[List[ResourceTransformation]] = relationship( + transformations: Mapped[list[ResourceTransformation]] = relationship( "ResourceTransformation", foreign_keys="ResourceTransformation.original_id", lazy="joined", @@ -379,7 +375,7 @@ class ResourceTransformation(Base): original_id = Column(Integer, ForeignKey("resources.id"), index=True) # The settings used for the transformation. - settings: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + settings: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) class Hyperlink(Base, LinkRelations): @@ -502,7 +498,7 @@ class Representation(Base, MediaTypes): # Representation. thumbnail_of_id = Column(Integer, ForeignKey("representations.id"), index=True) - thumbnails: Mapped[List[Representation]] = relationship( + thumbnails: Mapped[list[Representation]] = relationship( "Representation", backref=backref("thumbnail_of", remote_side=[id]), lazy="joined", @@ -542,13 +538,6 @@ class Representation(Base, MediaTypes): # data root. local_content_path = Column(Unicode) - # A Representation may be a CachedMARCFile. - marc_file: Mapped[CachedMARCFile] = relationship( - "CachedMARCFile", - back_populates="representation", - cascade="all, delete-orphan", - ) - # At any given time, we will have a single representation for a # given URL and media type. __table_args__ = (UniqueConstraint("url", "media_type"),) @@ -1020,7 +1009,7 @@ def headers_to_string(cls, d): @classmethod def simple_http_get( cls, url, headers, **kwargs - ) -> Tuple[int, Dict[str, str], bytes]: + ) -> tuple[int, dict[str, str], bytes]: """The most simple HTTP-based GET.""" if not "allow_redirects" in kwargs: kwargs["allow_redirects"] = True diff --git a/core/model/work.py b/core/model/work.py index 8ce42a396..d53c0e2cf 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -7,7 +7,7 @@ from collections import Counter from datetime import date, datetime from decimal import Decimal -from typing import TYPE_CHECKING, Any, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast import pytz from sqlalchemy import ( @@ -19,7 +19,6 @@ ForeignKey, Integer, Numeric, - String, Unicode, ) from sqlalchemy.dialects.postgresql import INT4RANGE @@ -123,7 +122,7 @@ class Work(Base): id = Column(Integer, primary_key=True) # One Work may have copies scattered across many LicensePools. - license_pools: Mapped[List[LicensePool]] = relationship( + license_pools: Mapped[list[LicensePool]] = relationship( "LicensePool", backref="work", lazy="joined", uselist=True ) @@ -132,20 +131,20 @@ class Work(Base): presentation_edition_id = Column(Integer, ForeignKey("editions.id"), index=True) # One Work may have many associated WorkCoverageRecords. 
- coverage_records: Mapped[List[WorkCoverageRecord]] = relationship( + coverage_records: Mapped[list[WorkCoverageRecord]] = relationship( "WorkCoverageRecord", back_populates="work", cascade="all, delete-orphan" ) # One Work may be associated with many CustomListEntries. # However, a CustomListEntry may lose its Work without # ceasing to exist. - custom_list_entries: Mapped[List[CustomListEntry]] = relationship( + custom_list_entries: Mapped[list[CustomListEntry]] = relationship( "CustomListEntry", backref="work" ) # One Work may participate in many WorkGenre assignments. genres = association_proxy("work_genres", "genre", creator=WorkGenre.from_genre) - work_genres: Mapped[List[WorkGenre]] = relationship( + work_genres: Mapped[list[WorkGenre]] = relationship( "WorkGenre", backref="work", cascade="all, delete-orphan" ) audience = Column(Unicode, index=True) @@ -209,15 +208,9 @@ class Work(Base): # will be made to make the Work presentation ready. presentation_ready_exception = Column(Unicode, default=None, index=True) - # A precalculated MARC record containing metadata about this - # work that would be relevant to display in a library's public - # catalog. - marc_record = Column(String, default=None) - # These fields are potentially large and can be deferred if you # don't need all the data in a Work. LARGE_FIELDS = [ - "marc_record", "summary_text", ] @@ -264,7 +257,7 @@ def sort_author(self): return self.presentation_edition.sort_author or self.presentation_edition.author @property - def language(self) -> Optional[str]: + def language(self) -> str | None: if self.presentation_edition: return self.presentation_edition.language return None @@ -1017,9 +1010,6 @@ def calculate_presentation( # change it. self.last_update_time = utc_now() - if changed or policy.regenerate_marc_record: - self.calculate_marc_record() - if (changed or policy.update_search_index) and not exclude_search: self.external_index_needs_updating() @@ -1147,17 +1137,6 @@ def _ensure(s): l = [_ensure(s) for s in l] return "\n".join(l) - def calculate_marc_record(self): - from core.marc import Annotator, MARCExporter - - _db = Session.object_session(self) - record = MARCExporter.create_record( - self, annotator=Annotator, force_create=True - ) - WorkCoverageRecord.add_for( - self, operation=WorkCoverageRecord.GENERATE_MARC_OPERATION - ) - def active_license_pool(self, library: Library | None = None) -> LicensePool | None: # The active license pool is the one that *would* be # associated with a loan, were a loan to be issued right @@ -2206,9 +2185,7 @@ def delete(self, search_index=None): _db.delete(self) -def add_work_to_customlists_for_collection( - pool_or_work: Union[LicensePool, Work] -) -> None: +def add_work_to_customlists_for_collection(pool_or_work: LicensePool | Work) -> None: if isinstance(pool_or_work, Work): work = pool_or_work pools = work.license_pools diff --git a/core/opds2_import.py b/core/opds2_import.py index 418caa052..7f26213bc 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -1,20 +1,10 @@ from __future__ import annotations import logging +from collections.abc import Callable, Iterable from datetime import datetime from io import BytesIO, StringIO -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - Literal, - Optional, - Tuple, - Type, -) +from typing import TYPE_CHECKING, Any from urllib.parse import urljoin, urlparse import webpub_manifest_parser.opds2.ast as opds2_ast @@ -23,8 +13,20 @@ from uritemplate import URITemplate from 
webpub_manifest_parser.core import ManifestParserFactory, ManifestParserResult from webpub_manifest_parser.core.analyzer import NodeFinder -from webpub_manifest_parser.core.ast import Link, Manifestlike +from webpub_manifest_parser.core.ast import ( + ArrayOfCollectionsProperty, + Link, + Manifestlike, +) +from webpub_manifest_parser.core.properties import BooleanProperty from webpub_manifest_parser.errors import BaseError +from webpub_manifest_parser.opds2 import ( + ManifestParser, + OPDS2CollectionRolesRegistry, + OPDS2FeedParserFactory, + OPDS2SemanticAnalyzer, + OPDS2SyntaxAnalyzer, +) from webpub_manifest_parser.opds2.registry import ( OPDS2LinkRelationsRegistry, OPDS2MediaTypesRegistry, @@ -66,7 +68,6 @@ RightsStatus, Subject, ) -from core.model.configuration import ConfigurationSetting from core.model.constants import IdentifierType from core.opds_import import ( BaseOPDSAPI, @@ -137,6 +138,53 @@ def parse_manifest( return result +class PalaceOPDS2PresentationMetadata(opds2_ast.PresentationMetadata): # type: ignore[misc] + time_tracking = BooleanProperty( + "http://palaceproject.io/terms/timeTracking", False, default_value=False + ) + + +class PalaceOPDS2Publication(opds2_ast.OPDS2Publication): # type: ignore[misc] + metadata = opds2_ast.TypeProperty( + key="metadata", required=True, nested_type=PalaceOPDS2PresentationMetadata + ) + + +class PalaceOPDS2Feed(opds2_ast.OPDS2Feed): # type: ignore[misc] + publications = ArrayOfCollectionsProperty( + "publications", + required=False, + role=OPDS2CollectionRolesRegistry.PUBLICATIONS, + collection_type=PalaceOPDS2Publication, + ) + + +class PalaceOPDS2SyntaxAnalyzer(OPDS2SyntaxAnalyzer): # type: ignore[misc] + def _create_manifest(self) -> opds2_ast.OPDS2Feed: + return PalaceOPDS2Feed() + + +class PalaceOPDS2FeedParserFactory(OPDS2FeedParserFactory): # type: ignore[misc] + def create(self) -> ManifestParser: + """Create a new OPDS 2.0 parser. 
+ + :return: OPDS 2.0 parser + :rtype: Parser + """ + media_types_registry = OPDS2MediaTypesRegistry() + link_relations_registry = OPDS2LinkRelationsRegistry() + collection_roles_registry = OPDS2CollectionRolesRegistry() + syntax_analyzer = ( + PalaceOPDS2SyntaxAnalyzer() + ) # This is the only change from the base class + semantic_analyzer = OPDS2SemanticAnalyzer( + media_types_registry, link_relations_registry, collection_roles_registry + ) + parser = ManifestParser(syntax_analyzer, semantic_analyzer) + + return parser + + class OPDS2ImporterSettings(OPDSImporterSettings): custom_accept_header: str = FormField( default="{}, {};q=0.9, */*;q=0.1".format( @@ -152,7 +200,7 @@ class OPDS2ImporterSettings(OPDSImporterSettings): ), ) - ignored_identifier_types: List[str] = FormField( + ignored_identifier_types: list[str] = FormField( alias="IGNORED_IDENTIFIER_TYPE", default=[], form=ConfigurationFormItem( @@ -177,11 +225,11 @@ class OPDS2ImporterLibrarySettings(OPDSImporterLibrarySettings): class OPDS2API(BaseOPDSAPI): @classmethod - def settings_class(cls) -> Type[OPDS2ImporterSettings]: + def settings_class(cls) -> type[OPDS2ImporterSettings]: return OPDS2ImporterSettings @classmethod - def library_settings_class(cls) -> Type[OPDS2ImporterLibrarySettings]: + def library_settings_class(cls) -> type[OPDS2ImporterLibrarySettings]: return OPDS2ImporterLibrarySettings @classmethod @@ -194,16 +242,10 @@ def description(cls) -> str: def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) - # TODO: This needs to be refactored to use IntegrationConfiguration, - # but it has been temporarily rolled back, since the IntegrationConfiguration - # code caused problems fulfilling TOKEN_AUTH books in production. - # This should be fixed as part of the work PP-313 to fully remove - # ExternalIntegrations from our collections code. - token_auth_configuration = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, collection.external_integration - ) - self.token_auth_configuration = ( - token_auth_configuration.value if token_auth_configuration else None + self.token_auth_configuration: str | None = ( + collection.integration_configuration.context.get( + ExternalIntegration.TOKEN_AUTH + ) ) @classmethod @@ -249,6 +291,12 @@ def fulfill_token_auth( ) return fulfillment + if not self.token_auth_configuration: + self.log.warning( + "No token auth configuration found, unable to fulfill via OPDS2 token auth." + ) + return fulfillment + token = self.get_authentication_token( patron, licensepool.data_source, self.token_auth_configuration ) @@ -279,7 +327,7 @@ class OPDS2Importer(BaseOPDSImporter[OPDS2ImporterSettings]): NEXT_LINK_RELATION: str = "next" @classmethod - def settings_class(cls) -> Type[OPDS2ImporterSettings]: + def settings_class(cls) -> type[OPDS2ImporterSettings]: return OPDS2ImporterSettings def __init__( @@ -288,7 +336,7 @@ def __init__( collection: Collection, parser: RWPMManifestParser, data_source_name: str | None = None, - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): """Initialize a new instance of OPDS2Importer class. 
@@ -308,11 +356,6 @@ def __init__( self._parser = parser self.ignored_identifier_types = self.settings.ignored_identifier_types - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: - raise NotImplementedError("OPDS2Importer does not support this method") - def _is_identifier_allowed(self, identifier: Identifier) -> bool: """Check the identifier and return a boolean value indicating whether CM can import it. @@ -411,7 +454,7 @@ def _extract_contributors( return contributor_metadata_list def _extract_link( - self, link: Link, feed_self_url: str, default_link_rel: Optional[str] = None + self, link: Link, feed_self_url: str, default_link_rel: str | None = None ) -> LinkData: """Extract a LinkData object from webpub-manifest-parser's link. @@ -698,7 +741,7 @@ def _extract_publication_metadata( self, feed: opds2_ast.OPDS2Feed, publication: opds2_ast.OPDS2Publication, - data_source_name: Optional[str], + data_source_name: str | None, ) -> Metadata: """Extract a Metadata object from webpub-manifest-parser's publication. @@ -771,6 +814,13 @@ def _extract_publication_metadata( ) # Audiobook duration duration = publication.metadata.duration + # Not all parsers support time_tracking + time_tracking = getattr(publication.metadata, "time_tracking", False) + if medium != Edition.AUDIO_MEDIUM and time_tracking is True: + time_tracking = False + self.log.warning( + f"Ignoring the time tracking flag for entry {publication.metadata.identifier}" + ) feed_self_url = first_or_default( feed.links.get_by_rel(OPDS2LinkRelationsRegistry.SELF.key) @@ -804,6 +854,7 @@ def _extract_publication_metadata( licenses_reserved=0, patrons_in_hold_queue=0, formats=[], + should_track_playtime=time_tracking, ) formats = self._find_formats_in_non_open_access_acquisition_links( @@ -878,15 +929,6 @@ def _find_formats_in_non_open_access_acquisition_links( return formats - def external_integration(self, db: Session) -> ExternalIntegration: - """Return an external integration associated with this object. - :param db: Database session - :return: External integration associated with this object - """ - if self.collection is None: - raise ValueError("Collection is not set") - return self.collection.external_integration - @staticmethod def _get_publications( feed: opds2_ast.OPDS2Feed, @@ -1009,7 +1051,7 @@ def extract_next_links(self, feed: str | opds2_ast.OPDS2Feed) -> list[str]: def extract_last_update_dates( self, feed: str | opds2_ast.OPDS2Feed - ) -> list[tuple[Optional[str], Optional[datetime]]]: + ) -> list[tuple[str | None, datetime | None]]: """Extract last update date of the feed. 
:param feed: OPDS 2.0 feed @@ -1034,10 +1076,9 @@ def _parse_feed_links(self, links: list[core_ast.Link]) -> None: for link in links: if first_or_default(link.rels) == Hyperlink.TOKEN_AUTH: # Save the collection-wide token authentication endpoint - auth_setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, self.external_integration(self._db) + self.collection.integration_configuration.context_update( + {ExternalIntegration.TOKEN_AUTH: link.href} ) - auth_setting.value = link.href def extract_feed_data( self, feed: str | opds2_ast.OPDS2Feed, feed_url: str | None = None @@ -1100,7 +1141,7 @@ class OPDS2ImportMonitor(OPDSImportMonitor): MEDIA_TYPE = OPDS2MediaTypesRegistry.OPDS_FEED.key, "application/json" def _verify_media_type( - self, url: str, status_code: int, headers: Dict[str, str], feed: bytes + self, url: str, status_code: int, headers: dict[str, str], feed: bytes ) -> None: # Make sure we got an OPDS feed, and not an error page that was # sent with a 200 status code. diff --git a/core/opds_import.py b/core/opds_import.py index 5274a6ef2..03a46929c 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -5,25 +5,10 @@ import urllib from abc import ABC, abstractmethod from collections import defaultdict +from collections.abc import Callable, Generator, Iterable, Sequence from datetime import datetime from io import BytesIO -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - Generic, - Iterable, - List, - Literal, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - overload, -) +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast, overload from urllib.parse import urljoin, urlparse from xml.etree.ElementTree import Element @@ -32,15 +17,19 @@ from feedparser import FeedParserDict from flask_babel import lazy_gettext as _ from lxml import etree -from pydantic import HttpUrl +from pydantic import AnyHttpUrl from sqlalchemy.orm.session import Session -from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.circulation import ( + BaseCirculationAPI, + BaseCirculationApiSettings, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) from api.circulation_exceptions import CurrentlyAvailable, FormatNotAvailable, NotOnHold from api.saml.credential import SAMLCredentialManager -from api.selftest import HasCollectionSelfTests from core.classifier import Classifier -from core.config import IntegrationException from core.connection_config import ConnectionSetting from core.coverage import CoverageFailure from core.integration.base import integration_settings_load @@ -78,7 +67,6 @@ Subject, get_one, ) -from core.model.configuration import HasExternalIntegration from core.model.formats import FormatPrioritiesSettings from core.monitor import CollectionMonitor from core.saml.wayfless import ( @@ -86,7 +74,6 @@ SAMLWAYFlessFulfillmentError, SAMLWAYFlessSetttings, ) -from core.selftest import SelfTestResult from core.util import base64 from core.util.datetime_helpers import datetime_utc, to_utc, utc_now from core.util.http import HTTP, BadResponseException @@ -108,6 +95,7 @@ class OPDSXMLParser(XMLParser): "schema": "http://schema.org/", "atom": "http://www.w3.org/2005/Atom", "drm": "http://librarysimplified.org/terms/drm", + "palace": "http://palaceproject.io/terms", } @@ -115,10 +103,9 @@ class OPDSImporterSettings( ConnectionSetting, SAMLWAYFlessSetttings, FormatPrioritiesSettings, + BaseCirculationApiSettings, ): - _NO_DEFAULT_AUDIENCE = "" - - external_account_id: Optional[HttpUrl] = 
FormField( + external_account_id: AnyHttpUrl = FormField( form=ConfigurationFormItem( label=_("URL"), required=True, @@ -129,8 +116,8 @@ class OPDSImporterSettings( form=ConfigurationFormItem(label=_("Data source name"), required=True) ) - default_audience: str = FormField( - default=_NO_DEFAULT_AUDIENCE, + default_audience: str | None = FormField( + None, form=ConfigurationFormItem( label=_("Default audience"), description=_( @@ -138,15 +125,15 @@ class OPDSImporterSettings( "assume the books have this target audience." ), type=ConfigurationFormItemType.SELECT, - format="narrow", - options={_NO_DEFAULT_AUDIENCE: _("No default audience")}.update( - {audience: audience for audience in sorted(Classifier.AUDIENCES)} - ), + options={ + **{None: _("No default audience")}, + **{audience: audience for audience in sorted(Classifier.AUDIENCES)}, + }, required=False, ), ) - username: Optional[str] = FormField( + username: str | None = FormField( form=ConfigurationFormItem( label=_("Username"), description=_( @@ -156,7 +143,7 @@ class OPDSImporterSettings( ) ) - password: Optional[str] = FormField( + password: str | None = FormField( form=ConfigurationFormItem( label=_("Password"), description=_( @@ -185,7 +172,7 @@ class OPDSImporterSettings( ), ) - primary_identifier_source: Optional[str] = FormField( + primary_identifier_source: str | None = FormField( form=ConfigurationFormItem( label=_("Identifer"), required=False, @@ -229,7 +216,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: # Because all OPDS content is assumed to be simultaneously # available to all patrons, there is no such thing as a hold. @@ -352,7 +339,7 @@ def checkout( def can_fulfill_without_loan( self, - patron: Optional[Patron], + patron: Patron | None, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism, ) -> bool: @@ -371,8 +358,8 @@ def __init__( self, _db: Session, collection: Collection, - data_source_name: Optional[str], - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + data_source_name: str | None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): self._db = _db if collection.id is None: @@ -402,29 +389,23 @@ def __init__( @classmethod @abstractmethod - def settings_class(cls) -> Type[SettingsType]: + def settings_class(cls) -> type[SettingsType]: ... @abstractmethod def extract_feed_data( - self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: ... @abstractmethod def extract_last_update_dates( self, feed: str | bytes | FeedParserDict - ) -> List[Tuple[Optional[str], Optional[datetime]]]: + ) -> list[tuple[str | None, datetime | None]]: ... @abstractmethod - def extract_next_links(self, feed: str | bytes) -> List[str]: - ... - - @abstractmethod - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: + def extract_next_links(self, feed: str | bytes) -> list[str]: ... @overload @@ -432,10 +413,10 @@ def parse_identifier(self, identifier: str) -> Identifier: ... @overload - def parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: + def parse_identifier(self, identifier: str | None) -> Identifier | None: ... 
- def parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: + def parse_identifier(self, identifier: str | None) -> Identifier | None: """Parse the identifier and return an Identifier object representing it. :param identifier: String containing the identifier @@ -555,12 +536,12 @@ def update_work_for_edition( return pool, work def import_from_feed( - self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[ - List[Edition], - List[LicensePool], - List[Work], - Dict[str, List[CoverageFailure]], + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[ + list[Edition], + list[LicensePool], + list[Work], + dict[str, list[CoverageFailure]], ]: # Keep track of editions that were imported. Pools and works # for those editions may be looked up or created. @@ -643,11 +624,11 @@ def import_from_feed( class OPDSAPI(BaseOPDSAPI): @classmethod - def settings_class(cls) -> Type[OPDSImporterSettings]: + def settings_class(cls) -> type[OPDSImporterSettings]: return OPDSImporterSettings @classmethod - def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: + def library_settings_class(cls) -> type[OPDSImporterLibrarySettings]: return OPDSImporterLibrarySettings @classmethod @@ -678,15 +659,15 @@ class OPDSImporter(BaseOPDSImporter[OPDSImporterSettings]): PARSER_CLASS = OPDSXMLParser @classmethod - def settings_class(cls) -> Type[OPDSImporterSettings]: + def settings_class(cls) -> type[OPDSImporterSettings]: return OPDSImporterSettings def __init__( self, _db: Session, collection: Collection, - data_source_name: Optional[str] = None, - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + data_source_name: str | None = None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): """:param collection: LicensePools created by this OPDS import will be associated with the given Collection. If this is None, @@ -705,93 +686,14 @@ def __init__( """ super().__init__(_db, collection, data_source_name) - self.primary_identifier_source = None - if collection: - self.primary_identifier_source = collection.primary_identifier_source + self.primary_identifier_source = self.settings.primary_identifier_source # In general, we are cautious when mirroring resources so that # we don't, e.g. accidentally get our IP banned from # gutenberg.org. self.http_get = http_get or Representation.cautious_http_get - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: - """Raise an exception if the given feed contains nothing that can, - even theoretically, be turned into a LicensePool. - - By default, this means the feed must link to open-access content - that can actually be retrieved. - """ - metadata, failures = self.extract_feed_data(feed, feed_url) - get_attempts = 0 - - # Find an open-access link, and try to GET it just to make - # sure OPDS feed isn't hiding non-open-access stuff behind an - # open-access link. - # - # To avoid taking forever or antagonizing API providers, we'll - # give up after `max_get_attempts` failures. - for link in self._open_access_links(list(metadata.values())): - url = link.href - success = self._is_open_access_link(url, link.media_type) - if success: - return True - get_attempts += 1 - if get_attempts >= max_get_attempts: - error = ( - "Was unable to GET supposedly open-access content such as %s (tried %s times)" - % (url, get_attempts) - ) - explanation = "This might be an OPDS For Distributors feed, or it might require different authentication credentials." 
- raise IntegrationException(error, explanation) - - raise IntegrationException( - "No open-access links were found in the OPDS feed.", - "This might be an OPDS for Distributors feed.", - ) - - @classmethod - def _open_access_links( - cls, metadatas: List[Metadata] - ) -> Generator[LinkData, None, None]: - """Find all open-access links in a list of Metadata objects. - - :param metadatas: A list of Metadata objects. - :yield: A sequence of `LinkData` objects. - """ - for item in metadatas: - if not item.circulation: - continue - for link in item.circulation.links: - if link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - yield link - - def _is_open_access_link( - self, url: str, type: Optional[str] - ) -> str | Literal[False]: - """Is `url` really an open-access link? - - That is, can we make a normal GET request and get something - that looks like a book? - """ - headers = {} - if type: - headers["Accept"] = type - status, headers, body = self.http_get(url, headers=headers) - if status == 200 and len(body) > 1024 * 10: - # We could also check the media types, but this is good - # enough for now. - return "Found a book-like thing at %s" % url - self.log.error( - "Supposedly open-access link %s didn't give us a book. Status=%s, body length=%s", - url, - status, - len(body), - ) - return False - - def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: + def extract_next_links(self, feed: str | bytes | FeedParserDict) -> list[str]: if isinstance(feed, (bytes, str)): parsed = feedparser.parse(feed) else: @@ -806,7 +708,7 @@ def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: def extract_last_update_dates( self, feed: str | bytes | FeedParserDict - ) -> List[Tuple[Optional[str], Optional[datetime]]]: + ) -> list[tuple[str | None, datetime | None]]: if isinstance(feed, (bytes, str)): parsed_feed = feedparser.parse(feed) else: @@ -818,8 +720,8 @@ def extract_last_update_dates( return [x for x in dates if x and x[1]] def extract_feed_data( - self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: """Turn an OPDS feed into lists of Metadata and CirculationData objects, with associated messages and next_links. """ @@ -909,6 +811,19 @@ def extract_feed_data( combined_circ["data_source"] = self.data_source_name combined_circ["primary_identifier"] = identifier_obj + + combined_circ["should_track_playtime"] = xml_data_dict.get( + "should_track_playtime", False + ) + if ( + combined_circ["should_track_playtime"] + and xml_data_dict["medium"] != Edition.AUDIO_MEDIUM + ): + combined_circ["should_track_playtime"] = False + self.log.warning( + f"Ignoring the time tracking flag for entry {identifier_obj.identifier}" + ) + circulation = CirculationData(**combined_circ) self._add_format_data(circulation) @@ -930,18 +845,18 @@ def extract_feed_data( @overload def handle_failure( self, urn: str, failure: Identifier - ) -> Tuple[Identifier, Identifier]: + ) -> tuple[Identifier, Identifier]: ... @overload def handle_failure( self, urn: str, failure: CoverageFailure - ) -> Tuple[Identifier, CoverageFailure]: + ) -> tuple[Identifier, CoverageFailure]: ... 
def handle_failure( self, urn: str, failure: Identifier | CoverageFailure - ) -> Tuple[Identifier, CoverageFailure | Identifier]: + ) -> tuple[Identifier, CoverageFailure | Identifier]: """Convert a URN and a failure message that came in through an OPDS feed into an Identifier and a CoverageFailure object. @@ -972,8 +887,8 @@ def _add_format_data(cls, circulation: CirculationData) -> None: @classmethod def combine( - self, d1: Optional[Dict[str, Any]], d2: Optional[Dict[str, Any]] - ) -> Dict[str, Any]: + self, d1: dict[str, Any] | None, d2: dict[str, Any] | None + ) -> dict[str, Any]: """Combine two dictionaries that can be used as keyword arguments to the Metadata constructor. """ @@ -1010,7 +925,7 @@ def combine( def extract_data_from_feedparser( self, feed: str | bytes, data_source: DataSource - ) -> Tuple[Dict[str, Any], Dict[str, CoverageFailure]]: + ) -> tuple[dict[str, Any], dict[str, CoverageFailure]]: feedparser_parsed = feedparser.parse(feed) values = {} failures = {} @@ -1040,9 +955,9 @@ def extract_metadata_from_elementtree( cls, feed: bytes | str, data_source: DataSource, - feed_url: Optional[str] = None, - do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - ) -> Tuple[Dict[str, Any], Dict[str, CoverageFailure]]: + feed_url: str | None = None, + do_get: Callable[..., tuple[int, Any, bytes]] | None = None, + ) -> tuple[dict[str, Any], dict[str, CoverageFailure]]: """Parse the OPDS as XML and extract all author and subject information, as well as ratings and medium. @@ -1099,23 +1014,23 @@ def extract_metadata_from_elementtree( return values, failures @classmethod - def _datetime(cls, entry: Dict[str, str], key: str) -> Optional[datetime]: + def _datetime(cls, entry: dict[str, str], key: str) -> datetime | None: value = entry.get(key, None) if not value: return None return datetime_utc(*value[:6]) def last_update_date_for_feedparser_entry( - self, entry: Dict[str, Any] - ) -> Tuple[Optional[str], Optional[datetime]]: + self, entry: dict[str, Any] + ) -> tuple[str | None, datetime | None]: identifier = entry.get("id") updated = self._datetime(entry, "updated_parsed") return identifier, updated @classmethod def data_detail_for_feedparser_entry( - cls, entry: Dict[str, str], data_source: DataSource - ) -> Tuple[Optional[str], Optional[Dict[str, Any]], Optional[CoverageFailure]]: + cls, entry: dict[str, str], data_source: DataSource + ) -> tuple[str | None, dict[str, Any] | None, CoverageFailure | None]: """Turn an entry dictionary created by feedparser into dictionaries of data that can be used as keyword arguments to the Metadata and CirculationData constructors. @@ -1140,8 +1055,8 @@ def data_detail_for_feedparser_entry( @classmethod def _data_detail_for_feedparser_entry( - cls, entry: Dict[str, Any], metadata_data_source: DataSource - ) -> Dict[str, Any]: + cls, entry: dict[str, Any], metadata_data_source: DataSource + ) -> dict[str, Any]: """Helper method that extracts metadata and circulation data from a feedparser entry. This method can be overridden in tests to check that callers handle things properly when it throws an exception. 
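As an aside on the feedparser hunks above: `_datetime` and `last_update_date_for_feedparser_entry` rebuild a timezone-aware datetime from feedparser's `updated_parsed` struct_time. A minimal, self-contained sketch of that behavior, assuming only stock feedparser (the sample feed is invented for illustration):

```python
import datetime

import feedparser  # the same library the importer delegates to

# An invented single-entry Atom feed, just to exercise updated_parsed.
FEED = """<?xml version="1.0"?>
<feed xmlns="http://www.w3.org/2005/Atom">
  <entry>
    <id>urn:isbn:9780000000001</id>
    <title>Example</title>
    <updated>2024-01-02T03:04:05Z</updated>
  </entry>
</feed>"""

entry = feedparser.parse(FEED).entries[0]
value = entry.get("updated_parsed")  # a time.struct_time, or None if absent
# Like _datetime() above: keep (year, month, day, hour, minute, second)
# and pin the result to UTC.
updated = datetime.datetime(*value[:6], tzinfo=datetime.timezone.utc)
print(entry.get("id"), updated)  # urn:isbn:9780000000001 2024-01-02 03:04:05+00:00
```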
@@ -1201,7 +1116,7 @@ def _data_detail_for_feedparser_entry(

         links = []

-        def summary_to_linkdata(detail: Optional[Dict[str, str]]) -> Optional[LinkData]:
+        def summary_to_linkdata(detail: dict[str, str] | None) -> LinkData | None:
             if not detail:
                 return None
             if not "value" in detail or not detail["value"]:
@@ -1254,7 +1169,7 @@ def rights_uri(cls, rights_string: str) -> str:
         return RightsStatus.rights_uri_from_string(rights_string)

     @classmethod
-    def rights_uri_from_feedparser_entry(cls, entry: Dict[str, str]) -> str:
+    def rights_uri_from_feedparser_entry(cls, entry: dict[str, str]) -> str:
         """Extract a rights URI from a parsed feedparser entry.

         :return: A rights URI.
@@ -1263,7 +1178,7 @@ def rights_uri_from_feedparser_entry(cls, entry: Dict[str, str]) -> str:
         return cls.rights_uri(rights)

     @classmethod
-    def rights_uri_from_entry_tag(cls, entry: Element) -> Optional[str]:
+    def rights_uri_from_entry_tag(cls, entry: Element) -> str | None:
         """Extract a rights string from an lxml tag.

         :return: A rights URI.
@@ -1325,7 +1240,7 @@ def coveragefailures_from_messages(
     @classmethod
     def coveragefailure_from_message(
         cls, data_source: DataSource, message: OPDSMessage
-    ) -> Optional[CoverageFailure]:
+    ) -> CoverageFailure | None:
         """Turn a <simplified:message> tag into a CoverageFailure."""
         _db = Session.object_session(data_source)
@@ -1370,9 +1285,9 @@ def detail_for_elementtree_entry(
         parser: OPDSXMLParser,
         entry_tag: Element,
         data_source: DataSource,
-        feed_url: Optional[str] = None,
-        do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None,
-    ) -> Tuple[Optional[str], Optional[Dict[str, Any]], Optional[CoverageFailure]]:
+        feed_url: str | None = None,
+        do_get: Callable[..., tuple[int, Any, bytes]] | None = None,
+    ) -> tuple[str | None, dict[str, Any] | None, CoverageFailure | None]:
         """Turn an <entry> tag into a dictionary of metadata that can be
         used as keyword arguments to the Metadata constructor.
@@ -1404,16 +1319,16 @@ def _detail_for_elementtree_entry(
         cls,
         parser: OPDSXMLParser,
         entry_tag: Element,
-        feed_url: Optional[str] = None,
-        do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None,
-    ) -> Dict[str, Any]:
+        feed_url: str | None = None,
+        do_get: Callable[..., tuple[int, Any, bytes]] | None = None,
+    ) -> dict[str, Any]:
         """Helper method that extracts metadata and circulation data from
         an elementtree entry. This method can be overridden in tests to
         check that callers handle things properly when it throws an
         exception.
         """
         # We will fill this dictionary with all the information
         # we can find.
-        data: Dict[str, Any] = dict()
+        data: dict[str, Any] = dict()
         alternate_identifiers = []
         for id_tag in parser._xpath(entry_tag, "dcterms:identifier"):
@@ -1470,10 +1385,14 @@ def _detail_for_elementtree_entry(
             # This entry had an issued tag, but it was in a format we couldn't parse.
             pass

+        data["should_track_playtime"] = False
+        time_tracking_tag = parser._xpath(entry_tag, "palace:timeTracking")
+        if time_tracking_tag:
+            data["should_track_playtime"] = time_tracking_tag[0].text.lower() == "true"
         return data

     @classmethod
-    def get_medium_from_links(cls, links: List[LinkData]) -> Optional[str]:
+    def get_medium_from_links(cls, links: list[LinkData]) -> str | None:
         """Get medium if derivable from information in an acquisition link."""
         derived = None
         for link in links:
@@ -1489,7 +1408,7 @@ def get_medium_from_links(cls, links: List[LinkData]) -> Optional[str]:
         return derived

     @classmethod
-    def extract_identifier(cls, identifier_tag: Element) -> Optional[IdentifierData]:
+    def extract_identifier(cls, identifier_tag: Element) -> IdentifierData | None:
         """Turn a <dcterms:identifier> tag into an IdentifierData object."""
         try:
             if identifier_tag.text is None:
@@ -1503,8 +1422,8 @@ def extract_identifier(cls, identifier_tag: Element) -> Optional[IdentifierData]

     @classmethod
     def extract_medium(
-        cls, entry_tag: Optional[Element], default: Optional[str] = Edition.BOOK_MEDIUM
-    ) -> Optional[str]:
+        cls, entry_tag: Element | None, default: str | None = Edition.BOOK_MEDIUM
+    ) -> str | None:
         """Derive a value for Edition.medium from schema:additionalType or
         from a subtag.
@@ -1528,7 +1447,7 @@ def extract_medium(
     @classmethod
     def extract_contributor(
         cls, parser: OPDSXMLParser, author_tag: Element
-    ) -> Optional[ContributorData]:
+    ) -> ContributorData | None:
         """Turn an <author> tag into a ContributorData object."""
         subtag = parser.text_of_optional_subtag
         sort_name = subtag(author_tag, "simplified:sort_name")
@@ -1591,9 +1510,9 @@ def extract_subject(
     def extract_link(
         cls,
         link_tag: Element,
-        feed_url: Optional[str] = None,
-        entry_rights_uri: Optional[str] = None,
-    ) -> Optional[LinkData]:
+        feed_url: str | None = None,
+        entry_rights_uri: str | None = None,
+    ) -> LinkData | None:
         """Convert a <link> tag into a LinkData object.

         :param feed_url: The URL to the enclosing feed, for use in resolving
@@ -1628,10 +1547,10 @@ def extract_link(
     def make_link_data(
         cls,
         rel: str,
-        href: Optional[str] = None,
-        media_type: Optional[str] = None,
-        rights_uri: Optional[str] = None,
-        content: Optional[str] = None,
+        href: str | None = None,
+        media_type: str | None = None,
+        rights_uri: str | None = None,
+        content: str | None = None,
     ) -> LinkData:
         """Hook method for creating a LinkData object.

@@ -1646,7 +1565,7 @@ def make_link_data(
         )

     @classmethod
-    def consolidate_links(cls, links: Sequence[LinkData | None]) -> List[LinkData]:
+    def consolidate_links(cls, links: Sequence[LinkData | None]) -> list[LinkData]:
         """Try to match up links with their thumbnails.
If link n is an image and link n+1 is a thumbnail, then the @@ -1709,7 +1628,7 @@ def consolidate_links(cls, links: Sequence[LinkData | None]) -> List[LinkData]: return new_links @classmethod - def extract_measurement(cls, rating_tag: Element) -> Optional[MeasurementData]: + def extract_measurement(cls, rating_tag: Element) -> MeasurementData | None: type = rating_tag.get("{http://schema.org/}additionalType") value = rating_tag.get("{http://schema.org/}ratingValue") if not value: @@ -1730,16 +1649,14 @@ def extract_measurement(cls, rating_tag: Element) -> Optional[MeasurementData]: return None @classmethod - def extract_series(cls, series_tag: Element) -> Tuple[Optional[str], Optional[str]]: + def extract_series(cls, series_tag: Element) -> tuple[str | None, str | None]: attr = series_tag.attrib series_name = attr.get("{http://schema.org/}name", None) series_position = attr.get("{http://schema.org/}position", None) return series_name, series_position -class OPDSImportMonitor( - CollectionMonitor, HasCollectionSelfTests, HasExternalIntegration -): +class OPDSImportMonitor(CollectionMonitor): """Periodically monitor a Collection's OPDS archive feed and import every title it mentions. """ @@ -1758,7 +1675,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[BaseOPDSImporter[OPDSImporterSettings]], + import_class: type[BaseOPDSImporter[OPDSImporterSettings]], force_reimport: bool = False, **import_class_kwargs: Any, ) -> None: @@ -1779,16 +1696,13 @@ def __init__( "Collection %s has no associated data source." % collection.name ) - self.external_integration_id = collection.external_integration.id - feed_url = self.opds_url(collection) - self.feed_url = "" if feed_url is None else feed_url - self.force_reimport = force_reimport self.importer = import_class(_db, collection=collection, **import_class_kwargs) settings = self.importer.settings self.username = settings.username self.password = settings.password + self.feed_url = settings.external_account_id self.custom_accept_header = settings.custom_accept_header self._max_retry_count = settings.max_retry_count @@ -1797,35 +1711,9 @@ def __init__( self._feed_base_url = f"{parsed_url.scheme}://{parsed_url.hostname}{(':' + str(parsed_url.port)) if parsed_url.port else ''}/" super().__init__(_db, collection) - def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: - return get_one(_db, ExternalIntegration, id=self.external_integration_id) - - def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: - """Retrieve the first page of the OPDS feed""" - first_page = self.run_test( - "Retrieve the first page of the OPDS feed (%s)" % self.feed_url, - self.follow_one_link, - self.feed_url, - ) - yield first_page - if not first_page.result: - return - - # We got a page, but does it have anything the importer can - # turn into a Work? - # - # By default, this means it must contain an open-access link. - next_links, content = first_page.result - yield self.run_test( - "Checking for importable content", - self.importer.assert_importable_content, - content, - self.feed_url, - ) - def _get( - self, url: str, headers: Dict[str, str] - ) -> Tuple[int, Dict[str, str], bytes]: + self, url: str, headers: dict[str, str] + ) -> tuple[int, dict[str, str], bytes]: """Make the sort of HTTP request that's normal for an OPDS feed. Long timeout, raise error on anything but 2xx or 3xx. 
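A note on the `extract_measurement` and `extract_series` hunks above: schema.org values arrive as namespace-qualified attributes, which `xml.etree` exposes in Clark notation (`{namespace}localname`). A small sketch under that assumption (the sample tag is invented):

```python
from xml.etree.ElementTree import fromstring

# Invented series tag carrying schema.org attributes, as an OPDS feed might.
series_tag = fromstring(
    '<series xmlns:schema="http://schema.org/" '
    'schema:name="Example Series" schema:position="3"/>'
)

# Namespaced attributes are keyed in Clark notation, which is the form
# extract_series reads them in.
name = series_tag.attrib.get("{http://schema.org/}name")
position = series_tag.attrib.get("{http://schema.org/}position")
print(name, position)  # Example Series 3
```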
@@ -1852,7 +1740,7 @@ def _get_accept_header(self) -> str: ] ) - def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: + def _update_headers(self, headers: dict[str, str] | None) -> dict[str, str]: headers = dict(headers) if headers else {} if self.username and self.password and not "Authorization" in headers: headers["Authorization"] = "Basic %s" % base64.b64encode( @@ -1866,15 +1754,7 @@ def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: return headers - def opds_url(self, collection: Collection) -> Optional[str]: - """Returns the OPDS import URL for the given collection. - - By default, this URL is stored as the external account ID, but - subclasses may override this. - """ - return collection.external_account_id - - def data_source(self, collection: Collection) -> Optional[DataSource]: + def data_source(self, collection: Collection) -> DataSource | None: """Returns the data source name for the given collection. By default, this URL is stored as a setting on the collection, but @@ -1913,7 +1793,7 @@ def feed_contains_new_data(self, feed: bytes | str) -> bool: return new_data def identifier_needs_import( - self, identifier: Optional[Identifier], last_updated_remote: Optional[datetime] + self, identifier: Identifier | None, last_updated_remote: datetime | None ) -> bool: """Does the remote side have new information about this Identifier? @@ -1979,7 +1859,7 @@ def identifier_needs_import( return False def _verify_media_type( - self, url: str, status_code: int, headers: Dict[str, str], feed: bytes + self, url: str, status_code: int, headers: dict[str, str], feed: bytes ) -> None: # Make sure we got an OPDS feed, and not an error page that was # sent with a 200 status code. @@ -1993,8 +1873,8 @@ def _verify_media_type( ) def follow_one_link( - self, url: str, do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None - ) -> Tuple[List[str], Optional[bytes]]: + self, url: str, do_get: Callable[..., tuple[int, Any, bytes]] | None = None + ) -> tuple[list[str], bytes | None]: """Download a representation of a URL and extract the useful information. @@ -2023,13 +1903,13 @@ def follow_one_link( def import_one_feed( self, feed: bytes | str - ) -> Tuple[List[Edition], Dict[str, List[CoverageFailure]]]: + ) -> tuple[list[Edition], dict[str, list[CoverageFailure]]]: """Import every book mentioned in an OPDS feed.""" # Because we are importing into a Collection, we will immediately # mark a book as presentation-ready if possible. imported_editions, pools, works, failures = self.importer.import_from_feed( - feed, feed_url=self.opds_url(self.collection) + feed, feed_url=self.feed_url ) # Create CoverageRecords for the successful imports. 
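For reference, the `_update_headers` hunk above adds a standard HTTP Basic credential when a username and password are configured. A stdlib-only sketch of the same construction (the helper name here is illustrative, not a project API):

```python
import base64


def with_basic_auth(
    headers: dict[str, str] | None, username: str, password: str
) -> dict[str, str]:
    """Copy the headers and add a Basic Authorization header if missing."""
    headers = dict(headers) if headers else {}
    if "Authorization" not in headers:
        token = base64.b64encode(f"{username}:{password}".encode()).decode()
        headers["Authorization"] = f"Basic {token}"
    return headers


print(with_basic_auth(None, "feeduser", "secret"))
# {'Authorization': 'Basic ZmVlZHVzZXI6c2VjcmV0'}
```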
@@ -2051,9 +1931,9 @@ def import_one_feed( return imported_editions, failures - def _get_feeds(self) -> Iterable[Tuple[str, bytes]]: + def _get_feeds(self) -> Iterable[tuple[str, bytes]]: feeds = [] - queue = [self.feed_url] + queue = [cast(str, self.feed_url)] seen_links = set() # First, follow the feed's next links until we reach a page with diff --git a/core/python_expression_dsl/evaluator.py b/core/python_expression_dsl/evaluator.py index 8f3b18083..14e02d8c8 100644 --- a/core/python_expression_dsl/evaluator.py +++ b/core/python_expression_dsl/evaluator.py @@ -1,7 +1,7 @@ import operator import types +from collections.abc import Callable from copy import copy, deepcopy -from typing import Callable, Dict, List, Optional, Union from multipledispatch import dispatch @@ -74,8 +74,8 @@ class DSLEvaluationVisitor(Visitor): def __init__( self, - context: Optional[Union[Dict, object]] = None, - safe_classes: Optional[List[type]] = None, + context: dict | object | None = None, + safe_classes: list[type] | None = None, ): """Initialize a new instance of DSLEvaluationVisitor class. @@ -83,8 +83,8 @@ def __init__( :param safe_classes: Optional list of classes which methods can be called. By default it contains only built-in classes: float, int, str """ - self._context: Optional[Union[Dict, object]] = {} - self._safe_classes: Optional[List[type]] = [] + self._context: dict | object | None = {} + self._safe_classes: list[type] | None = [] self._current_scope = None self._root_dot_node = None @@ -95,7 +95,7 @@ def __init__( self.safe_classes = safe_classes @staticmethod - def _get_attribute_value(obj: Union[Dict, object], attribute: str): + def _get_attribute_value(obj: dict | object, attribute: str): """Return the attribute's value by its name. :param obj: Object or a dictionary containing the attribute @@ -121,7 +121,7 @@ def _get_attribute_value(obj: Union[Dict, object], attribute: str): def _evaluate_unary_expression( self, unary_expression: UnaryExpression, - available_operators: Dict[Operator, Callable], + available_operators: dict[Operator, Callable], ): """Evaluate the unary expression. @@ -147,7 +147,7 @@ def _evaluate_unary_expression( def _evaluate_binary_expression( self, binary_expression: BinaryExpression, - available_operators: Dict[Operator, Callable], + available_operators: dict[Operator, Callable], ): """Evaluate the binary expression. @@ -172,7 +172,7 @@ def _evaluate_binary_expression( return result @property - def context(self) -> Union[Dict, object]: + def context(self) -> dict | object: """Return the evaluation context. :return: Evaluation context @@ -180,7 +180,7 @@ def context(self) -> Union[Dict, object]: return self._context @context.setter - def context(self, value: Union[Dict, object]): + def context(self, value: dict | object): """Set the evaluation context. :param value: New evaluation context @@ -202,7 +202,7 @@ def context(self, value: Union[Dict, object]): self._context = new_context @property - def safe_classes(self) -> Optional[List[type]]: + def safe_classes(self) -> list[type] | None: """Return a list of classes which methods can be called. :return: List of safe classes which methods can be called @@ -210,7 +210,7 @@ def safe_classes(self) -> Optional[List[type]]: return self._safe_classes @safe_classes.setter - def safe_classes(self, value: List[type]): + def safe_classes(self, value: list[type]): """Set safe classes which methods can be called. 
        :param value: List of safe classes which methods can be called
@@ -397,8 +397,8 @@ def parser(self) -> DSLParser:
     def evaluate(
         self,
         expression: str,
-        context: Optional[Union[Dict, object]] = None,
-        safe_classes: Optional[List[type]] = None,
+        context: dict | object | None = None,
+        safe_classes: list[type] | None = None,
     ):
         """Evaluate the expression and return the resulting value.
diff --git a/core/python_expression_dsl/util.py b/core/python_expression_dsl/util.py
index 4f3f4ba16..54bd2ebaf 100644
--- a/core/python_expression_dsl/util.py
+++ b/core/python_expression_dsl/util.py
@@ -1,4 +1,4 @@
-from typing import Optional, Type, TypeVar
+from typing import TypeVar

 from pyparsing import ParseResults

@@ -55,8 +55,8 @@ def _parse_number(tokens: ParseResults) -> Number:

 def _parse_unary_expression(
-    expression_type: Type[UE], tokens: ParseResults
-) -> Optional[UE]:
+    expression_type: type[UE], tokens: ParseResults
+) -> UE | None:
     """Transform the token into a unary expression.

     :param tokens: ParseResults objects
@@ -80,7 +80,7 @@ def _parse_unary_expression(

 def _parse_unary_arithmetic_expression(
     tokens: ParseResults,
-) -> Optional[UnaryArithmeticExpression]:
+) -> UnaryArithmeticExpression | None:
     """Transform the token into a UnaryArithmeticExpression node.

     :param tokens: ParseResults objects
@@ -92,7 +92,7 @@ def _parse_unary_arithmetic_expression(

 def _parse_unary_boolean_expression(
     tokens: ParseResults,
-) -> Optional[UnaryBooleanExpression]:
+) -> UnaryBooleanExpression | None:
     """Transform the token into a UnaryBooleanExpression node.

     :param tokens: ParseResults objects
@@ -103,8 +103,8 @@ def _parse_unary_boolean_expression(

 def _parse_binary_expression(
-    expression_type: Type[BE], tokens: ParseResults
-) -> Optional[BE]:
+    expression_type: type[BE], tokens: ParseResults
+) -> BE | None:
     """Transform the token into a BinaryExpression node.

     :param tokens: ParseResults objects
@@ -129,7 +129,7 @@ def _parse_binary_expression(

 def _parse_binary_arithmetic_expression(
     tokens: ParseResults,
-) -> Optional[BinaryArithmeticExpression]:
+) -> BinaryArithmeticExpression | None:
     """Transform the token into a BinaryArithmeticExpression node.

     :param tokens: ParseResults objects
@@ -141,7 +141,7 @@ def _parse_binary_arithmetic_expression(

 def _parse_binary_boolean_expression(
     tokens: ParseResults,
-) -> Optional[BinaryBooleanExpression]:
+) -> BinaryBooleanExpression | None:
     """Transform the token into a BinaryBooleanExpression node.

     :param tokens: ParseResults objects
@@ -153,7 +153,7 @@ def _parse_binary_boolean_expression(

 def _parse_comparison_expression(
     tokens: ParseResults,
-) -> Optional[ComparisonExpression]:
+) -> ComparisonExpression | None:
     """Transform the token into a ComparisonExpression node.
:param tokens: ParseResults objects diff --git a/core/query/coverage.py b/core/query/coverage.py index 0a9db44fa..07ba502c5 100644 --- a/core/query/coverage.py +++ b/core/query/coverage.py @@ -1,5 +1,3 @@ -from typing import List - from sqlalchemy.orm.session import Session from core.model.coverage import EquivalencyCoverageRecord @@ -9,8 +7,8 @@ class EquivalencyCoverageQueries: @classmethod def add_coverage_for_identifiers_chain( - cls, identifiers: List[Identifier], _db=None - ) -> List[EquivalencyCoverageRecord]: + cls, identifiers: list[Identifier], _db=None + ) -> list[EquivalencyCoverageRecord]: """Hunt down any recursive identifiers that may be touched by these identifiers set all the possible coverages to reset and recompute the chain """ @@ -29,7 +27,7 @@ def add_coverage_for_identifiers_chain( ) # Need to be reset - equivs: List[Equivalency] = Equivalency.for_identifiers( + equivs: list[Equivalency] = Equivalency.for_identifiers( _db, (p[0] for p in parent_ids) ) records = [] diff --git a/core/query/customlist.py b/core/query/customlist.py index c1ce20dab..8a50bafd4 100644 --- a/core/query/customlist.py +++ b/core/query/customlist.py @@ -2,7 +2,6 @@ import datetime import json -import logging from typing import TYPE_CHECKING from api.admin.problem_details import ( @@ -14,20 +13,28 @@ from core.model.customlist import CustomList, CustomListEntry from core.model.library import Library from core.model.licensing import LicensePool +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: from sqlalchemy.orm import Session -class CustomListQueries: +class CustomListQueries(LoggerMixin): @classmethod def share_locally_with_library( cls, _db, customlist: CustomList, library: Library ) -> ProblemDetail | bool: # All customlist collections must be present in the library + log = cls.logger() + log.info( + f"Attempting to share customlist '{customlist.name}' with library '{library.name}'." + ) for collection in customlist.collections: if collection not in library.collections: + log.info( + f"Unable to share: Collection '{collection.name}' is missing from the library." + ) return CUSTOMLIST_SOURCE_COLLECTION_MISSING # All entries must be valid for the library @@ -43,14 +50,18 @@ def share_locally_with_library( .first() ) if valid_license is None: + log.info(f"Unable to share: No license for work '{entry.work.title}'.") return CUSTOMLIST_ENTRY_NOT_VALID_FOR_LIBRARY customlist.shared_locally_with_libraries.append(library) + log.info( + f"Successfully shared '{customlist.name}' with library '{library.name}'." 
+ ) return True @classmethod def populate_query_pages( - self, + cls, _db: Session, custom_list: CustomList, start_page: int = 1, @@ -67,7 +78,7 @@ def populate_query_pages( :param json_query: If provided, use this json query rather than that of the custom list """ - log = logging.getLogger("Auto Update Custom List") + log = cls.logger() search = ExternalSearchIndex(_db) if not custom_list.auto_update_query: diff --git a/core/query/playtime_entries.py b/core/query/playtime_entries.py index de8270169..6b9f79c3e 100644 --- a/core/query/playtime_entries.py +++ b/core/query/playtime_entries.py @@ -1,5 +1,4 @@ import logging -from typing import List, Tuple from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session @@ -26,7 +25,7 @@ def insert_playtime_entries( collection: Collection, library: Library, data: PlaytimeEntriesPost, - ) -> Tuple[List, PlaytimeEntriesPostSummary]: + ) -> tuple[list, PlaytimeEntriesPostSummary]: """Insert into the database playtime entries from a request""" responses = [] summary = PlaytimeEntriesPostSummary() diff --git a/core/saml/wayfless.py b/core/saml/wayfless.py index 5a6e655dd..d40305e1f 100644 --- a/core/saml/wayfless.py +++ b/core/saml/wayfless.py @@ -1,5 +1,3 @@ -from typing import Optional - from flask_babel import lazy_gettext as _ from core.exceptions import BaseError @@ -18,7 +16,7 @@ class SAMLWAYFlessConstants: class SAMLWAYFlessSetttings(BaseSettings): - saml_wayfless_url_template: Optional[str] = FormField( + saml_wayfless_url_template: str | None = FormField( default=None, form=ConfigurationFormItem( label=_("SAML WAYFless URL Template"), diff --git a/core/scripts.py b/core/scripts.py index e5d4f4ab6..19e839533 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -8,10 +8,10 @@ import traceback import unicodedata import uuid +from collections.abc import Generator from enum import Enum -from typing import Generator, Optional, Type -from sqlalchemy import and_, exists, or_, tuple_ +from sqlalchemy import and_, exists, or_, select, tuple_ from sqlalchemy.orm import Query, Session, defer from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound @@ -23,6 +23,7 @@ Filter, SearchIndexCoverageProvider, ) +from core.integration.goals import Goals from core.lane import Lane from core.metadata_layer import TimestampData from core.model import ( @@ -35,6 +36,7 @@ Edition, ExternalIntegration, Identifier, + IntegrationConfiguration, Library, LicensePool, LicensePoolDeliveryMechanism, @@ -118,7 +120,7 @@ def parse_time(cls, time_string): continue raise ValueError("Could not parse time: %s" % time_string) - def __init__(self, _db=None, services: Optional[Services] = None, *args, **kwargs): + def __init__(self, _db=None, services: Services | None = None, *args, **kwargs): """Basic constructor. 
:_db: A database session to be used instead of @@ -1111,14 +1113,23 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): args = self.parse_command_line(_db, cmd_args=cmd_args) if args.name: name = args.name - collection = get_one(_db, Collection, name=name) + collection = Collection.by_name(_db, name) if collection: collections = [collection] else: output.write("Could not locate collection by name: %s" % name) collections = [] else: - collections = _db.query(Collection).order_by(Collection.name).all() + collections = ( + _db.execute( + select(Collection) + .join(IntegrationConfiguration) + .where(IntegrationConfiguration.goal == Goals.LICENSE_GOAL) + .order_by(IntegrationConfiguration.name) + ) + .scalars() + .all() + ) if not collections: output.write("No collections found.\n") for collection in collections: @@ -1243,7 +1254,7 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): protocol = None name = args.name protocol = args.protocol - collection = get_one(_db, Collection, Collection.name == name) + collection = Collection.by_name(_db, name) if not collection: if protocol: collection, is_new = Collection.by_name_and_protocol( @@ -1258,20 +1269,16 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): ) config = collection.integration_configuration settings = config.settings_dict.copy() - integration = collection.external_integration if protocol: config.protocol = protocol - integration.protocol = protocol if args.external_account_id: - collection.external_account_id = args.external_account_id - + settings["external_account_id"] = args.external_account_id if args.url: settings["url"] = args.url if args.username: settings["username"] = args.username if args.password: settings["password"] = args.password - self.apply_settings(args.setting, integration) if args.setting: for setting in args.setting: key, value = ConfigurationSettingScript._parse_setting(setting) @@ -1288,8 +1295,7 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): message += " I only know about: %s" % library_names raise ValueError(message) if collection not in library.collections: - library.collections.append(collection) - config.for_library(library.id, create=True) + collection.libraries.append(library) site_configuration_has_changed(_db) _db.commit() output.write("Configuration settings stored.\n") @@ -1556,7 +1562,6 @@ def do_run(self): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_marc_record=True, update_search_index=True, verbose=True, ) @@ -1725,7 +1730,6 @@ class WorkClassificationScript(WorkPresentationScript): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_marc_record=False, update_search_index=False, ) @@ -1804,7 +1808,7 @@ def paginate_query(self, query) -> Generator: the ordering of the rows follows all the joined tables""" for subject in self._unchecked_subjects(): - last_work: Optional[Work] = None # Last work object of the previous page + last_work: Work | None = None # Last work object of the previous page # IDs of the last work, for paging work_id, license_id, iden_id, classn_id = ( None, @@ -1873,7 +1877,6 @@ class WorkOPDSScript(WorkPresentationScript): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_marc_record=True, update_search_index=True, ) @@ -1934,7 +1937,8 @@ def look_up_collections(cls, _db, parsed, *args, **kwargs): """ parsed.collections = [] for name in parsed.collection_names: - collection = get_one(_db, Collection, name=name) + collection = 
Collection.by_name(_db, name) + if not collection: raise ValueError("Unknown collection: %s" % name) parsed.collections.append(collection) @@ -2005,7 +2009,7 @@ class OPDSImportScript(CollectionInputScript): name = "Import all books from the OPDS feed associated with a collection." IMPORTER_CLASS = OPDSImporter - MONITOR_CLASS: Type[OPDSImportMonitor] = OPDSImportMonitor + MONITOR_CLASS: type[OPDSImportMonitor] = OPDSImportMonitor PROTOCOL = ExternalIntegration.OPDS_IMPORT def __init__( @@ -2463,7 +2467,7 @@ class WhereAreMyBooksScript(CollectionInputScript): """ def __init__( - self, _db=None, output=None, search: Optional[ExternalSearchIndex] = None + self, _db=None, output=None, search: ExternalSearchIndex | None = None ): _db = _db or self._db super().__init__(_db) diff --git a/core/search/document.py b/core/search/document.py index 987433ed9..6cabe57d6 100644 --- a/core/search/document.py +++ b/core/search/document.py @@ -1,5 +1,4 @@ from abc import ABC, abstractmethod -from typing import Dict class SearchMappingFieldType(ABC): @@ -68,14 +67,14 @@ def serialize(self) -> dict: class SearchMappingFieldTypeParameterized(SearchMappingFieldType): """The base class for types that have parameters (date, keyword, etc)""" - _parameters: Dict[str, str] + _parameters: dict[str, str] def __init__(self, name: str): self._name = name self._parameters = {} @property - def parameters(self) -> Dict[str, str]: + def parameters(self) -> dict[str, str]: return self._parameters def serialize(self) -> dict: @@ -111,14 +110,14 @@ def sort_author_keyword() -> SearchMappingFieldTypeParameterized: class SearchMappingFieldTypeObject(SearchMappingFieldType): """See: https://opensearch.org/docs/latest/field-types/supported-field-types/object/""" - _properties: Dict[str, SearchMappingFieldType] + _properties: dict[str, SearchMappingFieldType] def __init__(self, type: str): self._type = type self._properties = {} @property - def properties(self) -> Dict[str, SearchMappingFieldType]: + def properties(self) -> dict[str, SearchMappingFieldType]: return self._properties def add_property(self, name, type: SearchMappingFieldType): @@ -226,24 +225,24 @@ class SearchMappingDocument: """ def __init__(self): - self._settings: Dict[str, dict] = {} - self._fields: Dict[str, SearchMappingFieldType] = {} - self._scripts: Dict[str, str] = {} + self._settings: dict[str, dict] = {} + self._fields: dict[str, SearchMappingFieldType] = {} + self._scripts: dict[str, str] = {} @property - def settings(self) -> Dict[str, dict]: + def settings(self) -> dict[str, dict]: return self._settings @property - def scripts(self) -> Dict[str, str]: + def scripts(self) -> dict[str, str]: return self._scripts @property - def properties(self) -> Dict[str, SearchMappingFieldType]: + def properties(self) -> dict[str, SearchMappingFieldType]: return self._fields @properties.setter - def properties(self, fields: Dict[str, SearchMappingFieldType]): + def properties(self, fields: dict[str, SearchMappingFieldType]): self._fields = dict(fields) def serialize(self) -> dict: diff --git a/core/search/migrator.py b/core/search/migrator.py index e68141e25..2aff64102 100644 --- a/core/search/migrator.py +++ b/core/search/migrator.py @@ -1,6 +1,6 @@ import logging from abc import ABC, abstractmethod -from typing import Iterable, List, Optional +from collections.abc import Iterable from core.search.revision import SearchSchemaRevision from core.search.revision_directory import SearchRevisionDirectory @@ -21,7 +21,7 @@ class SearchDocumentReceiverType(ABC): 
@abstractmethod def add_documents( self, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit documents to be indexed.""" @abstractmethod @@ -44,7 +44,7 @@ def pointer(self) -> str: def add_documents( self, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit documents to be indexed.""" return self._service.index_submit_documents( pointer=self._pointer, documents=documents @@ -77,7 +77,7 @@ def __init__( def add_documents( self, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit documents to be indexed.""" return self._receiver.add_documents(documents) @@ -109,9 +109,7 @@ def __init__(self, revisions: SearchRevisionDirectory, service: SearchService): self._revisions = revisions self._service = service - def migrate( - self, base_name: str, version: int - ) -> Optional[SearchMigrationInProgress]: + def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | None: """ Migrate to the given version using the given base name (such as 'circulation-works'). The function returns an object that expects to receive batches of search documents used to populate any new index. When all diff --git a/core/search/revision_directory.py b/core/search/revision_directory.py index 6adbdb01b..55e246252 100644 --- a/core/search/revision_directory.py +++ b/core/search/revision_directory.py @@ -1,4 +1,4 @@ -from typing import Mapping +from collections.abc import Mapping from core.config import CannotLoadConfiguration from core.search.revision import SearchSchemaRevision diff --git a/core/search/service.py b/core/search/service.py index 3a7122d8e..bf751fd91 100644 --- a/core/search/service.py +++ b/core/search/service.py @@ -1,8 +1,8 @@ import logging import re from abc import ABC, abstractmethod +from collections.abc import Iterable from dataclasses import dataclass -from typing import Iterable, List, Optional import opensearchpy.helpers from opensearch_dsl import MultiSearch, Search @@ -81,11 +81,11 @@ def write_pointer_name(self) -> str: """Get the name used for the write pointer.""" @abstractmethod - def read_pointer(self) -> Optional[str]: + def read_pointer(self) -> str | None: """Get the read pointer, if it exists.""" @abstractmethod - def write_pointer(self) -> Optional[SearchWritePointer]: + def write_pointer(self) -> SearchWritePointer | None: """Get the writer pointer, if it exists.""" @abstractmethod @@ -105,7 +105,7 @@ def index_create(self, revision: SearchSchemaRevision) -> None: """Atomically create an index for the given base name and revision.""" @abstractmethod - def indexes_created(self) -> List[str]: + def indexes_created(self) -> list[str]: """A log of all the indexes that have been created by this client service.""" @abstractmethod @@ -125,7 +125,7 @@ def index_submit_documents( self, pointer: str, documents: Iterable[dict], - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit search documents to the given index.""" @abstractmethod @@ -166,7 +166,7 @@ def __init__(self, client: OpenSearch, base_revision_name: str): self._search = Search(using=self._client) self.base_revision_name = base_revision_name self._multi_search = MultiSearch(using=self._client) - self._indexes_created: List[str] = [] + self._indexes_created: list[str] = [] # Documents are not allowed to automatically create indexes. 
# AWS OpenSearch only accepts the "flat" format @@ -174,10 +174,10 @@ def __init__(self, client: OpenSearch, base_revision_name: str): body={"persistent": {"action.auto_create_index": "false"}} ) - def indexes_created(self) -> List[str]: + def indexes_created(self) -> list[str]: return self._indexes_created - def write_pointer(self) -> Optional[SearchWritePointer]: + def write_pointer(self) -> SearchWritePointer | None: try: result: dict = self._client.indices.get_alias( name=self.write_pointer_name() @@ -278,7 +278,7 @@ def _ensure_scripts(self, revision: SearchSchemaRevision) -> None: def index_submit_documents( self, pointer: str, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: self._logger.info(f"submitting documents to index {pointer}") # Specifically override the target in all documents to the target pointer @@ -303,7 +303,7 @@ def index_submit_documents( yield_ok=False, ) - error_results: List[SearchServiceFailedDocument] = [] + error_results: list[SearchServiceFailedDocument] = [] if isinstance(errors, list): for error in errors: error_results.append(SearchServiceFailedDocument.from_bulk_error(error)) @@ -335,7 +335,7 @@ def write_pointer_set(self, revision: SearchSchemaRevision) -> None: self._logger.debug(f"setting write pointer {alias_name} to {target_index}") self._client.indices.update_aliases(body=action) - def read_pointer(self) -> Optional[str]: + def read_pointer(self) -> str | None: try: result: dict = self._client.indices.get_alias(name=self.read_pointer_name()) for name in result.keys(): diff --git a/core/search/v5.py b/core/search/v5.py index d206f5e78..7c202a2a3 100644 --- a/core/search/v5.py +++ b/core/search/v5.py @@ -1,5 +1,3 @@ -from typing import Dict - from core.search.document import ( BASIC_TEXT, BOOLEAN, @@ -227,7 +225,7 @@ def __init__(self): char_filter=self.AUTHOR_CHAR_FILTER_NAMES, ) - self._fields: Dict[str, SearchMappingFieldType] = { + self._fields: dict[str, SearchMappingFieldType] = { "summary": BASIC_TEXT, "title": FILTERABLE_TEXT, "subtitle": FILTERABLE_TEXT, diff --git a/core/selftest.py b/core/selftest.py index a01c16c2c..5136629b3 100644 --- a/core/selftest.py +++ b/core/selftest.py @@ -7,19 +7,9 @@ import sys import traceback from abc import ABC, abstractmethod +from collections.abc import Callable, Generator from datetime import datetime -from typing import ( - Any, - Callable, - Dict, - Generator, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) +from typing import Any, ParamSpec, TypeVar from sqlalchemy.orm import Session @@ -30,11 +20,6 @@ from core.util.log import LoggerMixin from core.util.opds_writer import AtomFeed -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - if sys.version_info >= (3, 11): from typing import Self else: @@ -47,7 +32,7 @@ class SelfTestResult: HasSelfTest.run_self_tests() returns a list of these """ - def __init__(self, name: Optional[str]): + def __init__(self, name: str | None): # Name of the test. self.name = name @@ -55,7 +40,7 @@ def __init__(self, name: Optional[str]): self.success = False # The exception raised, if any. - self.exception: Optional[Exception] = None + self.exception: Exception | None = None # The return value of the test method, assuming it ran to # completion. @@ -65,13 +50,13 @@ def __init__(self, name: Optional[str]): self.start: datetime = utc_now() # End time of the test. 
- self.end: Optional[datetime] = None + self.end: datetime | None = None # Collection associated with the test - self.collection: Optional[Collection] = None + self.collection: Collection | None = None @property - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: """Convert this SelfTestResult to a dictionary for use in JSON serialization. """ @@ -85,7 +70,7 @@ def to_dict(self) -> Dict[str, Any]: } else: exception = None - value: Dict[str, Any] = dict( + value: dict[str, Any] = dict( name=self.name, success=self.success, duration=self.duration, @@ -140,7 +125,7 @@ def duration(self) -> float: return (self.end - self.start).total_seconds() @property - def debug_message(self) -> Optional[str]: + def debug_message(self) -> str | None: """The debug message associated with the Exception, if any.""" if not self.exception: return None @@ -158,12 +143,12 @@ class BaseHasSelfTests(ABC): @classmethod def run_self_tests( - cls: Type[Self], + cls: type[Self], _db: Session, - constructor_method: Optional[Callable[..., Self]] = None, + constructor_method: Callable[..., Self] | None = None, *args: Any, **kwargs: Any, - ) -> Tuple[Dict[str, Any], List[SelfTestResult]]: + ) -> tuple[dict[str, Any], list[SelfTestResult]]: """Instantiate this class and call _run_self_tests on it. :param _db: A database connection. Will be passed into `_run_self_tests`. @@ -262,8 +247,8 @@ def run_test( def test_failure( cls, name: str, - message: Union[Optional[str], Exception], - debug_message: Optional[str] = None, + message: str | None | Exception, + debug_message: str | None = None, ) -> SelfTestResult: """Create a SelfTestResult for a known failure. @@ -292,7 +277,7 @@ def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None] @abstractmethod def store_self_test_results( - self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] + self, _db: Session, value: dict[str, Any], results: list[SelfTestResult] ) -> None: ... @@ -307,10 +292,10 @@ class HasSelfTests(BaseHasSelfTests, ABC): SELF_TEST_RESULTS_SETTING = "self_test_results" def store_self_test_results( - self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] + self, _db: Session, value: dict[str, Any], results: list[SelfTestResult] ) -> None: """Store the results of a self-test in the database.""" - integration: Optional[ExternalIntegration] + integration: ExternalIntegration | None from core.external_search import ExternalSearchIndex if isinstance(self, ExternalSearchIndex): @@ -328,19 +313,19 @@ def store_self_test_results( @classmethod def prior_test_results( - cls: Type[Self], + cls: type[Self], _db: Session, - constructor_method: Optional[Callable[..., Self]] = None, + constructor_method: Callable[..., Self] | None = None, *args: Any, **kwargs: Any, - ) -> Union[Optional[Dict[str, Any]], str]: + ) -> dict[str, Any] | None | str: """Retrieve the last set of test results from the database. The arguments here are the same as the arguments to run_self_tests. """ constructor_method = constructor_method or cls instance = constructor_method(*args, **kwargs) - integration: Optional[ExternalIntegration] + integration: ExternalIntegration | None from core.external_search import ExternalSearchIndex @@ -357,7 +342,7 @@ def prior_test_results( return None - def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: + def external_integration(self, _db: Session) -> ExternalIntegration | None: """Locate the ExternalIntegration associated with this object. 
The status of the self-tests will be stored as a ConfigurationSetting on this ExternalIntegration. @@ -373,11 +358,8 @@ def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: class HasSelfTestsIntegrationConfiguration(BaseHasSelfTests, LoggerMixin, ABC): - # Typing specific - collection: Any - def store_self_test_results( - self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] + self, _db: Session, value: dict[str, Any], results: list[SelfTestResult] ) -> None: integration = self.integration(_db) if integration is None: @@ -389,8 +371,8 @@ def store_self_test_results( @classmethod def load_self_test_results( - cls, integration: Optional[IntegrationConfiguration] - ) -> Optional[Dict[str, Any]]: + cls, integration: IntegrationConfiguration | None + ) -> dict[str, Any] | None: if integration is None: cls.logger().error( "No IntegrationConfiguration was found. Self-test results could not be loaded." @@ -407,21 +389,21 @@ def load_self_test_results( @classmethod def prior_test_results( - cls: Type[Self], + cls: type[Self], _db: Session, - constructor_method: Optional[Callable[..., Self]] = None, + constructor_method: Callable[..., Self] | None = None, *args: Any, **kwargs: Any, - ) -> Union[Optional[Dict[str, Any]], str]: + ) -> dict[str, Any] | None | str: """Retrieve the last set of test results from the database. The arguments here are the same as the arguments to run_self_tests. """ constructor_method = constructor_method or cls instance = constructor_method(*args, **kwargs) - integration: Optional[IntegrationConfiguration] = instance.integration(_db) + integration: IntegrationConfiguration | None = instance.integration(_db) return cls.load_self_test_results(integration) or "No results yet" @abstractmethod - def integration(self, _db: Session) -> Optional[IntegrationConfiguration]: + def integration(self, _db: Session) -> IntegrationConfiguration | None: ... diff --git a/core/service/container.py b/core/service/container.py index b05986a57..273dbdc3b 100644 --- a/core/service/container.py +++ b/core/service/container.py @@ -46,7 +46,7 @@ def create_container() -> Services: "api.axis", "api.bibliotheca", "api.enki", - "api.controller", + "api.circulation_manager", "api.overdrive", "core.feed.annotator.circulation", ] diff --git a/core/service/logging/configuration.py b/core/service/logging/configuration.py index 0a758c972..a39ce37dd 100644 --- a/core/service/logging/configuration.py +++ b/core/service/logging/configuration.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, Dict, Optional +from typing import Any import boto3 from pydantic import PositiveInt, validator @@ -20,18 +20,18 @@ class LoggingConfiguration(ServiceConfiguration): verbose_level: LogLevel = LogLevel.warning cloudwatch_enabled: bool = False - cloudwatch_region: Optional[str] = None + cloudwatch_region: str | None = None cloudwatch_group: str = "palace" cloudwatch_stream: str = DEFAULT_LOG_STREAM_NAME cloudwatch_interval: PositiveInt = 60 cloudwatch_create_group: bool = True - cloudwatch_access_key: Optional[str] = None - cloudwatch_secret_key: Optional[str] = None + cloudwatch_access_key: str | None = None + cloudwatch_secret_key: str | None = None @validator("cloudwatch_region") def validate_cloudwatch_region( - cls, v: Optional[str], values: Dict[str, Any] - ) -> Optional[str]: + cls, v: str | None, values: dict[str, Any] + ) -> str | None: if not values.get("cloudwatch_enabled"): # If cloudwatch is not enabled, no validation is needed. 
return None diff --git a/core/service/logging/container.py b/core/service/logging/container.py index 91f4fa281..54b8c247e 100644 --- a/core/service/logging/container.py +++ b/core/service/logging/container.py @@ -1,7 +1,7 @@ from __future__ import annotations from logging import Handler -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import boto3 from dependency_injector import providers @@ -32,7 +32,7 @@ class Logging(DeclarativeContainer): json_formatter: Provider[JSONFormatter] = Singleton(JSONFormatter) - cloudwatch_handler: Provider[Optional[Handler]] = providers.Singleton( + cloudwatch_handler: Provider[Handler | None] = providers.Singleton( create_cloudwatch_handler, formatter=json_formatter, level=config.level, diff --git a/core/service/logging/log.py b/core/service/logging/log.py index 23dc873e0..8cedf4462 100644 --- a/core/service/logging/log.py +++ b/core/service/logging/log.py @@ -3,8 +3,9 @@ import json import logging import socket +from collections.abc import Callable from logging import Handler -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from watchtower import CloudWatchLogHandler diff --git a/core/service/storage/configuration.py b/core/service/storage/configuration.py index 6e9b51f05..677a4badf 100644 --- a/core/service/storage/configuration.py +++ b/core/service/storage/configuration.py @@ -1,5 +1,3 @@ -from typing import Optional - import boto3 from pydantic import AnyHttpUrl, parse_obj_as, validator @@ -7,21 +5,21 @@ class StorageConfiguration(ServiceConfiguration): - region: Optional[str] = None - access_key: Optional[str] = None - secret_key: Optional[str] = None + region: str | None = None + access_key: str | None = None + secret_key: str | None = None - public_access_bucket: Optional[str] = None - analytics_bucket: Optional[str] = None + public_access_bucket: str | None = None + analytics_bucket: str | None = None - endpoint_url: Optional[AnyHttpUrl] = None + endpoint_url: AnyHttpUrl | None = None url_template: AnyHttpUrl = parse_obj_as( AnyHttpUrl, "https://{bucket}.s3.{region}.amazonaws.com/{key}" ) @validator("region") - def validate_region(cls, v: Optional[str]) -> Optional[str]: + def validate_region(cls, v: str | None) -> str | None: # No validation if region is not provided. 
if v is None: return None diff --git a/core/service/storage/container.py b/core/service/storage/container.py index cf454d39a..9375c7508 100644 --- a/core/service/storage/container.py +++ b/core/service/storage/container.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import boto3 from dependency_injector import providers @@ -25,7 +25,7 @@ class Storage(DeclarativeContainer): endpoint_url=config.endpoint_url, ) - analytics: Provider[Optional[S3Service]] = providers.Singleton( + analytics: Provider[S3Service | None] = providers.Singleton( S3Service.factory, client=s3_client, region=config.region, @@ -33,7 +33,7 @@ class Storage(DeclarativeContainer): url_template=config.url_template, ) - public: Provider[Optional[S3Service]] = providers.Singleton( + public: Provider[S3Service | None] = providers.Singleton( S3Service.factory, client=s3_client, region=config.region, diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py index 65cb5539b..558d46630 100644 --- a/core/service/storage/s3.py +++ b/core/service/storage/s3.py @@ -1,12 +1,11 @@ from __future__ import annotations import dataclasses -import logging import sys from io import BytesIO from string import Formatter from types import TracebackType -from typing import TYPE_CHECKING, BinaryIO, List, Optional, Type +from typing import TYPE_CHECKING, BinaryIO from urllib.parse import quote from botocore.exceptions import BotoCoreError, ClientError @@ -37,19 +36,19 @@ def __init__( bucket: str, key: str, url: str, - media_type: Optional[str] = None, + media_type: str | None = None, ) -> None: self.client = client self.key = key self.bucket = bucket self.part_number = 1 - self.parts: List[MultipartS3UploadPart] = [] + self.parts: list[MultipartS3UploadPart] = [] self.media_type = media_type - self.upload: Optional[CreateMultipartUploadOutputTypeDef] = None - self.upload_id: Optional[str] = None + self.upload: CreateMultipartUploadOutputTypeDef | None = None + self.upload_id: str | None = None self._complete = False self._url = url - self._exception: Optional[BaseException] = None + self._exception: BaseException | None = None def __enter__(self) -> Self: params = { @@ -64,9 +63,9 @@ def __enter__(self) -> Self: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> bool: if exc_val is None: self._upload_complete() @@ -77,15 +76,13 @@ def __exit__( ) self._upload_abort() self._exception = exc_val - if isinstance(exc_val, (ClientError, BotoCoreError)): - return True - return False + return True def upload_part(self, content: bytes) -> None: if self.complete or self.exception or self.upload_id is None: raise RuntimeError("Upload already complete or aborted.") - logging.info( + self.log.info( f"Uploading part {self.part_number} of {self.key} to {self.bucket}" ) result = self.client.upload_part( @@ -100,7 +97,7 @@ def upload_part(self, content: bytes) -> None: def _upload_complete(self) -> None: if not self.parts: - logging.info(f"Upload of {self.key} was empty.") + self.log.info(f"Upload of {self.key} was empty.") self._upload_abort() elif self.upload_id is None: raise RuntimeError("Upload ID not set.") @@ -114,7 +111,7 @@ def _upload_complete(self) -> None: self._complete = True def _upload_abort(self) -> None: - logging.info(f"Aborting upload of {self.key}.") + 
self.log.info(f"Aborting upload of {self.key}.") if self.upload_id is not None: self.client.abort_multipart_upload( Bucket=self.bucket, @@ -122,7 +119,7 @@ def _upload_abort(self) -> None: UploadId=self.upload_id, ) else: - logging.error("Upload ID not set, unable to abort.") + self.log.error("Upload ID not set, unable to abort.") @property def url(self) -> str: @@ -133,7 +130,7 @@ def complete(self) -> bool: return self._complete @property - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception @@ -141,7 +138,7 @@ class S3Service(LoggerMixin): def __init__( self, client: S3Client, - region: Optional[str], + region: str | None, bucket: str, url_template: str, ) -> None: @@ -167,10 +164,10 @@ def __init__( def factory( cls, client: S3Client, - region: Optional[str], - bucket: Optional[str], + region: str | None, + bucket: str | None, url_template: str, - ) -> Optional[Self]: + ) -> Self | None: if bucket is None: return None return cls(client, region, bucket, url_template) @@ -180,12 +177,15 @@ def generate_url(self, key: str) -> str: bucket=self.bucket, key=quote(key), region=self.region ) + def delete(self, key: str) -> None: + self.client.delete_object(Bucket=self.bucket, Key=key) + def store( self, key: str, content: str | bytes, - content_type: Optional[str] = None, - ) -> Optional[str]: + content_type: str | None = None, + ) -> str | None: if isinstance(content, str): content = content.encode("utf8") return self.store_stream( @@ -196,8 +196,8 @@ def store_stream( self, key: str, stream: BinaryIO, - content_type: Optional[str] = None, - ) -> Optional[str]: + content_type: str | None = None, + ) -> str | None: try: extra_args = {} if content_type is None else {"ContentType": content_type} self.client.upload_fileobj( @@ -223,7 +223,7 @@ def store_stream( return url def multipart( - self, key: str, content_type: Optional[str] = None + self, key: str, content_type: str | None = None ) -> MultipartS3ContextManager: url = self.generate_url(key) return MultipartS3ContextManager( diff --git a/core/util/__init__.py b/core/util/__init__.py index 7e7acbbab..9754039e1 100644 --- a/core/util/__init__.py +++ b/core/util/__init__.py @@ -5,7 +5,8 @@ import re import string from collections import Counter -from typing import Any, Iterable, Optional +from collections.abc import Iterable +from typing import Any, Optional import sqlalchemy from money import Money @@ -580,9 +581,9 @@ def chunks(lst, chunk_size, start_index=0): def ansible_boolean( - value: Optional[str | bool], - label: Optional[str] = None, - default: Optional[bool] = None, + value: str | bool | None, + label: str | None = None, + default: bool | None = None, ) -> bool: """Map Ansible "truthy" and "falsy" values to a Python boolean. diff --git a/core/util/accept_language.py b/core/util/accept_language.py index ae0ea14b0..0696bde4f 100644 --- a/core/util/accept_language.py +++ b/core/util/accept_language.py @@ -22,7 +22,6 @@ import re from collections import namedtuple from operator import attrgetter -from typing import Optional VALIDATE_LANG_REGEX = re.compile("^[a-z]+$", flags=re.IGNORECASE) QUALITY_VAL_SUB_REGEX = re.compile("^q=", flags=re.IGNORECASE) @@ -34,7 +33,7 @@ def parse_accept_language( - accept_language_str: str, default_quality: Optional[float] = None + accept_language_str: str, default_quality: float | None = None ) -> list: """ Parse a RFC 2616 Accept-Language string. 
diff --git a/core/util/authentication_for_opds.py b/core/util/authentication_for_opds.py index 334bee377..dfcb899d7 100644 --- a/core/util/authentication_for_opds.py +++ b/core/util/authentication_for_opds.py @@ -1,7 +1,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional +from typing import Any from sqlalchemy.orm import Session @@ -19,7 +19,7 @@ def flow_type(self) -> str: """ ... - def authentication_flow_document(self, _db: Session) -> Dict[str, Any]: + def authentication_flow_document(self, _db: Session) -> dict[str, Any]: """Convert this object into a dictionary that can be used in the `authentication` list of an Authentication For OPDS document. """ @@ -28,7 +28,7 @@ def authentication_flow_document(self, _db: Session) -> Dict[str, Any]: return data @abstractmethod - def _authentication_flow_document(self, _db: Session) -> Dict[str, Any]: + def _authentication_flow_document(self, _db: Session) -> dict[str, Any]: ... @@ -44,8 +44,8 @@ def __init__( self, id: str | None = None, title: str | None = None, - authentication_flows: List[OPDSAuthenticationFlow] | None = None, - links: List[Dict[str, Optional[str]]] | None = None, + authentication_flows: list[OPDSAuthenticationFlow] | None = None, + links: list[dict[str, str | None]] | None = None, ): """Initialize an Authentication For OPDS document. @@ -63,7 +63,7 @@ def __init__( self.authentication_flows = authentication_flows or [] self.links = links or [] - def to_dict(self, _db: Session) -> Dict[str, Any]: + def to_dict(self, _db: Session) -> dict[str, Any]: """Convert this data structure to a dictionary that becomes an Authentication For OPDS document when serialized to JSON. @@ -83,7 +83,7 @@ def to_dict(self, _db: Session) -> Dict[str, Any]: if not isinstance(value, list): raise ValueError("'%s' must be a list." 
% key) - document: Dict[str, Any] = dict(id=self.id, title=self.title) + document: dict[str, Any] = dict(id=self.id, title=self.title) flow_documents = document.setdefault("authentication", []) for flow in self.authentication_flows: doc = flow.authentication_flow_document(_db) diff --git a/core/util/base64.py b/core/util/base64.py index 3de2b6e6f..77e1db67d 100644 --- a/core/util/base64.py +++ b/core/util/base64.py @@ -1,15 +1,9 @@ from __future__ import annotations import base64 as stdlib_base64 -import sys +from collections.abc import Callable from functools import wraps -from typing import Callable, TypeVar - -# TODO: Remove this when we drop support for Python 3.9 -if sys.version_info >= (3, 10): - from typing import Concatenate, ParamSpec -else: - from typing_extensions import Concatenate, ParamSpec +from typing import Concatenate, ParamSpec, TypeVar P = ParamSpec("P") T = TypeVar("T") diff --git a/core/util/cache.py b/core/util/cache.py index 986d03e34..36919acc2 100644 --- a/core/util/cache.py +++ b/core/util/cache.py @@ -1,22 +1,15 @@ from __future__ import annotations -import sys import time +from collections.abc import Callable from functools import wraps from threading import Lock -from typing import Any, Callable, Dict, List, Optional, TypeVar, cast +from typing import Any, ParamSpec, TypeVar, cast from sqlalchemy.orm import Session from core.model.datasource import DataSource -# TODO: Remove this when we drop support for Python 3.9 -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - - P = ParamSpec("P") T = TypeVar("T") @@ -41,7 +34,7 @@ def func(...): because the first argument will always be the instance itself Hence the signatures will be different for each object """ - cache: Dict[str, Any] = {} + cache: dict[str, Any] = {} def outer(func: Callable[P, T]) -> Callable[P, T]: @wraps(func) @@ -74,7 +67,7 @@ class CachedData: """ # Instance of itself - cache: Optional[CachedData] = None + cache: CachedData | None = None @classmethod def initialize(cls, _db: Session) -> CachedData: @@ -96,7 +89,7 @@ def __init__(self, _db: Session) -> None: self.lock = Lock() @memoize(ttls=3600) - def data_sources(self) -> List[DataSource]: + def data_sources(self) -> list[DataSource]: """List of all datasources within the system""" with self.lock: sources = self._db.query(DataSource).order_by(DataSource.id).all() diff --git a/core/util/datetime_helpers.py b/core/util/datetime_helpers.py index 137ec1fa4..9f3bd8e6d 100644 --- a/core/util/datetime_helpers.py +++ b/core/util/datetime_helpers.py @@ -1,5 +1,5 @@ import datetime -from typing import Optional, Tuple, overload +from typing import overload import pytz from dateutil.relativedelta import relativedelta @@ -41,11 +41,11 @@ def to_utc(dt: datetime.datetime) -> datetime.datetime: @overload -def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: +def to_utc(dt: datetime.datetime | None) -> datetime.datetime | None: ... -def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: +def to_utc(dt: datetime.datetime | None) -> datetime.datetime | None: """This converts a naive datetime object that represents UTC into an aware datetime object. 
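
The to_utc overload above pairs with a non-optional overload (outside this hunk) so that None passes through the signature without widening every caller's return type. An illustrative sketch of what a type checker infers, assuming the import path from the diff:

    import datetime

    from core.util.datetime_helpers import to_utc

    aware = to_utc(datetime.datetime(2024, 1, 1, 12, 0))
    print(aware.isoformat())  # inferred as datetime.datetime: no None check needed

    maybe: datetime.datetime | None = None
    print(to_utc(maybe))  # inferred as datetime.datetime | None: prints "None" here
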
@@ -73,7 +73,7 @@ def strptime_utc(date_string: str, format: str) -> datetime.datetime: return to_utc(datetime.datetime.strptime(date_string, format)) -def previous_months(number_of_months: int) -> Tuple[datetime.date, datetime.date]: +def previous_months(number_of_months: int) -> tuple[datetime.date, datetime.date]: """Calculate date boundaries for matching the specified previous number of months. :param number_of_months: The number of months in the interval. diff --git a/core/util/flask_util.py b/core/util/flask_util.py index d728af5e0..c2bbd876b 100644 --- a/core/util/flask_util.py +++ b/core/util/flask_util.py @@ -1,7 +1,7 @@ """Utilities for Flask applications.""" import datetime import time -from typing import Any, Dict +from typing import Any from wsgiref.handlers import format_date_time from flask import Response as FlaskResponse @@ -198,7 +198,7 @@ class Config: def api_dict( self, *args: Any, by_alias: bool = True, **kwargs: Any - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Return the instance in a form suitable for a web response. By default, the properties use their lower camel case aliases, diff --git a/core/util/http.py b/core/util/http.py index c15687682..c88596701 100644 --- a/core/util/http.py +++ b/core/util/http.py @@ -1,7 +1,8 @@ import logging import time +from collections.abc import Callable from json import JSONDecodeError -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Any from urllib.parse import urlparse import requests @@ -407,7 +408,7 @@ def debuggable_get(cls, url: str, **kwargs: Any) -> Response: @classmethod def debuggable_post( - cls, url: str, payload: Union[str, Dict[str, Any]], **kwargs: Any + cls, url: str, payload: str | dict[str, Any], **kwargs: Any ) -> Response: """Make a POST request that returns a detailed problem detail document on error. 
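
Since only the annotation on debuggable_post changed, both payload shapes still work. A hedged usage sketch: the class name HTTP is an assumption (the classmethod lives in core/util/http.py, but the enclosing class is outside this hunk), and the endpoint and payload are made up:

    from core.util.http import HTTP  # assumed enclosing class

    # A dict payload is accepted as well as a plain string, per `str | dict[str, Any]`.
    response = HTTP.debuggable_post(
        "https://cm.example.com/api/report",
        {"event": "sync", "count": 3},
    )
    print(response.status_code)
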
@@ -420,7 +421,7 @@ def debuggable_request( cls, http_method: str, url: str, - make_request_with: Optional[Callable[..., Response]] = None, + make_request_with: Callable[..., Response] | None = None, **kwargs: Any, ) -> Response: """Make a request that raises a ProblemError with a detailed problem detail @@ -451,8 +452,8 @@ def process_debuggable_response( cls, url: str, response: Response, - allowed_response_codes: Optional[List[Union[str, int]]] = None, - disallowed_response_codes: Optional[List[Union[str, int]]] = None, + allowed_response_codes: list[str | int] | None = None, + disallowed_response_codes: list[str | int] | None = None, expected_encoding: str = "utf-8", ) -> Response: """If there was a problem with an integration request, diff --git a/core/util/languages.py b/core/util/languages.py index cd277fe7a..346d23be0 100644 --- a/core/util/languages.py +++ b/core/util/languages.py @@ -3,7 +3,7 @@ import re from collections import defaultdict -from typing import Dict, List, Pattern +from re import Pattern class LookupTable(dict): @@ -31,9 +31,9 @@ class LanguageCodes: two_to_three = LookupTable() three_to_two = LookupTable() terminologic_to_three = LookupTable() - english_names: Dict[str, List[str]] = defaultdict(list) + english_names: dict[str, list[str]] = defaultdict(list) english_names_to_three = LookupTable() - native_names: Dict[str, List[str]] = defaultdict(list) + native_names: dict[str, list[str]] = defaultdict(list) RAW_DATA = """aar||aa|Afar|afar abk||ab|Abkhazian|abkhaze @@ -653,7 +653,7 @@ class LanguageNames: number = re.compile("[0-9]") parentheses = re.compile(r"\([^)]+\)") - name_to_codes: Dict[str, List[str]] + name_to_codes: dict[str, list[str]] name_re: Pattern @classmethod diff --git a/core/util/log.py b/core/util/log.py index 2ac04a422..1d092d6a0 100644 --- a/core/util/log.py +++ b/core/util/log.py @@ -1,25 +1,51 @@ import functools import logging -import sys import time +from collections.abc import Callable, Generator from contextlib import contextmanager -from typing import Callable, Optional +from typing import TypeVar + +from typing_extensions import ParamSpec + +from core.service.logging.configuration import LogLevel + +P = ParamSpec("P") +T = TypeVar("T") def log_elapsed_time( - *, log_method: Callable, message_prefix: Optional[str] = None, skip_start=False -): + *, + log_level: LogLevel, + message_prefix: str | None = None, + skip_start: bool = False, +) -> Callable[[Callable[P, T]], Callable[P, T]]: """Decorator for logging elapsed time. - :param log_method: Callable to be used to log the message(s). + Must be applied to a method of a subclass of LoggerMixin or a class that has a log property + that is an instance of logging.Logger. + + :param log_level: The log level to use for the emitted log records. :param message_prefix: Optional string to be prepended to the emitted log records. :param skip_start: Boolean indicating whether to skip the starting message. 
""" prefix = f"{message_prefix}: " if message_prefix else "" - def outer(fn): + def outer(fn: Callable[P, T]) -> Callable[P, T]: @functools.wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + if ( + len(args) > 0 + and hasattr(args[0], "log") + and isinstance(args[0].log, logging.Logger) + ): + log_method = getattr(args[0].log, log_level.name) + elif len(args) > 0 and hasattr(args[0], "logger"): + log_method = getattr(args[0].logger(), log_level.name) + else: + raise RuntimeError( + "Decorator must be applied to a method of a LoggerMixin or a subclass of LoggerMixin." + ) + if not skip_start: log_method(f"{prefix}Starting...") tic = time.perf_counter() @@ -38,8 +64,11 @@ def wrapper(*args, **kwargs): @contextmanager def elapsed_time_logging( - *, log_method: Callable, message_prefix: Optional[str] = None, skip_start=False -): + *, + log_method: Callable[[str], None], + message_prefix: str | None = None, + skip_start: bool = False, +) -> Generator[None, None, None]: """Context manager for logging elapsed time. :param log_method: Callable to be used to log the message(s). @@ -59,18 +88,11 @@ def elapsed_time_logging( log_method(f"{prefix}Completed. (elapsed time: {elapsed_time:0.4f} seconds)") -# Once we drop python 3.8 this can go away -if sys.version_info >= (3, 9): - cache_decorator = functools.cache -else: - cache_decorator = functools.lru_cache - - class LoggerMixin: """Mixin that adds a logger with a standardized name""" @classmethod - @cache_decorator + @functools.cache def logger(cls) -> logging.Logger: """ Returns a logger named after the module and name of the class. diff --git a/core/util/notifications.py b/core/util/notifications.py index 4c11f9af7..0dab79079 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Mapping, Optional, cast +from collections.abc import Mapping +from typing import cast import firebase_admin from firebase_admin import credentials, messaging @@ -55,7 +56,7 @@ def base_url(cls, _db: Session) -> str: def send_messages( cls, tokens: list[DeviceToken], - notification: Optional[messaging.Notification], + notification: messaging.Notification | None, data: Mapping[str, str | None], ) -> list[str]: responses = [] @@ -195,9 +196,11 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: loans_api = f"{url}/{hold.patron.library.short_name}/loans" work: Work = hold.work identifier: Identifier = hold.license_pool.identifier - title = f'Your hold on "{work.title}" is available!' + title = "Your hold is available!" + body = f'Your hold on "{work.title}" is available!' 
data = dict( title=title, + body=body, event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, loans_endpoint=loans_api, identifier=identifier.identifier, @@ -209,7 +212,9 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: if hold.patron.authorization_identifier: data["authorization_identifier"] = hold.patron.authorization_identifier - resp = cls.send_messages(tokens, messaging.Notification(title=title), data) + resp = cls.send_messages( + tokens, messaging.Notification(title=title, body=body), data + ) if len(resp) > 0: # Atleast one notification succeeded hold.patron_last_notified = utc_now().date() diff --git a/core/util/problem_detail.py b/core/util/problem_detail.py index 72b4f507e..7c9172830 100644 --- a/core/util/problem_detail.py +++ b/core/util/problem_detail.py @@ -6,7 +6,6 @@ import json as j import logging -from typing import Dict, Optional, Tuple from flask_babel import LazyString from pydantic import BaseModel @@ -18,10 +17,10 @@ def json( type: str, - status: Optional[int], - title: Optional[str], - detail: Optional[str] = None, - debug_message: Optional[str] = None, + status: int | None, + title: str | None, + detail: str | None = None, + debug_message: str | None = None, ) -> str: d = dict(type=type, title=str(title), status=status) if detail: @@ -32,11 +31,11 @@ def json( class ProblemDetailModel(BaseModel): - type: Optional[str] = None - status: Optional[int] = None - title: Optional[str] = None - detail: Optional[str] = None - debug_message: Optional[str] = None + type: str | None = None + status: int | None = None + title: str | None = None + detail: str | None = None + debug_message: str | None = None class ProblemDetail: @@ -48,10 +47,10 @@ class ProblemDetail: def __init__( self, uri: str, - status_code: Optional[int] = None, - title: Optional[str] = None, - detail: Optional[str] = None, - debug_message: Optional[str] = None, + status_code: int | None = None, + title: str | None = None, + detail: str | None = None, + debug_message: str | None = None, ): self.uri = uri self.title = title @@ -60,7 +59,7 @@ def __init__( self.debug_message = debug_message @property - def response(self) -> Tuple[str, int, Dict[str, str]]: + def response(self) -> tuple[str, int, dict[str, str]]: """Create a Flask-style response.""" return ( json( @@ -77,9 +76,9 @@ def response(self) -> Tuple[str, int, Dict[str, str]]: def detailed( self, detail: str, - status_code: Optional[int] = None, - title: Optional[str] = None, - debug_message: Optional[str] = None, + status_code: int | None = None, + title: str | None = None, + debug_message: str | None = None, ) -> ProblemDetail: """Create a ProblemDetail for a more specific occurrence of an existing ProblemDetail. @@ -105,9 +104,9 @@ def detailed( def with_debug( self, debug_message: str, - detail: Optional[str] = None, - status_code: Optional[int] = None, - title: Optional[str] = None, + detail: str | None = None, + status_code: int | None = None, + title: str | None = None, ) -> ProblemDetail: """Insert debugging information into a ProblemDetail. diff --git a/core/util/uuid.py b/core/util/uuid.py new file mode 100644 index 000000000..07d81774a --- /dev/null +++ b/core/util/uuid.py @@ -0,0 +1,25 @@ +from base64 import urlsafe_b64decode +from uuid import UUID + +from core.util.base64 import urlsafe_b64encode + + +def uuid_encode(uuid: UUID) -> str: + """ + Encode a UUID to a URL-safe base64 string with = padding removed, + provides a compact representation of the UUID to use in URLs. 
+ """ + encoded = urlsafe_b64encode(uuid.bytes) + unpadded = encoded.rstrip("=") + return unpadded + + +def uuid_decode(encoded: str) -> UUID: + """ + Decode a URL-safe base64 string to a UUID. Reverse of uuid_encode. + """ + if len(encoded) != 22: + raise ValueError("Invalid base64 string for UUID") + padding = "==" + decoded_bytes = urlsafe_b64decode(encoded + padding) + return UUID(bytes=decoded_bytes) diff --git a/core/util/worker_pools.py b/core/util/worker_pools.py index 97ad1fe2b..bd3aff258 100644 --- a/core/util/worker_pools.py +++ b/core/util/worker_pools.py @@ -1,10 +1,11 @@ from __future__ import annotations import sys +from collections.abc import Callable from queue import Queue from threading import Thread from types import TracebackType -from typing import Any, Callable, Literal, Optional, Type +from typing import Any, Literal from sqlalchemy.orm import Session @@ -114,9 +115,9 @@ def restart(self) -> Self: def __exit__( self, - type: Optional[Type[BaseException]], - value: Optional[BaseException], - traceback: Optional[TracebackType], + type: type[BaseException] | None, + value: BaseException | None, + traceback: TracebackType | None, ) -> Literal[False]: self.join() if value is not None: diff --git a/core/util/xmlparser.py b/core/util/xmlparser.py index d14fb7c7b..653719bc7 100644 --- a/core/util/xmlparser.py +++ b/core/util/xmlparser.py @@ -1,17 +1,9 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Callable, Generator from io import BytesIO -from typing import ( - TYPE_CHECKING, - Callable, - Dict, - Generator, - Generic, - List, - Optional, - TypeVar, -) +from typing import TYPE_CHECKING, Generic, TypeVar from lxml import etree @@ -25,12 +17,12 @@ class XMLParser: """Helper functions to process XML data.""" - NAMESPACES: Dict[str, str] = {} + NAMESPACES: dict[str, str] = {} @classmethod def _xpath( - cls, tag: _Element, expression: str, namespaces: Optional[Dict[str, str]] = None - ) -> List[_Element]: + cls, tag: _Element, expression: str, namespaces: dict[str, str] | None = None + ) -> list[_Element]: if not namespaces: namespaces = cls.NAMESPACES """Wrapper to do a namespaced XPath expression.""" @@ -38,8 +30,8 @@ def _xpath( @classmethod def _xpath1( - cls, tag: _Element, expression: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[_Element]: + cls, tag: _Element, expression: str, namespaces: dict[str, str] | None = None + ) -> _Element | None: """Wrapper to do a namespaced XPath expression.""" values = cls._xpath(tag, expression, namespaces=namespaces) if not values: @@ -54,8 +46,8 @@ def _cls(self, tag_name: str, class_name: str) -> str: ) def text_of_optional_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[str]: + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None + ) -> str | None: tag = self._xpath1(tag, name, namespaces=namespaces) if tag is None or tag.text is None: return None @@ -63,18 +55,18 @@ def text_of_optional_subtag( return str(tag.text) def text_of_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None ) -> str: return str(tag.xpath(name, namespaces=namespaces)[0].text) def int_of_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None ) -> int: return int(self.text_of_subtag(tag, name, 
namespaces=namespaces)) def int_of_optional_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[int]: + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None + ) -> int | None: v = self.text_of_optional_subtag(tag, name, namespaces=namespaces) if not v: return None @@ -107,8 +99,8 @@ def _load_xml( def _process_all( xml: _ElementTree, xpath_expression: str, - namespaces: Dict[str, str], - handler: Callable[[_Element, Dict[str, str]], Optional[T]], + namespaces: dict[str, str], + handler: Callable[[_Element, dict[str, str]], T | None], ) -> Generator[T, None, None]: """ Process all elements matching the given XPath expression. Calling @@ -144,7 +136,7 @@ def process_all( def process_first( self, xml: str | bytes | _ElementTree, - ) -> Optional[T]: + ) -> T | None: """ Process the first element matching the given XPath expression. Calling process_one on the element and returning None if no elements match or @@ -163,9 +155,7 @@ def xpath_expression(self) -> str: ... @abstractmethod - def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Optional[T]: + def process_one(self, tag: _Element, namespaces: dict[str, str] | None) -> T | None: """ Process one element and return the result. Return None if the element should be ignored. diff --git a/customlists/customlist_explain.py b/customlists/customlist_explain.py index da99cefba..9d3f4fd86 100644 --- a/customlists/customlist_explain.py +++ b/customlists/customlist_explain.py @@ -2,7 +2,6 @@ import csv import json import logging -from typing import List from customlists.customlist_report import ( CustomListProblemBookBrokenOnSourceCM, @@ -21,7 +20,7 @@ class CustomListImportExplainer: @staticmethod - def _parse_arguments(args: List[str]) -> argparse.Namespace: + def _parse_arguments(args: list[str]) -> argparse.Namespace: parser: argparse.ArgumentParser = argparse.ArgumentParser( description="Explain what went wrong during an import." 
) @@ -214,7 +213,7 @@ def __init__(self, args: argparse.Namespace): self._output_csv_file = args.output_csv_file @staticmethod - def create(args: List[str]) -> "CustomListImportExplainer": + def create(args: list[str]) -> "CustomListImportExplainer": return CustomListImportExplainer( CustomListImportExplainer._parse_arguments(args) ) diff --git a/customlists/customlist_export.py b/customlists/customlist_export.py index 9cfcb9e9a..0d9d64695 100644 --- a/customlists/customlist_export.py +++ b/customlists/customlist_export.py @@ -3,7 +3,8 @@ import logging import os import re -from typing import IO, Any, Iterable, List, Mapping, Union +from collections.abc import Iterable, Mapping +from typing import IO, Any from urllib.parse import unquote import feedparser @@ -162,9 +163,9 @@ def name(self) -> str: class CustomList: - _books: List[Book] - _problematic_books: List[ProblematicBook] - _collections: List[CollectionReference] + _books: list[Book] + _problematic_books: list[ProblematicBook] + _collections: list[CollectionReference] _id: int _name: str _library_id: str @@ -225,8 +226,8 @@ def name(self) -> str: class CustomListExports: - _lists: List[CustomList] - _problematic_lists: List[ProblematicCustomList] + _lists: list[CustomList] + _problematic_lists: list[ProblematicCustomList] def __init__(self): self._lists = [] @@ -336,14 +337,14 @@ class CustomListExporter: _output_file: str _library_name: str _schema_file: str - _lists: List[str] + _lists: list[str] @staticmethod def _fatal(message: str): raise CustomListExportFailed(message) @staticmethod - def _parse_arguments(args: List[str]) -> argparse.Namespace: + def _parse_arguments(args: list[str]) -> argparse.Namespace: parser: argparse.ArgumentParser = argparse.ArgumentParser( description="Fetch one or more custom lists." ) @@ -374,9 +375,7 @@ def _parse_arguments(args: List[str]) -> argparse.Namespace: ) return parser.parse_args(args) - def _make_custom_list( - self, raw_list: dict - ) -> Union[CustomList, ProblematicCustomList]: + def _make_custom_list(self, raw_list: dict) -> CustomList | ProblematicCustomList: id: int = raw_list["id"] name: str = raw_list["name"] @@ -529,5 +528,5 @@ def __init__(self, args: argparse.Namespace): self._logger.setLevel(logging.DEBUG) @staticmethod - def create(args: List[str]) -> "CustomListExporter": + def create(args: list[str]) -> "CustomListExporter": return CustomListExporter(CustomListExporter._parse_arguments(args)) diff --git a/customlists/customlist_import.py b/customlists/customlist_import.py index 82faa10a9..1d97d2500 100644 --- a/customlists/customlist_import.py +++ b/customlists/customlist_import.py @@ -3,7 +3,6 @@ import logging import os import re -from typing import Dict, List, Set from urllib.parse import unquote import feedparser @@ -62,7 +61,7 @@ def _fatal_response(message: str, response: Response) -> None: CustomListImporter._fatal(CustomListImporter._error_response(message, response)) @staticmethod - def _parse_arguments(args: List[str]) -> argparse.Namespace: + def _parse_arguments(args: list[str]) -> argparse.Namespace: parser: argparse.ArgumentParser = argparse.ArgumentParser( description="Import custom lists." ) @@ -133,7 +132,7 @@ def _process_check_book( report: CustomListReport, customlist: CustomList, book: Book, - rejected_books: Set[str], + rejected_books: set[str], ) -> None: self._logger.info( f"Checking that book '{book.title()}' ({book.id()}) has a matching ID and title on the target CM." 
@@ -217,7 +216,7 @@ def _process_customlist_check_collections( self, list_report: CustomListReport, customlist: CustomList, - rejected_collections: Set[str], + rejected_collections: set[str], ) -> None: self._logger.info( "Checking that all referenced collections exist on the target CM" @@ -250,7 +249,7 @@ def _process_customlist_check_books( self, list_report: CustomListReport, customlist: CustomList, - rejected_books: Set[str], + rejected_books: set[str], ) -> None: for book in customlist.books(): self._process_check_book( @@ -269,7 +268,7 @@ def _process_customlist_check_list( self, list_report: CustomListReport, customlist: CustomList, - rejected_lists: Set[int], + rejected_lists: set[int], ) -> None: self._logger.info( f"Checking that list '{customlist.name()}' ({customlist.id()}) does not exist on the target CM" @@ -300,20 +299,20 @@ def _process_customlist_update_list( self, list_report: CustomListReport, customlist: CustomList, - rejected_books: Set[str], - rejected_collections: Set[str], + rejected_books: set[str], + rejected_collections: set[str], ) -> None: self._logger.info( f"Updating list '{customlist.name()}' ({customlist.id()}) on the target CM with {customlist.size()} books" ) if not self._dry_run: - output_books: List[dict] = [] + output_books: list[dict] = [] for book in customlist.books(): if book.id() in rejected_books: continue output_books.append({"id": book.id(), "title": book.title()}) - output_collections: List[int] = [] + output_collections: list[int] = [] for collection in customlist.collections(): if collection.name() in rejected_collections: continue @@ -362,10 +361,10 @@ def _process_customlists( report: CustomListsReport, customlists: CustomListExports, ) -> None: - list_reports: Dict[int, CustomListReport] = {} - rejected_books: Set[str] = set({}) - rejected_lists: Set[int] = set({}) - rejected_collections: Set[str] = set({}) + list_reports: dict[int, CustomListReport] = {} + rejected_books: set[str] = set({}) + rejected_lists: set[int] = set({}) + rejected_collections: set[str] = set({}) for customlist in customlists.lists(): list_report = CustomListReport(customlist.id(), customlist.name()) @@ -434,5 +433,5 @@ def __init__(self, args: argparse.Namespace): self._logger.setLevel(logging.DEBUG) @staticmethod - def create(args: List[str]) -> "CustomListImporter": + def create(args: list[str]) -> "CustomListImporter": return CustomListImporter(CustomListImporter._parse_arguments(args)) diff --git a/customlists/customlist_report.py b/customlists/customlist_report.py index 8d15a5528..7f1a61619 100644 --- a/customlists/customlist_report.py +++ b/customlists/customlist_report.py @@ -1,6 +1,6 @@ import json import logging -from typing import Iterable, List +from collections.abc import Iterable import jsonschema @@ -304,7 +304,7 @@ def name(self) -> str: class CustomListReport: - _errors: List[CustomListProblem] + _errors: list[CustomListProblem] _id: int _name: str @@ -407,7 +407,7 @@ def _parse(document: dict) -> "CustomListReport": class CustomListsReport: - _reports: List[CustomListReport] + _reports: list[CustomListReport] def __init__(self): self._reports = [] diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh new file mode 100755 index 000000000..2ebe133e1 --- /dev/null +++ b/docker/ci/test_migrations.sh @@ -0,0 +1,139 @@ +#!/bin/bash + +# This script makes sure that our database migrations bring the database up to date +# so that the resulting database is the same as if we had initialized a new instance. 
+# +# This is done by (1) checking out an older version of our codebase at the commit on +# which the first migration was added and then (2) initializing a new instance. Then +# we check out the current version of our codebase and run our migrations. +# +# After the migrations are complete we use `alembic check` [1] to make sure that the +# database model matches the migrated database. If the model matches, then the +# database is in sync and the migrations are up to date. If the database doesn't match, +# then a new migration is required. We then repeat this process with our down +# migrations to make sure that the down migrations stay in sync as well. +# +# Note: This test cannot be added to the normal migration test suite since it requires +# manipulating the git history and checking out older versions of the codebase. All of +# the commands in this script are run inside a docker-compose environment. +# +# [1] https://alembic.sqlalchemy.org/en/latest/autogenerate.html#running-alembic-check-to-test-for-new-upgrade-operations + + +compose_cmd() { + docker --log-level ERROR compose --progress quiet "$@" +} + +run_in_container() +{ + compose_cmd run --build --rm webapp /bin/bash -c "source env/bin/activate && $*" +} + +cleanup() { + compose_cmd down + git checkout -q "${current_branch}" +} + +run_migrations() { + ALEMBIC_CMD=$1 + run_in_container "alembic ${ALEMBIC_CMD}" + exit_code=$? + if [[ $exit_code -ne 0 ]]; then + echo "ERROR: Running database migrations failed." + cleanup + exit $exit_code + fi + echo "" +} + +check_db() { + DETAILED_ERROR=$1 + run_in_container "alembic check" + exit_code=$? + if [[ $exit_code -eq 0 ]]; then + echo "SUCCESS: Database is in sync." + echo "" + else + echo "ERROR: Database is out of sync! ${DETAILED_ERROR}" + cleanup + exit $exit_code + fi +} + +if ! git diff --quiet; then + echo "ERROR: You have uncommitted changes. These changes will be lost if you run this script." + echo " Please commit or stash your changes and try again." + exit 1 +fi + +# Find the currently checked out branch +current_branch=$(git symbolic-ref -q --short HEAD) +current_branch_exit_code=$? + +# If we are not on a branch, then we are in a detached HEAD state, so +# we use the commit hash instead. This happens in CI when being run +# against a PR instead of a branch. +# See: https://stackoverflow.com/questions/69935511/how-do-i-save-the-current-head-so-i-can-check-it-back-out-in-the-same-way-later +if [[ $current_branch_exit_code -ne 0 ]]; then + current_branch=$(git rev-parse HEAD) + echo "WARNING: You are in a detached HEAD state. This is normal when running in CI." + echo " The current commit hash will be used instead of a branch name." +fi + +echo "Current branch: ${current_branch}" + +# Find the first migration file +first_migration_id=$(run_in_container alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) +if [[ -z $first_migration_id ]]; then + echo "ERROR: Could not find first migration id." + exit 1 +fi + +first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") +if [[ -z $first_migration_file ]]; then + echo "ERROR: Could not find first migration file." + exit 1 +fi + +echo "First migration file: ${first_migration_file}" +echo "" + +# Find the git commit where the first migration file was added +first_migration_commit=$(git log --follow --format=%H --reverse "${first_migration_file}" | head -n 1) +if [[ -z $first_migration_commit ]]; then + echo "ERROR: Could not find first migration commit hash."
+ exit 1 +fi + +echo "Starting containers and initializing database at commit ${first_migration_commit}" +git checkout -q "${first_migration_commit}" +compose_cmd down +compose_cmd up -d pg +run_in_container "./bin/util/initialize_instance" +initialize_exit_code=$? +if [[ $initialize_exit_code -ne 0 ]]; then + echo "ERROR: Failed to initialize instance." + cleanup + exit $initialize_exit_code +fi +echo "" + +# Migrate up to the current commit and check if the database is in sync +echo "Testing upgrade migrations on branch ${current_branch}" +git checkout -q "${current_branch}" +run_migrations "upgrade head" +check_db "A new migration is required or an up migration is broken." + +# Migrate down to the first migration and check if the database is in sync +echo "Testing downgrade migrations" +run_migrations "downgrade ${first_migration_id}" +git checkout -q "${first_migration_commit}" +check_db "A down migration is broken." + +# Migrate back up once more to make sure that the database is still in sync +echo "Testing upgrade migrations a second time" +git checkout -q "${current_branch}" +run_migrations "upgrade head" +check_db "An up migration is likely broken." + +cleanup diff --git a/docker/config.py_admin b/docker/config.py_admin deleted file mode 100755 index fbaf1f08f..000000000 --- a/docker/config.py_admin +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env python3 - -import os -from enum import Enum -from typing import Optional -from urllib.parse import urljoin - - -class OperationalMode(str, Enum): - production = "production" - development = "development" - - -class Configuration: - APP_NAME = "E-kirjasto Collection Manager" - PACKAGE_NAME = "@natlibfi/ekirjasto-circulation-admin" - PACKAGE_VERSION = "1.7.0" - - STATIC_ASSETS = { - "admin_js": "circulation-admin.js", - "admin_css": "circulation-admin.css", - "admin_logo": "PalaceCollectionManagerLogo.svg", - } - - # For proper operation, `package_url` MUST end with a slash ('/') and - # `asset_rel_url` MUST NOT begin with one. - # (Finland: Modified to serve static files from admin-ui build in production) - PACKAGE_TEMPLATES = { - OperationalMode.production: { - "package_url": "/admin/", - "asset_rel_url": "static/{filename}", - }, - OperationalMode.development: { - "package_url": "/admin/", - "asset_rel_url": "static/{filename}", - }, - } - - DEVELOPMENT_MODE_PACKAGE_TEMPLATE = "node_modules/{name}" - STATIC_ASSETS_REL_PATH = "dist" - - ADMIN_DIRECTORY = os.path.abspath(os.path.dirname(__file__)) - - # Environment variables that contain admin client package information. - ENV_ADMIN_UI_PACKAGE_NAME = "TPP_CIRCULATION_ADMIN_PACKAGE_NAME" - ENV_ADMIN_UI_PACKAGE_VERSION = "TPP_CIRCULATION_ADMIN_PACKAGE_VERSION" - - @classmethod - def operational_mode(cls) -> OperationalMode: - return ( - OperationalMode.development - if os.path.isdir(cls.package_development_directory()) - else OperationalMode.production - ) - - @classmethod - def package_name(cls) -> str: - """Get the effective package name. - - :return: A package name. - :rtype: str - """ - return os.environ.get(cls.ENV_ADMIN_UI_PACKAGE_NAME) or cls.PACKAGE_NAME - - @classmethod - def package_version(cls) -> str: - """Get the effective package version. - - :return Package verison. - """ - return os.environ.get(cls.ENV_ADMIN_UI_PACKAGE_VERSION) or cls.PACKAGE_VERSION - - @classmethod - def lookup_asset_url( - cls, key: str, *, _operational_mode: Optional[OperationalMode] = None - ) -> str: - """Get the URL for the asset_type. - - :param key: The key used to lookup an asset's filename.
If the key is - not found in the asset list, then the key itself is used as the asset. - :type key: str - :param _operational_mode: Provided for testing purposes. The operational - mode is normally determined by local state - :type _operational_mode: OperationalMode - :return: A URL string. - :rtype: str - """ - operational_mode = _operational_mode or cls.operational_mode() - filename = cls.STATIC_ASSETS.get(key, key) - return urljoin( - cls.package_url(_operational_mode=operational_mode), - cls.PACKAGE_TEMPLATES[operational_mode]["asset_rel_url"].format( - filename=filename - ), - ) - - @classmethod - def package_url(cls, *, _operational_mode: Optional[OperationalMode] = None) -> str: - """Compute the URL for the admin UI package. - - :param _operational_mode: For testing. The operational mode is - normally determined by local state. - :type _operational_mode: OperationalMode - :return: String representation of the URL/path for either the asset - of the given type or, if no type is specified, the base path - of the package. - :rtype: str - """ - operational_mode = _operational_mode or cls.operational_mode() - template = cls.PACKAGE_TEMPLATES[operational_mode]["package_url"] - url = template.format(name=cls.package_name(), version=cls.package_version()) - if not url.endswith("/"): - url += "/" - return url - - @classmethod - def package_development_directory(cls, *, _base_dir: Optional[str] = None) -> str: - """Absolute path for the admin UI package when in development mode. - - :param _base_dir: For testing purposes. Not used in normal operation. - :type _base_dir: str - :returns: String containing absolute path to the admin UI package. - :rtype: str - """ - base_dir = _base_dir or cls.ADMIN_DIRECTORY - return os.path.join( - base_dir, - cls.DEVELOPMENT_MODE_PACKAGE_TEMPLATE.format(name=cls.package_name()), - ) - - @classmethod - def static_files_directory(cls, *, _base_dir: Optional[str] = None) -> str: - """Absolute path for the admin UI static files. - (Finland: Modified to serve static files from admin-ui build in production) - - :param _base_dir: For testing purposes. Not used in normal operation. - :type _base_dir: str - :returns: String containing absolute path to the admin UI package. - :rtype: str - """ - operational_mode = cls.operational_mode() - package_dir = ( - cls.package_development_directory(_base_dir=_base_dir) - if operational_mode == OperationalMode.development - else cls.ADMIN_DIRECTORY - ) - return os.path.join(package_dir, cls.STATIC_ASSETS_REL_PATH) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 744a67097..ae3c77a36 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -32,9 +32,6 @@ HOME=/var/www/circulation # those works. 
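The notification hunk further below in this crontab diff moves loan and hold notifications from every hour (`10 * * * *`) to every eighth hour (`10 */8 * * *`). A quick pure-Python sanity check, illustrative only and not part of the diff, of what that schedule expands to:

# "10 */8 * * *" fires at minute 10 of hours 0, 8 and 16.
run_times = [f"{hour:02d}:10" for hour in range(0, 24, 8)]
print(run_times)  # ['00:10', '08:10', '16:10'] -- three runs per day instead of 24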
30 22 * * * root core/bin/run work_classify_unchecked_subjects >> /var/log/cron.log 2>&1 -# If any works have out-of-date OPDS entries or MARC records, rebuild them, -40 23 * * * root core/bin/run marc_record_coverage >> /var/log/cron.log 2>&1 - # Remove miscellaneous expired things from the database 0 2 * * * root core/bin/run database_reaper >> /var/log/cron.log 2>&1 @@ -106,8 +103,8 @@ HOME=/var/www/circulation # Notifications # -10 * * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 -15 * * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 +10 */8 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 +15 */8 * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 0 1 * * * root core/bin/run patron_activity_sync_notifications >> /var/log/cron.log 2>&1 # Audiobook playtimes diff --git a/poetry.lock b/poetry.lock index 4d07529ad..920695ab3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,25 +2,22 @@ [[package]] name = "alembic" -version = "1.12.1" +version = "1.13.1" description = "A database migration tool for SQLAlchemy." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, - {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, + {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, + {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, ] [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} Mako = "*" -python-dateutil = {version = "*", optional = true, markers = "extra == \"tz\""} SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" [package.extras] -tz = ["python-dateutil"] +tz = ["backports.zoneinfo"] [[package]] name = "attrs" @@ -66,40 +63,43 @@ files = [ {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, ] -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = 
"bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] @@ -137,433 +137,446 @@ files = [ [[package]] name = "boto3" -version = "1.29.0" +version = "1.34.0" description = "The AWS SDK for Python" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "boto3-1.29.0-py3-none-any.whl", hash = "sha256:91c72fa4848eda9311c273db667946bd9d953285ae8d54b7bbad541b74adc254"}, - {file = "boto3-1.29.0.tar.gz", hash = "sha256:3e90ea2faa3e9892b9140f857911f9ef0013192a106f50d0ec7b71e8d1afc90a"}, + {file = "boto3-1.34.0-py3-none-any.whl", hash = "sha256:8b3c4d4e720c0ad706590c284b8f30c76de3472c1ce1bac610425f99bf6ab53b"}, + {file = "boto3-1.34.0.tar.gz", hash = "sha256:c9b400529932ed4652304756528ab235c6730aa5d00cb4d9e4848ce460c82c16"}, ] [package.dependencies] -botocore = ">=1.32.0,<1.33.0" +botocore = ">=1.34.0,<1.35.0" 
jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.7.0,<0.8.0" +s3transfer = ">=0.9.0,<0.10.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.29.0" -description = "Type annotations for boto3 1.29.0 generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3 1.34.0 generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.29.0.tar.gz", hash = "sha256:897cb22cbf7971809cac10470121ac194a5cc57d5fb3d8bfec09e07b3cb7646b"}, - {file = "boto3_stubs-1.29.0-py3-none-any.whl", hash = "sha256:e0ffd497ebd63b5d66b7eeef3192201be5453e8e5f449c864dd23877cf18fe3e"}, + {file = "boto3-stubs-1.34.0.tar.gz", hash = "sha256:39ad9a9ab399c012713a719d90feb1dee819d310f136a0c7d7fddc477d7f251e"}, + {file = "boto3_stubs-1.34.0-py3-none-any.whl", hash = "sha256:477b7da7432ab26123324249411111f2350e0b5ef0418e7bc1124600c810ac41"}, ] [package.dependencies] -boto3 = {version = "1.29.0", optional = true, markers = "extra == \"boto3\""} -botocore = {version = "1.32.0", optional = true, markers = "extra == \"boto3\""} +boto3 = {version = "1.34.0", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.34.0", optional = true, markers = "extra == \"boto3\""} botocore-stubs = "*" -mypy-boto3-cloudformation = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-dynamodb = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-ec2 = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-lambda = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-logs = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"logs\""} -mypy-boto3-rds = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-s3 = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} -mypy-boto3-sqs = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-cloudformation = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-dynamodb = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-ec2 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-lambda = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-logs = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"logs\""} +mypy-boto3-rds = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-s3 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} +mypy-boto3-sqs = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} types-s3transfer = "*" typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.29.0,<1.30.0)"] -account = ["mypy-boto3-account (>=1.29.0,<1.30.0)"] -acm = ["mypy-boto3-acm (>=1.29.0,<1.30.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.29.0,<1.30.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.29.0,<1.30.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.29.0,<1.30.0)", "mypy-boto3-account (>=1.29.0,<1.30.0)", 
"mypy-boto3-acm (>=1.29.0,<1.30.0)", "mypy-boto3-acm-pca (>=1.29.0,<1.30.0)", "mypy-boto3-alexaforbusiness (>=1.29.0,<1.30.0)", "mypy-boto3-amp (>=1.29.0,<1.30.0)", "mypy-boto3-amplify (>=1.29.0,<1.30.0)", "mypy-boto3-amplifybackend (>=1.29.0,<1.30.0)", "mypy-boto3-amplifyuibuilder (>=1.29.0,<1.30.0)", "mypy-boto3-apigateway (>=1.29.0,<1.30.0)", "mypy-boto3-apigatewaymanagementapi (>=1.29.0,<1.30.0)", "mypy-boto3-apigatewayv2 (>=1.29.0,<1.30.0)", "mypy-boto3-appconfig (>=1.29.0,<1.30.0)", "mypy-boto3-appconfigdata (>=1.29.0,<1.30.0)", "mypy-boto3-appfabric (>=1.29.0,<1.30.0)", "mypy-boto3-appflow (>=1.29.0,<1.30.0)", "mypy-boto3-appintegrations (>=1.29.0,<1.30.0)", "mypy-boto3-application-autoscaling (>=1.29.0,<1.30.0)", "mypy-boto3-application-insights (>=1.29.0,<1.30.0)", "mypy-boto3-applicationcostprofiler (>=1.29.0,<1.30.0)", "mypy-boto3-appmesh (>=1.29.0,<1.30.0)", "mypy-boto3-apprunner (>=1.29.0,<1.30.0)", "mypy-boto3-appstream (>=1.29.0,<1.30.0)", "mypy-boto3-appsync (>=1.29.0,<1.30.0)", "mypy-boto3-arc-zonal-shift (>=1.29.0,<1.30.0)", "mypy-boto3-athena (>=1.29.0,<1.30.0)", "mypy-boto3-auditmanager (>=1.29.0,<1.30.0)", "mypy-boto3-autoscaling (>=1.29.0,<1.30.0)", "mypy-boto3-autoscaling-plans (>=1.29.0,<1.30.0)", "mypy-boto3-backup (>=1.29.0,<1.30.0)", "mypy-boto3-backup-gateway (>=1.29.0,<1.30.0)", "mypy-boto3-backupstorage (>=1.29.0,<1.30.0)", "mypy-boto3-batch (>=1.29.0,<1.30.0)", "mypy-boto3-bedrock (>=1.29.0,<1.30.0)", "mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-billingconductor (>=1.29.0,<1.30.0)", "mypy-boto3-braket (>=1.29.0,<1.30.0)", "mypy-boto3-budgets (>=1.29.0,<1.30.0)", "mypy-boto3-ce (>=1.29.0,<1.30.0)", "mypy-boto3-chime (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-identity (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-meetings (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-messaging (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-voice (>=1.29.0,<1.30.0)", "mypy-boto3-cleanrooms (>=1.29.0,<1.30.0)", "mypy-boto3-cloud9 (>=1.29.0,<1.30.0)", "mypy-boto3-cloudcontrol (>=1.29.0,<1.30.0)", "mypy-boto3-clouddirectory (>=1.29.0,<1.30.0)", "mypy-boto3-cloudformation (>=1.29.0,<1.30.0)", "mypy-boto3-cloudfront (>=1.29.0,<1.30.0)", "mypy-boto3-cloudhsm (>=1.29.0,<1.30.0)", "mypy-boto3-cloudhsmv2 (>=1.29.0,<1.30.0)", "mypy-boto3-cloudsearch (>=1.29.0,<1.30.0)", "mypy-boto3-cloudsearchdomain (>=1.29.0,<1.30.0)", "mypy-boto3-cloudtrail (>=1.29.0,<1.30.0)", "mypy-boto3-cloudtrail-data (>=1.29.0,<1.30.0)", "mypy-boto3-cloudwatch (>=1.29.0,<1.30.0)", "mypy-boto3-codeartifact (>=1.29.0,<1.30.0)", "mypy-boto3-codebuild (>=1.29.0,<1.30.0)", "mypy-boto3-codecatalyst (>=1.29.0,<1.30.0)", "mypy-boto3-codecommit (>=1.29.0,<1.30.0)", "mypy-boto3-codedeploy (>=1.29.0,<1.30.0)", "mypy-boto3-codeguru-reviewer (>=1.29.0,<1.30.0)", "mypy-boto3-codeguru-security (>=1.29.0,<1.30.0)", "mypy-boto3-codeguruprofiler (>=1.29.0,<1.30.0)", "mypy-boto3-codepipeline (>=1.29.0,<1.30.0)", "mypy-boto3-codestar (>=1.29.0,<1.30.0)", "mypy-boto3-codestar-connections (>=1.29.0,<1.30.0)", "mypy-boto3-codestar-notifications (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-identity (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-idp (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-sync (>=1.29.0,<1.30.0)", "mypy-boto3-comprehend (>=1.29.0,<1.30.0)", "mypy-boto3-comprehendmedical (>=1.29.0,<1.30.0)", "mypy-boto3-compute-optimizer (>=1.29.0,<1.30.0)", "mypy-boto3-config (>=1.29.0,<1.30.0)", "mypy-boto3-connect (>=1.29.0,<1.30.0)", "mypy-boto3-connect-contact-lens 
(>=1.29.0,<1.30.0)", "mypy-boto3-connectcampaigns (>=1.29.0,<1.30.0)", "mypy-boto3-connectcases (>=1.29.0,<1.30.0)", "mypy-boto3-connectparticipant (>=1.29.0,<1.30.0)", "mypy-boto3-controltower (>=1.29.0,<1.30.0)", "mypy-boto3-cur (>=1.29.0,<1.30.0)", "mypy-boto3-customer-profiles (>=1.29.0,<1.30.0)", "mypy-boto3-databrew (>=1.29.0,<1.30.0)", "mypy-boto3-dataexchange (>=1.29.0,<1.30.0)", "mypy-boto3-datapipeline (>=1.29.0,<1.30.0)", "mypy-boto3-datasync (>=1.29.0,<1.30.0)", "mypy-boto3-datazone (>=1.29.0,<1.30.0)", "mypy-boto3-dax (>=1.29.0,<1.30.0)", "mypy-boto3-detective (>=1.29.0,<1.30.0)", "mypy-boto3-devicefarm (>=1.29.0,<1.30.0)", "mypy-boto3-devops-guru (>=1.29.0,<1.30.0)", "mypy-boto3-directconnect (>=1.29.0,<1.30.0)", "mypy-boto3-discovery (>=1.29.0,<1.30.0)", "mypy-boto3-dlm (>=1.29.0,<1.30.0)", "mypy-boto3-dms (>=1.29.0,<1.30.0)", "mypy-boto3-docdb (>=1.29.0,<1.30.0)", "mypy-boto3-docdb-elastic (>=1.29.0,<1.30.0)", "mypy-boto3-drs (>=1.29.0,<1.30.0)", "mypy-boto3-ds (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodb (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodbstreams (>=1.29.0,<1.30.0)", "mypy-boto3-ebs (>=1.29.0,<1.30.0)", "mypy-boto3-ec2 (>=1.29.0,<1.30.0)", "mypy-boto3-ec2-instance-connect (>=1.29.0,<1.30.0)", "mypy-boto3-ecr (>=1.29.0,<1.30.0)", "mypy-boto3-ecr-public (>=1.29.0,<1.30.0)", "mypy-boto3-ecs (>=1.29.0,<1.30.0)", "mypy-boto3-efs (>=1.29.0,<1.30.0)", "mypy-boto3-eks (>=1.29.0,<1.30.0)", "mypy-boto3-elastic-inference (>=1.29.0,<1.30.0)", "mypy-boto3-elasticache (>=1.29.0,<1.30.0)", "mypy-boto3-elasticbeanstalk (>=1.29.0,<1.30.0)", "mypy-boto3-elastictranscoder (>=1.29.0,<1.30.0)", "mypy-boto3-elb (>=1.29.0,<1.30.0)", "mypy-boto3-elbv2 (>=1.29.0,<1.30.0)", "mypy-boto3-emr (>=1.29.0,<1.30.0)", "mypy-boto3-emr-containers (>=1.29.0,<1.30.0)", "mypy-boto3-emr-serverless (>=1.29.0,<1.30.0)", "mypy-boto3-entityresolution (>=1.29.0,<1.30.0)", "mypy-boto3-es (>=1.29.0,<1.30.0)", "mypy-boto3-events (>=1.29.0,<1.30.0)", "mypy-boto3-evidently (>=1.29.0,<1.30.0)", "mypy-boto3-finspace (>=1.29.0,<1.30.0)", "mypy-boto3-finspace-data (>=1.29.0,<1.30.0)", "mypy-boto3-firehose (>=1.29.0,<1.30.0)", "mypy-boto3-fis (>=1.29.0,<1.30.0)", "mypy-boto3-fms (>=1.29.0,<1.30.0)", "mypy-boto3-forecast (>=1.29.0,<1.30.0)", "mypy-boto3-forecastquery (>=1.29.0,<1.30.0)", "mypy-boto3-frauddetector (>=1.29.0,<1.30.0)", "mypy-boto3-fsx (>=1.29.0,<1.30.0)", "mypy-boto3-gamelift (>=1.29.0,<1.30.0)", "mypy-boto3-glacier (>=1.29.0,<1.30.0)", "mypy-boto3-globalaccelerator (>=1.29.0,<1.30.0)", "mypy-boto3-glue (>=1.29.0,<1.30.0)", "mypy-boto3-grafana (>=1.29.0,<1.30.0)", "mypy-boto3-greengrass (>=1.29.0,<1.30.0)", "mypy-boto3-greengrassv2 (>=1.29.0,<1.30.0)", "mypy-boto3-groundstation (>=1.29.0,<1.30.0)", "mypy-boto3-guardduty (>=1.29.0,<1.30.0)", "mypy-boto3-health (>=1.29.0,<1.30.0)", "mypy-boto3-healthlake (>=1.29.0,<1.30.0)", "mypy-boto3-honeycode (>=1.29.0,<1.30.0)", "mypy-boto3-iam (>=1.29.0,<1.30.0)", "mypy-boto3-identitystore (>=1.29.0,<1.30.0)", "mypy-boto3-imagebuilder (>=1.29.0,<1.30.0)", "mypy-boto3-importexport (>=1.29.0,<1.30.0)", "mypy-boto3-inspector (>=1.29.0,<1.30.0)", "mypy-boto3-inspector2 (>=1.29.0,<1.30.0)", "mypy-boto3-internetmonitor (>=1.29.0,<1.30.0)", "mypy-boto3-iot (>=1.29.0,<1.30.0)", "mypy-boto3-iot-data (>=1.29.0,<1.30.0)", "mypy-boto3-iot-jobs-data (>=1.29.0,<1.30.0)", "mypy-boto3-iot-roborunner (>=1.29.0,<1.30.0)", "mypy-boto3-iot1click-devices (>=1.29.0,<1.30.0)", "mypy-boto3-iot1click-projects (>=1.29.0,<1.30.0)", "mypy-boto3-iotanalytics (>=1.29.0,<1.30.0)", 
"mypy-boto3-iotdeviceadvisor (>=1.29.0,<1.30.0)", "mypy-boto3-iotevents (>=1.29.0,<1.30.0)", "mypy-boto3-iotevents-data (>=1.29.0,<1.30.0)", "mypy-boto3-iotfleethub (>=1.29.0,<1.30.0)", "mypy-boto3-iotfleetwise (>=1.29.0,<1.30.0)", "mypy-boto3-iotsecuretunneling (>=1.29.0,<1.30.0)", "mypy-boto3-iotsitewise (>=1.29.0,<1.30.0)", "mypy-boto3-iotthingsgraph (>=1.29.0,<1.30.0)", "mypy-boto3-iottwinmaker (>=1.29.0,<1.30.0)", "mypy-boto3-iotwireless (>=1.29.0,<1.30.0)", "mypy-boto3-ivs (>=1.29.0,<1.30.0)", "mypy-boto3-ivs-realtime (>=1.29.0,<1.30.0)", "mypy-boto3-ivschat (>=1.29.0,<1.30.0)", "mypy-boto3-kafka (>=1.29.0,<1.30.0)", "mypy-boto3-kafkaconnect (>=1.29.0,<1.30.0)", "mypy-boto3-kendra (>=1.29.0,<1.30.0)", "mypy-boto3-kendra-ranking (>=1.29.0,<1.30.0)", "mypy-boto3-keyspaces (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-archived-media (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-media (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-signaling (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisvideo (>=1.29.0,<1.30.0)", "mypy-boto3-kms (>=1.29.0,<1.30.0)", "mypy-boto3-lakeformation (>=1.29.0,<1.30.0)", "mypy-boto3-lambda (>=1.29.0,<1.30.0)", "mypy-boto3-launch-wizard (>=1.29.0,<1.30.0)", "mypy-boto3-lex-models (>=1.29.0,<1.30.0)", "mypy-boto3-lex-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-lexv2-models (>=1.29.0,<1.30.0)", "mypy-boto3-lexv2-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.29.0,<1.30.0)", "mypy-boto3-lightsail (>=1.29.0,<1.30.0)", "mypy-boto3-location (>=1.29.0,<1.30.0)", "mypy-boto3-logs (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutequipment (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutmetrics (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutvision (>=1.29.0,<1.30.0)", "mypy-boto3-m2 (>=1.29.0,<1.30.0)", "mypy-boto3-machinelearning (>=1.29.0,<1.30.0)", "mypy-boto3-macie (>=1.29.0,<1.30.0)", "mypy-boto3-macie2 (>=1.29.0,<1.30.0)", "mypy-boto3-managedblockchain (>=1.29.0,<1.30.0)", "mypy-boto3-managedblockchain-query (>=1.29.0,<1.30.0)", "mypy-boto3-marketplace-catalog (>=1.29.0,<1.30.0)", "mypy-boto3-marketplace-entitlement (>=1.29.0,<1.30.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-mediaconnect (>=1.29.0,<1.30.0)", "mypy-boto3-mediaconvert (>=1.29.0,<1.30.0)", "mypy-boto3-medialive (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackage (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackage-vod (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackagev2 (>=1.29.0,<1.30.0)", "mypy-boto3-mediastore (>=1.29.0,<1.30.0)", "mypy-boto3-mediastore-data (>=1.29.0,<1.30.0)", "mypy-boto3-mediatailor (>=1.29.0,<1.30.0)", "mypy-boto3-medical-imaging (>=1.29.0,<1.30.0)", "mypy-boto3-memorydb (>=1.29.0,<1.30.0)", "mypy-boto3-meteringmarketplace (>=1.29.0,<1.30.0)", "mypy-boto3-mgh (>=1.29.0,<1.30.0)", "mypy-boto3-mgn (>=1.29.0,<1.30.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhub-config (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhuborchestrator (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhubstrategy (>=1.29.0,<1.30.0)", "mypy-boto3-mobile (>=1.29.0,<1.30.0)", "mypy-boto3-mq (>=1.29.0,<1.30.0)", "mypy-boto3-mturk (>=1.29.0,<1.30.0)", "mypy-boto3-mwaa (>=1.29.0,<1.30.0)", "mypy-boto3-neptune (>=1.29.0,<1.30.0)", 
"mypy-boto3-neptunedata (>=1.29.0,<1.30.0)", "mypy-boto3-network-firewall (>=1.29.0,<1.30.0)", "mypy-boto3-networkmanager (>=1.29.0,<1.30.0)", "mypy-boto3-nimble (>=1.29.0,<1.30.0)", "mypy-boto3-oam (>=1.29.0,<1.30.0)", "mypy-boto3-omics (>=1.29.0,<1.30.0)", "mypy-boto3-opensearch (>=1.29.0,<1.30.0)", "mypy-boto3-opensearchserverless (>=1.29.0,<1.30.0)", "mypy-boto3-opsworks (>=1.29.0,<1.30.0)", "mypy-boto3-opsworkscm (>=1.29.0,<1.30.0)", "mypy-boto3-organizations (>=1.29.0,<1.30.0)", "mypy-boto3-osis (>=1.29.0,<1.30.0)", "mypy-boto3-outposts (>=1.29.0,<1.30.0)", "mypy-boto3-panorama (>=1.29.0,<1.30.0)", "mypy-boto3-payment-cryptography (>=1.29.0,<1.30.0)", "mypy-boto3-payment-cryptography-data (>=1.29.0,<1.30.0)", "mypy-boto3-pca-connector-ad (>=1.29.0,<1.30.0)", "mypy-boto3-personalize (>=1.29.0,<1.30.0)", "mypy-boto3-personalize-events (>=1.29.0,<1.30.0)", "mypy-boto3-personalize-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-pi (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-email (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-sms-voice (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.29.0,<1.30.0)", "mypy-boto3-pipes (>=1.29.0,<1.30.0)", "mypy-boto3-polly (>=1.29.0,<1.30.0)", "mypy-boto3-pricing (>=1.29.0,<1.30.0)", "mypy-boto3-privatenetworks (>=1.29.0,<1.30.0)", "mypy-boto3-proton (>=1.29.0,<1.30.0)", "mypy-boto3-qldb (>=1.29.0,<1.30.0)", "mypy-boto3-qldb-session (>=1.29.0,<1.30.0)", "mypy-boto3-quicksight (>=1.29.0,<1.30.0)", "mypy-boto3-ram (>=1.29.0,<1.30.0)", "mypy-boto3-rbin (>=1.29.0,<1.30.0)", "mypy-boto3-rds (>=1.29.0,<1.30.0)", "mypy-boto3-rds-data (>=1.29.0,<1.30.0)", "mypy-boto3-redshift (>=1.29.0,<1.30.0)", "mypy-boto3-redshift-data (>=1.29.0,<1.30.0)", "mypy-boto3-redshift-serverless (>=1.29.0,<1.30.0)", "mypy-boto3-rekognition (>=1.29.0,<1.30.0)", "mypy-boto3-resiliencehub (>=1.29.0,<1.30.0)", "mypy-boto3-resource-explorer-2 (>=1.29.0,<1.30.0)", "mypy-boto3-resource-groups (>=1.29.0,<1.30.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.29.0,<1.30.0)", "mypy-boto3-robomaker (>=1.29.0,<1.30.0)", "mypy-boto3-rolesanywhere (>=1.29.0,<1.30.0)", "mypy-boto3-route53 (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-cluster (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-control-config (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-readiness (>=1.29.0,<1.30.0)", "mypy-boto3-route53domains (>=1.29.0,<1.30.0)", "mypy-boto3-route53resolver (>=1.29.0,<1.30.0)", "mypy-boto3-rum (>=1.29.0,<1.30.0)", "mypy-boto3-s3 (>=1.29.0,<1.30.0)", "mypy-boto3-s3control (>=1.29.0,<1.30.0)", "mypy-boto3-s3outposts (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-edge (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-geospatial (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-metrics (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-savingsplans (>=1.29.0,<1.30.0)", "mypy-boto3-scheduler (>=1.29.0,<1.30.0)", "mypy-boto3-schemas (>=1.29.0,<1.30.0)", "mypy-boto3-sdb (>=1.29.0,<1.30.0)", "mypy-boto3-secretsmanager (>=1.29.0,<1.30.0)", "mypy-boto3-securityhub (>=1.29.0,<1.30.0)", "mypy-boto3-securitylake (>=1.29.0,<1.30.0)", "mypy-boto3-serverlessrepo (>=1.29.0,<1.30.0)", "mypy-boto3-service-quotas (>=1.29.0,<1.30.0)", "mypy-boto3-servicecatalog (>=1.29.0,<1.30.0)", "mypy-boto3-servicecatalog-appregistry (>=1.29.0,<1.30.0)", "mypy-boto3-servicediscovery (>=1.29.0,<1.30.0)", "mypy-boto3-ses 
(>=1.29.0,<1.30.0)", "mypy-boto3-sesv2 (>=1.29.0,<1.30.0)", "mypy-boto3-shield (>=1.29.0,<1.30.0)", "mypy-boto3-signer (>=1.29.0,<1.30.0)", "mypy-boto3-simspaceweaver (>=1.29.0,<1.30.0)", "mypy-boto3-sms (>=1.29.0,<1.30.0)", "mypy-boto3-sms-voice (>=1.29.0,<1.30.0)", "mypy-boto3-snow-device-management (>=1.29.0,<1.30.0)", "mypy-boto3-snowball (>=1.29.0,<1.30.0)", "mypy-boto3-sns (>=1.29.0,<1.30.0)", "mypy-boto3-sqs (>=1.29.0,<1.30.0)", "mypy-boto3-ssm (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-contacts (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-incidents (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-sap (>=1.29.0,<1.30.0)", "mypy-boto3-sso (>=1.29.0,<1.30.0)", "mypy-boto3-sso-admin (>=1.29.0,<1.30.0)", "mypy-boto3-sso-oidc (>=1.29.0,<1.30.0)", "mypy-boto3-stepfunctions (>=1.29.0,<1.30.0)", "mypy-boto3-storagegateway (>=1.29.0,<1.30.0)", "mypy-boto3-sts (>=1.29.0,<1.30.0)", "mypy-boto3-support (>=1.29.0,<1.30.0)", "mypy-boto3-support-app (>=1.29.0,<1.30.0)", "mypy-boto3-swf (>=1.29.0,<1.30.0)", "mypy-boto3-synthetics (>=1.29.0,<1.30.0)", "mypy-boto3-textract (>=1.29.0,<1.30.0)", "mypy-boto3-timestream-query (>=1.29.0,<1.30.0)", "mypy-boto3-timestream-write (>=1.29.0,<1.30.0)", "mypy-boto3-tnb (>=1.29.0,<1.30.0)", "mypy-boto3-transcribe (>=1.29.0,<1.30.0)", "mypy-boto3-transfer (>=1.29.0,<1.30.0)", "mypy-boto3-translate (>=1.29.0,<1.30.0)", "mypy-boto3-verifiedpermissions (>=1.29.0,<1.30.0)", "mypy-boto3-voice-id (>=1.29.0,<1.30.0)", "mypy-boto3-vpc-lattice (>=1.29.0,<1.30.0)", "mypy-boto3-waf (>=1.29.0,<1.30.0)", "mypy-boto3-waf-regional (>=1.29.0,<1.30.0)", "mypy-boto3-wafv2 (>=1.29.0,<1.30.0)", "mypy-boto3-wellarchitected (>=1.29.0,<1.30.0)", "mypy-boto3-wisdom (>=1.29.0,<1.30.0)", "mypy-boto3-workdocs (>=1.29.0,<1.30.0)", "mypy-boto3-worklink (>=1.29.0,<1.30.0)", "mypy-boto3-workmail (>=1.29.0,<1.30.0)", "mypy-boto3-workmailmessageflow (>=1.29.0,<1.30.0)", "mypy-boto3-workspaces (>=1.29.0,<1.30.0)", "mypy-boto3-workspaces-web (>=1.29.0,<1.30.0)", "mypy-boto3-xray (>=1.29.0,<1.30.0)"] -amp = ["mypy-boto3-amp (>=1.29.0,<1.30.0)"] -amplify = ["mypy-boto3-amplify (>=1.29.0,<1.30.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.29.0,<1.30.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.29.0,<1.30.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.29.0,<1.30.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.29.0,<1.30.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.29.0,<1.30.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.29.0,<1.30.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.29.0,<1.30.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.29.0,<1.30.0)"] -appflow = ["mypy-boto3-appflow (>=1.29.0,<1.30.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.29.0,<1.30.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.29.0,<1.30.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.29.0,<1.30.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.29.0,<1.30.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.29.0,<1.30.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.29.0,<1.30.0)"] -appstream = ["mypy-boto3-appstream (>=1.29.0,<1.30.0)"] -appsync = ["mypy-boto3-appsync (>=1.29.0,<1.30.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.29.0,<1.30.0)"] -athena = ["mypy-boto3-athena (>=1.29.0,<1.30.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.29.0,<1.30.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.29.0,<1.30.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.29.0,<1.30.0)"] -backup = 
["mypy-boto3-backup (>=1.29.0,<1.30.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.29.0,<1.30.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.29.0,<1.30.0)"] -batch = ["mypy-boto3-batch (>=1.29.0,<1.30.0)"] -bedrock = ["mypy-boto3-bedrock (>=1.29.0,<1.30.0)"] -bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.29.0,<1.30.0)"] -boto3 = ["boto3 (==1.29.0)", "botocore (==1.32.0)"] -braket = ["mypy-boto3-braket (>=1.29.0,<1.30.0)"] -budgets = ["mypy-boto3-budgets (>=1.29.0,<1.30.0)"] -ce = ["mypy-boto3-ce (>=1.29.0,<1.30.0)"] -chime = ["mypy-boto3-chime (>=1.29.0,<1.30.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.29.0,<1.30.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.29.0,<1.30.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.29.0,<1.30.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.29.0,<1.30.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.29.0,<1.30.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.29.0,<1.30.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.29.0,<1.30.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.29.0,<1.30.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.29.0,<1.30.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.29.0,<1.30.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.29.0,<1.30.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.29.0,<1.30.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.29.0,<1.30.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.29.0,<1.30.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.29.0,<1.30.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.29.0,<1.30.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.29.0,<1.30.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.29.0,<1.30.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.29.0,<1.30.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.29.0,<1.30.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.29.0,<1.30.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.29.0,<1.30.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.29.0,<1.30.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.29.0,<1.30.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.29.0,<1.30.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.29.0,<1.30.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.29.0,<1.30.0)"] -codestar = ["mypy-boto3-codestar (>=1.29.0,<1.30.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.29.0,<1.30.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.29.0,<1.30.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.29.0,<1.30.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.29.0,<1.30.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.29.0,<1.30.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.29.0,<1.30.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.29.0,<1.30.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.29.0,<1.30.0)"] -config = ["mypy-boto3-config (>=1.29.0,<1.30.0)"] -connect = ["mypy-boto3-connect (>=1.29.0,<1.30.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.29.0,<1.30.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.29.0,<1.30.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.29.0,<1.30.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.29.0,<1.30.0)"] -controltower = ["mypy-boto3-controltower (>=1.29.0,<1.30.0)"] -cur = ["mypy-boto3-cur (>=1.29.0,<1.30.0)"] 
-customer-profiles = ["mypy-boto3-customer-profiles (>=1.29.0,<1.30.0)"] -databrew = ["mypy-boto3-databrew (>=1.29.0,<1.30.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.29.0,<1.30.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.29.0,<1.30.0)"] -datasync = ["mypy-boto3-datasync (>=1.29.0,<1.30.0)"] -datazone = ["mypy-boto3-datazone (>=1.29.0,<1.30.0)"] -dax = ["mypy-boto3-dax (>=1.29.0,<1.30.0)"] -detective = ["mypy-boto3-detective (>=1.29.0,<1.30.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.29.0,<1.30.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.29.0,<1.30.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.29.0,<1.30.0)"] -discovery = ["mypy-boto3-discovery (>=1.29.0,<1.30.0)"] -dlm = ["mypy-boto3-dlm (>=1.29.0,<1.30.0)"] -dms = ["mypy-boto3-dms (>=1.29.0,<1.30.0)"] -docdb = ["mypy-boto3-docdb (>=1.29.0,<1.30.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.29.0,<1.30.0)"] -drs = ["mypy-boto3-drs (>=1.29.0,<1.30.0)"] -ds = ["mypy-boto3-ds (>=1.29.0,<1.30.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.29.0,<1.30.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.29.0,<1.30.0)"] -ebs = ["mypy-boto3-ebs (>=1.29.0,<1.30.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.29.0,<1.30.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.29.0,<1.30.0)"] -ecr = ["mypy-boto3-ecr (>=1.29.0,<1.30.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.29.0,<1.30.0)"] -ecs = ["mypy-boto3-ecs (>=1.29.0,<1.30.0)"] -efs = ["mypy-boto3-efs (>=1.29.0,<1.30.0)"] -eks = ["mypy-boto3-eks (>=1.29.0,<1.30.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.29.0,<1.30.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.29.0,<1.30.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.29.0,<1.30.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.29.0,<1.30.0)"] -elb = ["mypy-boto3-elb (>=1.29.0,<1.30.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.29.0,<1.30.0)"] -emr = ["mypy-boto3-emr (>=1.29.0,<1.30.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.29.0,<1.30.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.29.0,<1.30.0)"] -entityresolution = ["mypy-boto3-entityresolution (>=1.29.0,<1.30.0)"] -es = ["mypy-boto3-es (>=1.29.0,<1.30.0)"] -essential = ["mypy-boto3-cloudformation (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodb (>=1.29.0,<1.30.0)", "mypy-boto3-ec2 (>=1.29.0,<1.30.0)", "mypy-boto3-lambda (>=1.29.0,<1.30.0)", "mypy-boto3-rds (>=1.29.0,<1.30.0)", "mypy-boto3-s3 (>=1.29.0,<1.30.0)", "mypy-boto3-sqs (>=1.29.0,<1.30.0)"] -events = ["mypy-boto3-events (>=1.29.0,<1.30.0)"] -evidently = ["mypy-boto3-evidently (>=1.29.0,<1.30.0)"] -finspace = ["mypy-boto3-finspace (>=1.29.0,<1.30.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.29.0,<1.30.0)"] -firehose = ["mypy-boto3-firehose (>=1.29.0,<1.30.0)"] -fis = ["mypy-boto3-fis (>=1.29.0,<1.30.0)"] -fms = ["mypy-boto3-fms (>=1.29.0,<1.30.0)"] -forecast = ["mypy-boto3-forecast (>=1.29.0,<1.30.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.29.0,<1.30.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.29.0,<1.30.0)"] -fsx = ["mypy-boto3-fsx (>=1.29.0,<1.30.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.29.0,<1.30.0)"] -glacier = ["mypy-boto3-glacier (>=1.29.0,<1.30.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.29.0,<1.30.0)"] -glue = ["mypy-boto3-glue (>=1.29.0,<1.30.0)"] -grafana = ["mypy-boto3-grafana (>=1.29.0,<1.30.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.29.0,<1.30.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.29.0,<1.30.0)"] -groundstation = ["mypy-boto3-groundstation 
(>=1.29.0,<1.30.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.29.0,<1.30.0)"] -health = ["mypy-boto3-health (>=1.29.0,<1.30.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.29.0,<1.30.0)"] -honeycode = ["mypy-boto3-honeycode (>=1.29.0,<1.30.0)"] -iam = ["mypy-boto3-iam (>=1.29.0,<1.30.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.29.0,<1.30.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.29.0,<1.30.0)"] -importexport = ["mypy-boto3-importexport (>=1.29.0,<1.30.0)"] -inspector = ["mypy-boto3-inspector (>=1.29.0,<1.30.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.29.0,<1.30.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.29.0,<1.30.0)"] -iot = ["mypy-boto3-iot (>=1.29.0,<1.30.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.29.0,<1.30.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.29.0,<1.30.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.29.0,<1.30.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.29.0,<1.30.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.29.0,<1.30.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.29.0,<1.30.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.29.0,<1.30.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.29.0,<1.30.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.29.0,<1.30.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.29.0,<1.30.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.29.0,<1.30.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.29.0,<1.30.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.29.0,<1.30.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.29.0,<1.30.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.29.0,<1.30.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.29.0,<1.30.0)"] -ivs = ["mypy-boto3-ivs (>=1.29.0,<1.30.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.29.0,<1.30.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.29.0,<1.30.0)"] -kafka = ["mypy-boto3-kafka (>=1.29.0,<1.30.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.29.0,<1.30.0)"] -kendra = ["mypy-boto3-kendra (>=1.29.0,<1.30.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.29.0,<1.30.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.29.0,<1.30.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.29.0,<1.30.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.29.0,<1.30.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.29.0,<1.30.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.29.0,<1.30.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.29.0,<1.30.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.29.0,<1.30.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.29.0,<1.30.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.29.0,<1.30.0)"] -kms = ["mypy-boto3-kms (>=1.29.0,<1.30.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.29.0,<1.30.0)"] -lambda = ["mypy-boto3-lambda (>=1.29.0,<1.30.0)"] -launch-wizard = ["mypy-boto3-launch-wizard (>=1.29.0,<1.30.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.29.0,<1.30.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.29.0,<1.30.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.29.0,<1.30.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.29.0,<1.30.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.29.0,<1.30.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.29.0,<1.30.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions 
(>=1.29.0,<1.30.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.29.0,<1.30.0)"] -location = ["mypy-boto3-location (>=1.29.0,<1.30.0)"] -logs = ["mypy-boto3-logs (>=1.29.0,<1.30.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.29.0,<1.30.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.29.0,<1.30.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.29.0,<1.30.0)"] -m2 = ["mypy-boto3-m2 (>=1.29.0,<1.30.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.29.0,<1.30.0)"] -macie = ["mypy-boto3-macie (>=1.29.0,<1.30.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.29.0,<1.30.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.29.0,<1.30.0)"] -managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.29.0,<1.30.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.29.0,<1.30.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.29.0,<1.30.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.29.0,<1.30.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.29.0,<1.30.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.29.0,<1.30.0)"] -medialive = ["mypy-boto3-medialive (>=1.29.0,<1.30.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.29.0,<1.30.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.29.0,<1.30.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.29.0,<1.30.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.29.0,<1.30.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.29.0,<1.30.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.29.0,<1.30.0)"] -medical-imaging = ["mypy-boto3-medical-imaging (>=1.29.0,<1.30.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.29.0,<1.30.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.29.0,<1.30.0)"] -mgh = ["mypy-boto3-mgh (>=1.29.0,<1.30.0)"] -mgn = ["mypy-boto3-mgn (>=1.29.0,<1.30.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.29.0,<1.30.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.29.0,<1.30.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.29.0,<1.30.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.29.0,<1.30.0)"] -mobile = ["mypy-boto3-mobile (>=1.29.0,<1.30.0)"] -mq = ["mypy-boto3-mq (>=1.29.0,<1.30.0)"] -mturk = ["mypy-boto3-mturk (>=1.29.0,<1.30.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.29.0,<1.30.0)"] -neptune = ["mypy-boto3-neptune (>=1.29.0,<1.30.0)"] -neptunedata = ["mypy-boto3-neptunedata (>=1.29.0,<1.30.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.29.0,<1.30.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.29.0,<1.30.0)"] -nimble = ["mypy-boto3-nimble (>=1.29.0,<1.30.0)"] -oam = ["mypy-boto3-oam (>=1.29.0,<1.30.0)"] -omics = ["mypy-boto3-omics (>=1.29.0,<1.30.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.29.0,<1.30.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.29.0,<1.30.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.29.0,<1.30.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.29.0,<1.30.0)"] -organizations = ["mypy-boto3-organizations (>=1.29.0,<1.30.0)"] -osis = ["mypy-boto3-osis (>=1.29.0,<1.30.0)"] -outposts = ["mypy-boto3-outposts (>=1.29.0,<1.30.0)"] -panorama = ["mypy-boto3-panorama (>=1.29.0,<1.30.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.29.0,<1.30.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.29.0,<1.30.0)"] -pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.29.0,<1.30.0)"] -personalize = ["mypy-boto3-personalize 
(>=1.29.0,<1.30.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.29.0,<1.30.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.29.0,<1.30.0)"] -pi = ["mypy-boto3-pi (>=1.29.0,<1.30.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.29.0,<1.30.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.29.0,<1.30.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.29.0,<1.30.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.29.0,<1.30.0)"] -pipes = ["mypy-boto3-pipes (>=1.29.0,<1.30.0)"] -polly = ["mypy-boto3-polly (>=1.29.0,<1.30.0)"] -pricing = ["mypy-boto3-pricing (>=1.29.0,<1.30.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.29.0,<1.30.0)"] -proton = ["mypy-boto3-proton (>=1.29.0,<1.30.0)"] -qldb = ["mypy-boto3-qldb (>=1.29.0,<1.30.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.29.0,<1.30.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.29.0,<1.30.0)"] -ram = ["mypy-boto3-ram (>=1.29.0,<1.30.0)"] -rbin = ["mypy-boto3-rbin (>=1.29.0,<1.30.0)"] -rds = ["mypy-boto3-rds (>=1.29.0,<1.30.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.29.0,<1.30.0)"] -redshift = ["mypy-boto3-redshift (>=1.29.0,<1.30.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.29.0,<1.30.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.29.0,<1.30.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.29.0,<1.30.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.29.0,<1.30.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.29.0,<1.30.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.29.0,<1.30.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.29.0,<1.30.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.29.0,<1.30.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.29.0,<1.30.0)"] -route53 = ["mypy-boto3-route53 (>=1.29.0,<1.30.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.29.0,<1.30.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.29.0,<1.30.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.29.0,<1.30.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.29.0,<1.30.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.29.0,<1.30.0)"] -rum = ["mypy-boto3-rum (>=1.29.0,<1.30.0)"] -s3 = ["mypy-boto3-s3 (>=1.29.0,<1.30.0)"] -s3control = ["mypy-boto3-s3control (>=1.29.0,<1.30.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.29.0,<1.30.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.29.0,<1.30.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.29.0,<1.30.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.29.0,<1.30.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.29.0,<1.30.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.29.0,<1.30.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.29.0,<1.30.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.29.0,<1.30.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.29.0,<1.30.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.29.0,<1.30.0)"] -schemas = ["mypy-boto3-schemas (>=1.29.0,<1.30.0)"] -sdb = ["mypy-boto3-sdb (>=1.29.0,<1.30.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.29.0,<1.30.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.29.0,<1.30.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.29.0,<1.30.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.29.0,<1.30.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.29.0,<1.30.0)"] 
-servicecatalog = ["mypy-boto3-servicecatalog (>=1.29.0,<1.30.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.29.0,<1.30.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.29.0,<1.30.0)"] -ses = ["mypy-boto3-ses (>=1.29.0,<1.30.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.29.0,<1.30.0)"] -shield = ["mypy-boto3-shield (>=1.29.0,<1.30.0)"] -signer = ["mypy-boto3-signer (>=1.29.0,<1.30.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.29.0,<1.30.0)"] -sms = ["mypy-boto3-sms (>=1.29.0,<1.30.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.29.0,<1.30.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.29.0,<1.30.0)"] -snowball = ["mypy-boto3-snowball (>=1.29.0,<1.30.0)"] -sns = ["mypy-boto3-sns (>=1.29.0,<1.30.0)"] -sqs = ["mypy-boto3-sqs (>=1.29.0,<1.30.0)"] -ssm = ["mypy-boto3-ssm (>=1.29.0,<1.30.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.29.0,<1.30.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.29.0,<1.30.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.29.0,<1.30.0)"] -sso = ["mypy-boto3-sso (>=1.29.0,<1.30.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.29.0,<1.30.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.29.0,<1.30.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.29.0,<1.30.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.29.0,<1.30.0)"] -sts = ["mypy-boto3-sts (>=1.29.0,<1.30.0)"] -support = ["mypy-boto3-support (>=1.29.0,<1.30.0)"] -support-app = ["mypy-boto3-support-app (>=1.29.0,<1.30.0)"] -swf = ["mypy-boto3-swf (>=1.29.0,<1.30.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.29.0,<1.30.0)"] -textract = ["mypy-boto3-textract (>=1.29.0,<1.30.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.29.0,<1.30.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.29.0,<1.30.0)"] -tnb = ["mypy-boto3-tnb (>=1.29.0,<1.30.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.29.0,<1.30.0)"] -transfer = ["mypy-boto3-transfer (>=1.29.0,<1.30.0)"] -translate = ["mypy-boto3-translate (>=1.29.0,<1.30.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.29.0,<1.30.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.29.0,<1.30.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.29.0,<1.30.0)"] -waf = ["mypy-boto3-waf (>=1.29.0,<1.30.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.29.0,<1.30.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.29.0,<1.30.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.29.0,<1.30.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.29.0,<1.30.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.29.0,<1.30.0)"] -worklink = ["mypy-boto3-worklink (>=1.29.0,<1.30.0)"] -workmail = ["mypy-boto3-workmail (>=1.29.0,<1.30.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.29.0,<1.30.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.29.0,<1.30.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.29.0,<1.30.0)"] -xray = ["mypy-boto3-xray (>=1.29.0,<1.30.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)"] +account = ["mypy-boto3-account (>=1.34.0,<1.35.0)"] +acm = ["mypy-boto3-acm (>=1.34.0,<1.35.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.34.0,<1.35.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)", "mypy-boto3-account (>=1.34.0,<1.35.0)", "mypy-boto3-acm (>=1.34.0,<1.35.0)", "mypy-boto3-acm-pca (>=1.34.0,<1.35.0)", "mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-amp (>=1.34.0,<1.35.0)", "mypy-boto3-amplify (>=1.34.0,<1.35.0)", "mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)", 
"mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)", "mypy-boto3-apigateway (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)", "mypy-boto3-appconfig (>=1.34.0,<1.35.0)", "mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)", "mypy-boto3-appfabric (>=1.34.0,<1.35.0)", "mypy-boto3-appflow (>=1.34.0,<1.35.0)", "mypy-boto3-appintegrations (>=1.34.0,<1.35.0)", "mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-application-insights (>=1.34.0,<1.35.0)", "mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-appmesh (>=1.34.0,<1.35.0)", "mypy-boto3-apprunner (>=1.34.0,<1.35.0)", "mypy-boto3-appstream (>=1.34.0,<1.35.0)", "mypy-boto3-appsync (>=1.34.0,<1.35.0)", "mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)", "mypy-boto3-athena (>=1.34.0,<1.35.0)", "mypy-boto3-auditmanager (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)", "mypy-boto3-b2bi (>=1.34.0,<1.35.0)", "mypy-boto3-backup (>=1.34.0,<1.35.0)", "mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)", "mypy-boto3-backupstorage (>=1.34.0,<1.35.0)", "mypy-boto3-batch (>=1.34.0,<1.35.0)", "mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-billingconductor (>=1.34.0,<1.35.0)", "mypy-boto3-braket (>=1.34.0,<1.35.0)", "mypy-boto3-budgets (>=1.34.0,<1.35.0)", "mypy-boto3-ce (>=1.34.0,<1.35.0)", "mypy-boto3-chime (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)", "mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)", "mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)", "mypy-boto3-cloud9 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)", "mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)", "mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)", "mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)", "mypy-boto3-codeartifact (>=1.34.0,<1.35.0)", "mypy-boto3-codebuild (>=1.34.0,<1.35.0)", "mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)", "mypy-boto3-codecommit (>=1.34.0,<1.35.0)", "mypy-boto3-codedeploy (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)", "mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-codepipeline (>=1.34.0,<1.35.0)", "mypy-boto3-codestar (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)", "mypy-boto3-comprehend (>=1.34.0,<1.35.0)", "mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)", "mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)", "mypy-boto3-config (>=1.34.0,<1.35.0)", "mypy-boto3-connect (>=1.34.0,<1.35.0)", 
"mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)", "mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)", "mypy-boto3-connectcases (>=1.34.0,<1.35.0)", "mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)", "mypy-boto3-controltower (>=1.34.0,<1.35.0)", "mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)", "mypy-boto3-cur (>=1.34.0,<1.35.0)", "mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)", "mypy-boto3-databrew (>=1.34.0,<1.35.0)", "mypy-boto3-dataexchange (>=1.34.0,<1.35.0)", "mypy-boto3-datapipeline (>=1.34.0,<1.35.0)", "mypy-boto3-datasync (>=1.34.0,<1.35.0)", "mypy-boto3-datazone (>=1.34.0,<1.35.0)", "mypy-boto3-dax (>=1.34.0,<1.35.0)", "mypy-boto3-detective (>=1.34.0,<1.35.0)", "mypy-boto3-devicefarm (>=1.34.0,<1.35.0)", "mypy-boto3-devops-guru (>=1.34.0,<1.35.0)", "mypy-boto3-directconnect (>=1.34.0,<1.35.0)", "mypy-boto3-discovery (>=1.34.0,<1.35.0)", "mypy-boto3-dlm (>=1.34.0,<1.35.0)", "mypy-boto3-dms (>=1.34.0,<1.35.0)", "mypy-boto3-docdb (>=1.34.0,<1.35.0)", "mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)", "mypy-boto3-drs (>=1.34.0,<1.35.0)", "mypy-boto3-ds (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)", "mypy-boto3-ebs (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)", "mypy-boto3-ecr (>=1.34.0,<1.35.0)", "mypy-boto3-ecr-public (>=1.34.0,<1.35.0)", "mypy-boto3-ecs (>=1.34.0,<1.35.0)", "mypy-boto3-efs (>=1.34.0,<1.35.0)", "mypy-boto3-eks (>=1.34.0,<1.35.0)", "mypy-boto3-eks-auth (>=1.34.0,<1.35.0)", "mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)", "mypy-boto3-elasticache (>=1.34.0,<1.35.0)", "mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)", "mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)", "mypy-boto3-elb (>=1.34.0,<1.35.0)", "mypy-boto3-elbv2 (>=1.34.0,<1.35.0)", "mypy-boto3-emr (>=1.34.0,<1.35.0)", "mypy-boto3-emr-containers (>=1.34.0,<1.35.0)", "mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-entityresolution (>=1.34.0,<1.35.0)", "mypy-boto3-es (>=1.34.0,<1.35.0)", "mypy-boto3-events (>=1.34.0,<1.35.0)", "mypy-boto3-evidently (>=1.34.0,<1.35.0)", "mypy-boto3-finspace (>=1.34.0,<1.35.0)", "mypy-boto3-finspace-data (>=1.34.0,<1.35.0)", "mypy-boto3-firehose (>=1.34.0,<1.35.0)", "mypy-boto3-fis (>=1.34.0,<1.35.0)", "mypy-boto3-fms (>=1.34.0,<1.35.0)", "mypy-boto3-forecast (>=1.34.0,<1.35.0)", "mypy-boto3-forecastquery (>=1.34.0,<1.35.0)", "mypy-boto3-frauddetector (>=1.34.0,<1.35.0)", "mypy-boto3-freetier (>=1.34.0,<1.35.0)", "mypy-boto3-fsx (>=1.34.0,<1.35.0)", "mypy-boto3-gamelift (>=1.34.0,<1.35.0)", "mypy-boto3-glacier (>=1.34.0,<1.35.0)", "mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)", "mypy-boto3-glue (>=1.34.0,<1.35.0)", "mypy-boto3-grafana (>=1.34.0,<1.35.0)", "mypy-boto3-greengrass (>=1.34.0,<1.35.0)", "mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)", "mypy-boto3-groundstation (>=1.34.0,<1.35.0)", "mypy-boto3-guardduty (>=1.34.0,<1.35.0)", "mypy-boto3-health (>=1.34.0,<1.35.0)", "mypy-boto3-healthlake (>=1.34.0,<1.35.0)", "mypy-boto3-honeycode (>=1.34.0,<1.35.0)", "mypy-boto3-iam (>=1.34.0,<1.35.0)", "mypy-boto3-identitystore (>=1.34.0,<1.35.0)", "mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)", "mypy-boto3-importexport (>=1.34.0,<1.35.0)", "mypy-boto3-inspector (>=1.34.0,<1.35.0)", "mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)", "mypy-boto3-inspector2 (>=1.34.0,<1.35.0)", "mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-iot (>=1.34.0,<1.35.0)", "mypy-boto3-iot-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)", 
"mypy-boto3-iot-roborunner (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)", "mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)", "mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)", "mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)", "mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)", "mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)", "mypy-boto3-iotwireless (>=1.34.0,<1.35.0)", "mypy-boto3-ivs (>=1.34.0,<1.35.0)", "mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)", "mypy-boto3-ivschat (>=1.34.0,<1.35.0)", "mypy-boto3-kafka (>=1.34.0,<1.35.0)", "mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-kendra (>=1.34.0,<1.35.0)", "mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)", "mypy-boto3-keyspaces (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)", "mypy-boto3-kms (>=1.34.0,<1.35.0)", "mypy-boto3-lakeformation (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)", "mypy-boto3-lex-models (>=1.34.0,<1.35.0)", "mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-lightsail (>=1.34.0,<1.35.0)", "mypy-boto3-location (>=1.34.0,<1.35.0)", "mypy-boto3-logs (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)", "mypy-boto3-m2 (>=1.34.0,<1.35.0)", "mypy-boto3-machinelearning (>=1.34.0,<1.35.0)", "mypy-boto3-macie2 (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)", "mypy-boto3-medialive (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)", "mypy-boto3-mediatailor (>=1.34.0,<1.35.0)", "mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)", "mypy-boto3-memorydb (>=1.34.0,<1.35.0)", "mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)", "mypy-boto3-mgh (>=1.34.0,<1.35.0)", "mypy-boto3-mgn (>=1.34.0,<1.35.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhuborchestrator 
(>=1.34.0,<1.35.0)", "mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)", "mypy-boto3-mobile (>=1.34.0,<1.35.0)", "mypy-boto3-mq (>=1.34.0,<1.35.0)", "mypy-boto3-mturk (>=1.34.0,<1.35.0)", "mypy-boto3-mwaa (>=1.34.0,<1.35.0)", "mypy-boto3-neptune (>=1.34.0,<1.35.0)", "mypy-boto3-neptunedata (>=1.34.0,<1.35.0)", "mypy-boto3-network-firewall (>=1.34.0,<1.35.0)", "mypy-boto3-networkmanager (>=1.34.0,<1.35.0)", "mypy-boto3-nimble (>=1.34.0,<1.35.0)", "mypy-boto3-oam (>=1.34.0,<1.35.0)", "mypy-boto3-omics (>=1.34.0,<1.35.0)", "mypy-boto3-opensearch (>=1.34.0,<1.35.0)", "mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)", "mypy-boto3-opsworks (>=1.34.0,<1.35.0)", "mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)", "mypy-boto3-organizations (>=1.34.0,<1.35.0)", "mypy-boto3-osis (>=1.34.0,<1.35.0)", "mypy-boto3-outposts (>=1.34.0,<1.35.0)", "mypy-boto3-panorama (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)", "mypy-boto3-personalize (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-events (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-pi (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)", "mypy-boto3-pipes (>=1.34.0,<1.35.0)", "mypy-boto3-polly (>=1.34.0,<1.35.0)", "mypy-boto3-pricing (>=1.34.0,<1.35.0)", "mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)", "mypy-boto3-proton (>=1.34.0,<1.35.0)", "mypy-boto3-qbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-qconnect (>=1.34.0,<1.35.0)", "mypy-boto3-qldb (>=1.34.0,<1.35.0)", "mypy-boto3-qldb-session (>=1.34.0,<1.35.0)", "mypy-boto3-quicksight (>=1.34.0,<1.35.0)", "mypy-boto3-ram (>=1.34.0,<1.35.0)", "mypy-boto3-rbin (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-rds-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-rekognition (>=1.34.0,<1.35.0)", "mypy-boto3-repostspace (>=1.34.0,<1.35.0)", "mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)", "mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)", "mypy-boto3-resource-groups (>=1.34.0,<1.35.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)", "mypy-boto3-robomaker (>=1.34.0,<1.35.0)", "mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)", "mypy-boto3-route53 (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)", "mypy-boto3-route53domains (>=1.34.0,<1.35.0)", "mypy-boto3-route53resolver (>=1.34.0,<1.35.0)", "mypy-boto3-rum (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-s3control (>=1.34.0,<1.35.0)", "mypy-boto3-s3outposts (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-savingsplans (>=1.34.0,<1.35.0)", "mypy-boto3-scheduler (>=1.34.0,<1.35.0)", "mypy-boto3-schemas (>=1.34.0,<1.35.0)", "mypy-boto3-sdb (>=1.34.0,<1.35.0)", 
"mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)", "mypy-boto3-securityhub (>=1.34.0,<1.35.0)", "mypy-boto3-securitylake (>=1.34.0,<1.35.0)", "mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)", "mypy-boto3-service-quotas (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)", "mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)", "mypy-boto3-ses (>=1.34.0,<1.35.0)", "mypy-boto3-sesv2 (>=1.34.0,<1.35.0)", "mypy-boto3-shield (>=1.34.0,<1.35.0)", "mypy-boto3-signer (>=1.34.0,<1.35.0)", "mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)", "mypy-boto3-sms (>=1.34.0,<1.35.0)", "mypy-boto3-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)", "mypy-boto3-snowball (>=1.34.0,<1.35.0)", "mypy-boto3-sns (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)", "mypy-boto3-ssm (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)", "mypy-boto3-sso (>=1.34.0,<1.35.0)", "mypy-boto3-sso-admin (>=1.34.0,<1.35.0)", "mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)", "mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)", "mypy-boto3-storagegateway (>=1.34.0,<1.35.0)", "mypy-boto3-sts (>=1.34.0,<1.35.0)", "mypy-boto3-support (>=1.34.0,<1.35.0)", "mypy-boto3-support-app (>=1.34.0,<1.35.0)", "mypy-boto3-swf (>=1.34.0,<1.35.0)", "mypy-boto3-synthetics (>=1.34.0,<1.35.0)", "mypy-boto3-textract (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-query (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-write (>=1.34.0,<1.35.0)", "mypy-boto3-tnb (>=1.34.0,<1.35.0)", "mypy-boto3-transcribe (>=1.34.0,<1.35.0)", "mypy-boto3-transfer (>=1.34.0,<1.35.0)", "mypy-boto3-translate (>=1.34.0,<1.35.0)", "mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)", "mypy-boto3-voice-id (>=1.34.0,<1.35.0)", "mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)", "mypy-boto3-waf (>=1.34.0,<1.35.0)", "mypy-boto3-waf-regional (>=1.34.0,<1.35.0)", "mypy-boto3-wafv2 (>=1.34.0,<1.35.0)", "mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)", "mypy-boto3-wisdom (>=1.34.0,<1.35.0)", "mypy-boto3-workdocs (>=1.34.0,<1.35.0)", "mypy-boto3-worklink (>=1.34.0,<1.35.0)", "mypy-boto3-workmail (>=1.34.0,<1.35.0)", "mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)", "mypy-boto3-xray (>=1.34.0,<1.35.0)"] +amp = ["mypy-boto3-amp (>=1.34.0,<1.35.0)"] +amplify = ["mypy-boto3-amplify (>=1.34.0,<1.35.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.34.0,<1.35.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.34.0,<1.35.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.34.0,<1.35.0)"] +appflow = ["mypy-boto3-appflow (>=1.34.0,<1.35.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.34.0,<1.35.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.34.0,<1.35.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)"] +appmesh = ["mypy-boto3-appmesh 
(>=1.34.0,<1.35.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.34.0,<1.35.0)"] +appstream = ["mypy-boto3-appstream (>=1.34.0,<1.35.0)"] +appsync = ["mypy-boto3-appsync (>=1.34.0,<1.35.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)"] +athena = ["mypy-boto3-athena (>=1.34.0,<1.35.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.34.0,<1.35.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.34.0,<1.35.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.34.0,<1.35.0)"] +backup = ["mypy-boto3-backup (>=1.34.0,<1.35.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.34.0,<1.35.0)"] +batch = ["mypy-boto3-batch (>=1.34.0,<1.35.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.34.0,<1.35.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"] +boto3 = ["boto3 (==1.34.0)", "botocore (==1.34.0)"] +braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"] +budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"] +ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"] +chime = ["mypy-boto3-chime (>=1.34.0,<1.35.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.34.0,<1.35.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.34.0,<1.35.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.34.0,<1.35.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.34.0,<1.35.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.34.0,<1.35.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.34.0,<1.35.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.34.0,<1.35.0)"] +codestar = ["mypy-boto3-codestar (>=1.34.0,<1.35.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)"] +codestar-notifications = 
["mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.34.0,<1.35.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)"] +config = ["mypy-boto3-config (>=1.34.0,<1.35.0)"] +connect = ["mypy-boto3-connect (>=1.34.0,<1.35.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.34.0,<1.35.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)"] +controltower = ["mypy-boto3-controltower (>=1.34.0,<1.35.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)"] +cur = ["mypy-boto3-cur (>=1.34.0,<1.35.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)"] +databrew = ["mypy-boto3-databrew (>=1.34.0,<1.35.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.34.0,<1.35.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.34.0,<1.35.0)"] +datasync = ["mypy-boto3-datasync (>=1.34.0,<1.35.0)"] +datazone = ["mypy-boto3-datazone (>=1.34.0,<1.35.0)"] +dax = ["mypy-boto3-dax (>=1.34.0,<1.35.0)"] +detective = ["mypy-boto3-detective (>=1.34.0,<1.35.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.34.0,<1.35.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.34.0,<1.35.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.34.0,<1.35.0)"] +discovery = ["mypy-boto3-discovery (>=1.34.0,<1.35.0)"] +dlm = ["mypy-boto3-dlm (>=1.34.0,<1.35.0)"] +dms = ["mypy-boto3-dms (>=1.34.0,<1.35.0)"] +docdb = ["mypy-boto3-docdb (>=1.34.0,<1.35.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)"] +drs = ["mypy-boto3-drs (>=1.34.0,<1.35.0)"] +ds = ["mypy-boto3-ds (>=1.34.0,<1.35.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.34.0,<1.35.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)"] +ebs = ["mypy-boto3-ebs (>=1.34.0,<1.35.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.34.0,<1.35.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)"] +ecr = ["mypy-boto3-ecr (>=1.34.0,<1.35.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.34.0,<1.35.0)"] +ecs = ["mypy-boto3-ecs (>=1.34.0,<1.35.0)"] +efs = ["mypy-boto3-efs (>=1.34.0,<1.35.0)"] +eks = ["mypy-boto3-eks (>=1.34.0,<1.35.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.34.0,<1.35.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.34.0,<1.35.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)"] +elb = ["mypy-boto3-elb (>=1.34.0,<1.35.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.34.0,<1.35.0)"] +emr = ["mypy-boto3-emr (>=1.34.0,<1.35.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.34.0,<1.35.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.34.0,<1.35.0)"] +es = ["mypy-boto3-es (>=1.34.0,<1.35.0)"] +essential = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", 
"mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +events = ["mypy-boto3-events (>=1.34.0,<1.35.0)"] +evidently = ["mypy-boto3-evidently (>=1.34.0,<1.35.0)"] +finspace = ["mypy-boto3-finspace (>=1.34.0,<1.35.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.34.0,<1.35.0)"] +firehose = ["mypy-boto3-firehose (>=1.34.0,<1.35.0)"] +fis = ["mypy-boto3-fis (>=1.34.0,<1.35.0)"] +fms = ["mypy-boto3-fms (>=1.34.0,<1.35.0)"] +forecast = ["mypy-boto3-forecast (>=1.34.0,<1.35.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.34.0,<1.35.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.34.0,<1.35.0)"] +freetier = ["mypy-boto3-freetier (>=1.34.0,<1.35.0)"] +fsx = ["mypy-boto3-fsx (>=1.34.0,<1.35.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.34.0,<1.35.0)"] +glacier = ["mypy-boto3-glacier (>=1.34.0,<1.35.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)"] +glue = ["mypy-boto3-glue (>=1.34.0,<1.35.0)"] +grafana = ["mypy-boto3-grafana (>=1.34.0,<1.35.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.34.0,<1.35.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.34.0,<1.35.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.34.0,<1.35.0)"] +health = ["mypy-boto3-health (>=1.34.0,<1.35.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.34.0,<1.35.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.34.0,<1.35.0)"] +iam = ["mypy-boto3-iam (>=1.34.0,<1.35.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.34.0,<1.35.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)"] +importexport = ["mypy-boto3-importexport (>=1.34.0,<1.35.0)"] +inspector = ["mypy-boto3-inspector (>=1.34.0,<1.35.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.34.0,<1.35.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)"] +iot = ["mypy-boto3-iot (>=1.34.0,<1.35.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.34.0,<1.35.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.34.0,<1.35.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.34.0,<1.35.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.34.0,<1.35.0)"] +ivs = ["mypy-boto3-ivs (>=1.34.0,<1.35.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.34.0,<1.35.0)"] +kafka = ["mypy-boto3-kafka (>=1.34.0,<1.35.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)"] +kendra = ["mypy-boto3-kendra (>=1.34.0,<1.35.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.34.0,<1.35.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.34.0,<1.35.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media 
(>=1.34.0,<1.35.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)"] +kms = ["mypy-boto3-kms (>=1.34.0,<1.35.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.34.0,<1.35.0)"] +lambda = ["mypy-boto3-lambda (>=1.34.0,<1.35.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.34.0,<1.35.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.34.0,<1.35.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.34.0,<1.35.0)"] +location = ["mypy-boto3-location (>=1.34.0,<1.35.0)"] +logs = ["mypy-boto3-logs (>=1.34.0,<1.35.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)"] +m2 = ["mypy-boto3-m2 (>=1.34.0,<1.35.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.34.0,<1.35.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.34.0,<1.35.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)"] +medialive = ["mypy-boto3-medialive (>=1.34.0,<1.35.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.34.0,<1.35.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.34.0,<1.35.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.34.0,<1.35.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.34.0,<1.35.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)"] +mgh = ["mypy-boto3-mgh (>=1.34.0,<1.35.0)"] +mgn = ["mypy-boto3-mgn (>=1.34.0,<1.35.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)"] 
+mobile = ["mypy-boto3-mobile (>=1.34.0,<1.35.0)"] +mq = ["mypy-boto3-mq (>=1.34.0,<1.35.0)"] +mturk = ["mypy-boto3-mturk (>=1.34.0,<1.35.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.34.0,<1.35.0)"] +neptune = ["mypy-boto3-neptune (>=1.34.0,<1.35.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.34.0,<1.35.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.34.0,<1.35.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.34.0,<1.35.0)"] +nimble = ["mypy-boto3-nimble (>=1.34.0,<1.35.0)"] +oam = ["mypy-boto3-oam (>=1.34.0,<1.35.0)"] +omics = ["mypy-boto3-omics (>=1.34.0,<1.35.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.34.0,<1.35.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.34.0,<1.35.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)"] +organizations = ["mypy-boto3-organizations (>=1.34.0,<1.35.0)"] +osis = ["mypy-boto3-osis (>=1.34.0,<1.35.0)"] +outposts = ["mypy-boto3-outposts (>=1.34.0,<1.35.0)"] +panorama = ["mypy-boto3-panorama (>=1.34.0,<1.35.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)"] +personalize = ["mypy-boto3-personalize (>=1.34.0,<1.35.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.34.0,<1.35.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)"] +pi = ["mypy-boto3-pi (>=1.34.0,<1.35.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.34.0,<1.35.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)"] +pipes = ["mypy-boto3-pipes (>=1.34.0,<1.35.0)"] +polly = ["mypy-boto3-polly (>=1.34.0,<1.35.0)"] +pricing = ["mypy-boto3-pricing (>=1.34.0,<1.35.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)"] +proton = ["mypy-boto3-proton (>=1.34.0,<1.35.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.34.0,<1.35.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.34.0,<1.35.0)"] +qldb = ["mypy-boto3-qldb (>=1.34.0,<1.35.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.34.0,<1.35.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.34.0,<1.35.0)"] +ram = ["mypy-boto3-ram (>=1.34.0,<1.35.0)"] +rbin = ["mypy-boto3-rbin (>=1.34.0,<1.35.0)"] +rds = ["mypy-boto3-rds (>=1.34.0,<1.35.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.34.0,<1.35.0)"] +redshift = ["mypy-boto3-redshift (>=1.34.0,<1.35.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.34.0,<1.35.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.34.0,<1.35.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.34.0,<1.35.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.34.0,<1.35.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.34.0,<1.35.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)"] +route53 = ["mypy-boto3-route53 (>=1.34.0,<1.35.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config 
(>=1.34.0,<1.35.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.34.0,<1.35.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.34.0,<1.35.0)"] +rum = ["mypy-boto3-rum (>=1.34.0,<1.35.0)"] +s3 = ["mypy-boto3-s3 (>=1.34.0,<1.35.0)"] +s3control = ["mypy-boto3-s3control (>=1.34.0,<1.35.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.34.0,<1.35.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.34.0,<1.35.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.34.0,<1.35.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.34.0,<1.35.0)"] +schemas = ["mypy-boto3-schemas (>=1.34.0,<1.35.0)"] +sdb = ["mypy-boto3-sdb (>=1.34.0,<1.35.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.34.0,<1.35.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.34.0,<1.35.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.34.0,<1.35.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)"] +ses = ["mypy-boto3-ses (>=1.34.0,<1.35.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.34.0,<1.35.0)"] +shield = ["mypy-boto3-shield (>=1.34.0,<1.35.0)"] +signer = ["mypy-boto3-signer (>=1.34.0,<1.35.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)"] +sms = ["mypy-boto3-sms (>=1.34.0,<1.35.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.34.0,<1.35.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)"] +snowball = ["mypy-boto3-snowball (>=1.34.0,<1.35.0)"] +sns = ["mypy-boto3-sns (>=1.34.0,<1.35.0)"] +sqs = ["mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +ssm = ["mypy-boto3-ssm (>=1.34.0,<1.35.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)"] +sso = ["mypy-boto3-sso (>=1.34.0,<1.35.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.34.0,<1.35.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.34.0,<1.35.0)"] +sts = ["mypy-boto3-sts (>=1.34.0,<1.35.0)"] +support = ["mypy-boto3-support (>=1.34.0,<1.35.0)"] +support-app = ["mypy-boto3-support-app (>=1.34.0,<1.35.0)"] +swf = ["mypy-boto3-swf (>=1.34.0,<1.35.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.34.0,<1.35.0)"] +textract = ["mypy-boto3-textract (>=1.34.0,<1.35.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.34.0,<1.35.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.34.0,<1.35.0)"] +tnb = ["mypy-boto3-tnb (>=1.34.0,<1.35.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.34.0,<1.35.0)"] +transfer = ["mypy-boto3-transfer (>=1.34.0,<1.35.0)"] +translate = ["mypy-boto3-translate (>=1.34.0,<1.35.0)"] +trustedadvisor = 
["mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.34.0,<1.35.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)"] +waf = ["mypy-boto3-waf (>=1.34.0,<1.35.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.34.0,<1.35.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.34.0,<1.35.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.34.0,<1.35.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.34.0,<1.35.0)"] +worklink = ["mypy-boto3-worklink (>=1.34.0,<1.35.0)"] +workmail = ["mypy-boto3-workmail (>=1.34.0,<1.35.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.34.0,<1.35.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)"] +xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"] [[package]] name = "botocore" -version = "1.32.0" +version = "1.34.0" description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "botocore-1.32.0-py3-none-any.whl", hash = "sha256:9c1e143feb6a04235cec342d2acb31a0f44df3c89f309f839e03e38a75f3f44e"}, - {file = "botocore-1.32.0.tar.gz", hash = "sha256:95fe3357b9ddc4559941dbea0f0a6b8fc043305f013b7ae2a85dff0c3b36ee92"}, + {file = "botocore-1.34.0-py3-none-any.whl", hash = "sha256:6ec19f6c9f61c3df22fb3e083940ac7946a3d96128db1f370f10aea702bb157f"}, + {file = "botocore-1.34.0.tar.gz", hash = "sha256:711b406de910585395466ca649bceeea87a04300ddf74d9a2e20727c7f27f2f1"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = [ - {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, - {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, -] +urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.19.12)"] +crt = ["awscrt (==0.19.17)"] [[package]] name = "botocore-stubs" @@ -578,7 +591,6 @@ files = [ [package.dependencies] types-awscrt = "*" -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "cachecontrol" @@ -602,24 +614,24 @@ redis = ["redis (>=2.10.5)"] [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." 
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
- {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+ {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
+ {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
]

[[package]]
@@ -833,34 +845,34 @@ toml = ["tomli"]

[[package]]
name = "cryptography"
-version = "41.0.5"
+version = "41.0.6"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"},
- {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"},
- {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"},
- {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"},
- {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"},
- {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"},
- {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"},
- {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"},
- {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"},
- {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"},
- {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"},
- {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"},
- {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"},
- {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"},
- {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"},
- {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"},
- {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"},
- {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"},
- {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"},
- {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"},
- {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"},
- {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"},
- {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"},
+ {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"},
+ {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"},
+ {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"},
+ {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"},
+ {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"},
+ {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"},
+ {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"},
+ {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"},
+ {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"},
+ {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"},
+ {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"},
+ {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"},
+ {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"},
+ {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"},
+ {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"},
+ {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"},
+ {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"},
+ {file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"},
+ {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"},
+ {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"},
+ {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"},
+ {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"},
+ {file = "cryptography-41.0.6.tar.gz", hash = "sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"},
]

[package.dependencies]
@@ -1103,13 +1115,13 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"]

[[package]]
name = "feedparser"
-version = "6.0.10"
+version = "6.0.11"
description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
optional = false
python-versions = ">=3.6"
files = [
- {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"},
- {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"},
+ {file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"},
+ {file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"},
]

[package.dependencies]
@@ -1117,31 +1129,29 @@ sgmllib3k = "*"

[[package]]
name = "filelock"
-version = "3.12.3"
+version = "3.13.1"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
- {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"},
- {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"},
+ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+ {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
]

-[package.dependencies]
-typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.11\""}
-
[package.extras]
-docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]

[[package]]
name = "firebase-admin"
-version = "6.2.0"
+version = "6.3.0"
description = "Firebase Admin Python SDK"
optional = false
python-versions = ">=3.7"
files = [
- {file = "firebase_admin-6.2.0-py3-none-any.whl", hash = "sha256:e3c42351fb6194d7279a6fd9209a947005fb4ee7e9037d19762e6cb3da4a82e1"},
- {file = "firebase_admin-6.2.0.tar.gz", hash = "sha256:e3b334d18bbea039f2f3e8a792ad6870d2a7cc79a13ed10659dedd63f5b475e4"},
+ {file = "firebase_admin-6.3.0-py3-none-any.whl", hash = "sha256:fcada47664f38b6da67fd924108b98029370554c9f762895d3f83e912cac5ab9"},
+ {file = "firebase_admin-6.3.0.tar.gz", hash = "sha256:f040625b8cd3a15f99f84a797fe288ad5993c4034c355b7df3c37a99d39400e6"},
]

[package.dependencies]
@@ -1166,7 +1176,6 @@ files = [
[package.dependencies]
blinker = ">=1.6.2"
click = ">=8.1.3"
-importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
itsdangerous = ">=2.1.2"
Jinja2 = ">=3.1.2"
Werkzeug = ">=3.0.0"
@@ -1208,13 +1217,13 @@ Flask = ">=0.9"

[[package]]
name = "flask-pydantic-spec"
-version = "0.5.0"
+version = "0.6.0"
description = "generate OpenAPI document and validate request & response with Python annotations."
optional = false
python-versions = ">=3.8"
files = [
- {file = "flask_pydantic_spec-0.5.0-py3-none-any.whl", hash = "sha256:3add259e5736d8e1b2b6a9db307ee55b2b3a0154d81633d4c3bcc933b20acb9a"},
- {file = "flask_pydantic_spec-0.5.0.tar.gz", hash = "sha256:8d1f3d173c2a288e61e01bd64752374c6007338a5cd0ab78a5a8ba860c27295e"},
+ {file = "flask_pydantic_spec-0.6.0-py3-none-any.whl", hash = "sha256:480689102eed43900b2164d07af930221a2379253ed06337d280a47224990035"},
+ {file = "flask_pydantic_spec-0.6.0.tar.gz", hash = "sha256:f20d63cba821dfaaa92fa19c1e1975e0acf544ea6a3cc9eb5cbac62a81117790"},
]

[package.dependencies]
@@ -1226,13 +1235,13 @@ flask = ["flask"]

[[package]]
name = "freezegun"
-version = "1.2.2"
+version = "1.4.0"
description = "Let your Python tests travel through time"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"},
- {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"},
+ {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"},
+ {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"},
]

[package.dependencies]
@@ -1293,12 +1302,12 @@ files = [
google-auth = ">=2.14.1,<3.0dev"
googleapis-common-protos = ">=1.56.2,<2.0dev"
grpcio = [
- {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
{version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
+ {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
]
grpcio-status = [
- {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
{version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
+ {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
requests = ">=2.18.0,<3.0.0dev"
@@ -1400,8 +1409,8 @@ files = [
google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
google-cloud-core = ">=1.4.1,<3.0.0dev"
proto-plus = [
- {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
+ {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
@@ -1740,43 +1749,6 @@ files = [
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]

-[[package]]
-name = "importlib-metadata"
-version = "6.0.0"
-description = "Read metadata from Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"},
- {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"},
-]
-
-[package.dependencies]
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
-
-[[package]]
-name = "importlib-resources"
-version = "5.10.2"
-description = "Read resources from Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "importlib_resources-5.10.2-py3-none-any.whl", hash = "sha256:7d543798b0beca10b6a01ac7cafda9f822c54db9e8376a6bf57e0cbd74d486b6"},
- {file = "importlib_resources-5.10.2.tar.gz", hash = "sha256:e4a96c8cc0339647ff9a5e0550d9f276fc5a01ffa276012b58ec108cfd7b8484"},
-]
-
-[package.dependencies]
-zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
-
[[package]]
name = "inflection"
version = "0.5.1"
@@ -1837,13 +1809,13 @@ files = [

[[package]]
name = "jinja2"
-version = "3.1.2"
+version = "3.1.3"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
- {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
- {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+ {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
+ {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
]

[package.dependencies]
@@ -1887,9 +1859,7 @@ files = [
[package.dependencies]
attrs = ">=22.2.0"
-importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
jsonschema-specifications = ">=2023.03.6"
-pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"

@@ -1909,17 +1879,16 @@ files = [
]

[package.dependencies]
-importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
referencing = ">=0.28.0"

[[package]]
name = "jwcrypto"
-version = "1.5.0"
+version = "1.5.1"
description = "Implementation of JOSE Web standards"
optional = false
python-versions = ">= 3.6"
files = [
- {file = "jwcrypto-1.5.0.tar.gz", hash = "sha256:2c1dc51cf8e38ddf324795dfe9426dee9dd46caf47f535ccbc18781fba810b8d"},
+ {file = "jwcrypto-1.5.1.tar.gz", hash = "sha256:48bb9bf433777136253579e52b75ffe0f9a4a721d133d01f45a0b91ed5f4f1ae"},
]

[package.dependencies]
@@ -2326,38 +2295,38 @@ files = [

[[package]]
name = "mypy"
-version = "1.7.0"
+version = "1.8.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"},
- {file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash =
"sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"}, - {file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"}, - {file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"}, - {file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"}, - {file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"}, - {file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"}, - {file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"}, - {file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"}, - {file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"}, - {file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"}, - {file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"}, - {file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"}, - {file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"}, - {file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"}, - {file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"}, - {file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"}, - {file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"}, - {file = 
"mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -2373,13 +2342,13 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-cloudformation" -version = "1.29.0" -description = "Type annotations for boto3.CloudFormation 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.CloudFormation 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudformation-1.29.0.tar.gz", hash = "sha256:91b7202a439d31f7e6645f34ea810f1900f23214900fdf6de210a0704c14da70"}, - {file = "mypy_boto3_cloudformation-1.29.0-py3-none-any.whl", hash = "sha256:b719c35be8b4d5606e9b4fd66d4d0c0e3d5eaf9508a72099053c8e0640b652af"}, + {file = "mypy-boto3-cloudformation-1.34.0.tar.gz", hash = "sha256:9b25df9ef15d9dc8e4e892cc07aa9343f15f2ed5eb7d33eb5eb65adfa63f538f"}, + {file = "mypy_boto3_cloudformation-1.34.0-py3-none-any.whl", hash = "sha256:4e63a2bca1882971881d65983acd774c2fc636bbc5dc8c3e1f4a41c539cf3c90"}, ] [package.dependencies] @@ -2387,13 +2356,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-dynamodb" -version = "1.29.0" -description = "Type annotations for boto3.DynamoDB 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.DynamoDB 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-dynamodb-1.29.0.tar.gz", hash = "sha256:e9b0f1cf1d66d2cbc5d7177832dcd08d85cfa84983934aa361bfc3ca57e06edc"}, - {file = "mypy_boto3_dynamodb-1.29.0-py3-none-any.whl", hash = "sha256:a54d9bf0a9449423fa909586a6003e55ba1d64fc77107c228f4091020f83d134"}, + {file = "mypy-boto3-dynamodb-1.34.0.tar.gz", hash = "sha256:c0d98d7e83b0bc22e5039f703889fb96202d818171c4206fd31e665a37654e84"}, + {file = "mypy_boto3_dynamodb-1.34.0-py3-none-any.whl", hash = "sha256:76869c3fec882ddeeaca485074e302bf38c3b61103664d665dfed9425234ff75"}, ] [package.dependencies] @@ -2401,13 +2370,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-ec2" -version = "1.29.0" -description = "Type annotations for boto3.EC2 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.EC2 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-ec2-1.29.0.tar.gz", hash = "sha256:1e636794205fee5069701a32919c03b40da1d031dbcd6bfca097c9001774dfc1"}, - {file = "mypy_boto3_ec2-1.29.0-py3-none-any.whl", hash = "sha256:9684add91b80889880ba1403f28e32d5f6dcf540f318de59913e0e9fa10965be"}, + {file = "mypy-boto3-ec2-1.34.0.tar.gz", hash = "sha256:f19d4fe9b4ae4173af6ec841d1d2d38e53b0c5eec4f4e6fcbad06d0658e13070"}, + {file = "mypy_boto3_ec2-1.34.0-py3-none-any.whl", hash = "sha256:678f58876bcbb21fcae455ed1ba75a542c026a36b0dd464dae7b379afdcecd52"}, ] [package.dependencies] @@ -2415,13 +2384,13 @@ typing-extensions = {version = ">=4.1.0", markers 
= "python_version < \"3.12\""} [[package]] name = "mypy-boto3-lambda" -version = "1.29.0" -description = "Type annotations for boto3.Lambda 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.Lambda 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-lambda-1.29.0.tar.gz", hash = "sha256:e4537261f7b675b1c165a7dc04d4b661f2f338a45e57bd2bee92d9a41a9cd407"}, - {file = "mypy_boto3_lambda-1.29.0-py3-none-any.whl", hash = "sha256:cc3f4dee77181feb2a1ec90f72258a32bdc75f83d01b3c637ca791073279d3e5"}, + {file = "mypy-boto3-lambda-1.34.0.tar.gz", hash = "sha256:e74c0ce548da747a8c6e643c39dad8aa54d67e057f57740ec780a7e565590627"}, + {file = "mypy_boto3_lambda-1.34.0-py3-none-any.whl", hash = "sha256:109a7e126e84d6da6cacf8ab5c7c6f2be022417fe7bfb7f9b019767d7034f73b"}, ] [package.dependencies] @@ -2429,13 +2398,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-logs" -version = "1.29.0" -description = "Type annotations for boto3.CloudWatchLogs 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.CloudWatchLogs 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-logs-1.29.0.tar.gz", hash = "sha256:c30f51dafad4578d74930f3dc7fa0e0cba7250546a0388ed614f8226e6180ffd"}, - {file = "mypy_boto3_logs-1.29.0-py3-none-any.whl", hash = "sha256:0f74e18b773e099d99050337a67127a1dd80441f810286d2bf1acdfc5c70a1ea"}, + {file = "mypy-boto3-logs-1.34.0.tar.gz", hash = "sha256:a852bf6c48733a51c324ca97da042bfe4c66b0d33aabe042fb27d3092572d55b"}, + {file = "mypy_boto3_logs-1.34.0-py3-none-any.whl", hash = "sha256:cb2d29096d3b07d7d508fa1f236f9cd15c292d41c8807aba7347627868e7ebdc"}, ] [package.dependencies] @@ -2443,13 +2412,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-rds" -version = "1.29.0" -description = "Type annotations for boto3.RDS 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.RDS 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-rds-1.29.0.tar.gz", hash = "sha256:2e7688620ec81a637fbb129ed4165592b118f255089de98013d3b95fb14bcf89"}, - {file = "mypy_boto3_rds-1.29.0-py3-none-any.whl", hash = "sha256:3cab2b07a29c06ad1f469bcd98a8796f23ae423f7f03a93d43b3a0cf4cb9877c"}, + {file = "mypy-boto3-rds-1.34.0.tar.gz", hash = "sha256:9909f5f23ccb01830262a2e16c9e8a19acc46021d754838099d7bdb295911e94"}, + {file = "mypy_boto3_rds-1.34.0-py3-none-any.whl", hash = "sha256:aa8d15c28f140c8bc5e079c405e92c20c68f968a319eb21b69a5692610f86fe5"}, ] [package.dependencies] @@ -2457,13 +2426,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-s3" -version = "1.29.0" -description = "Type annotations for boto3.S3 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.S3 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.29.0.tar.gz", hash = "sha256:3c8473974e304aa512abbf6a47454d9834674e89db414545e2f0cb4fcdd227c9"}, - {file = "mypy_boto3_s3-1.29.0-py3-none-any.whl", hash 
= "sha256:f5040429b0c3814c6ec9c1a59256976186acb7376fd3b56c4e7e5d03272bb1a8"}, + {file = "mypy-boto3-s3-1.34.0.tar.gz", hash = "sha256:7644a00e096ebb1c3292551059f64ff8329625dacd40827ced9481b14d64c733"}, + {file = "mypy_boto3_s3-1.34.0-py3-none-any.whl", hash = "sha256:633876d2a96dbb924f9667084316c1759bff40c19a9a38313d5a4e825c5fc641"}, ] [package.dependencies] @@ -2471,13 +2440,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-sqs" -version = "1.29.0" -description = "Type annotations for boto3.SQS 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.SQS 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-sqs-1.29.0.tar.gz", hash = "sha256:0835256e3aabd27b2acf613c1b82a22b9de18412a0b07bd04d6d214c3f063906"}, - {file = "mypy_boto3_sqs-1.29.0-py3-none-any.whl", hash = "sha256:db88751bd7765f51c2b1f9061545ddb06639d301c3d981d3b3fa4b367f0ca8ea"}, + {file = "mypy-boto3-sqs-1.34.0.tar.gz", hash = "sha256:0bf8995f58919ab295398100e72eaa7da898adcfd9d339a42f3c48ce473419d5"}, + {file = "mypy_boto3_sqs-1.34.0-py3-none-any.whl", hash = "sha256:94d8aea4ae75605f70e58e440d706e04d5c614101ddb2f0c73d306d776d10995"}, ] [package.dependencies] @@ -2597,13 +2566,13 @@ requests = ["requests (>=2.4.0,<3.0.0)"] [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -2629,91 +2598,98 @@ uritemplate = ">=4.1,<5.0" [[package]] name = "pillow" -version = "10.1.0" +version = "10.2.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, - {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, - {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, - {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, - {file = 
"Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, - {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, - {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, - {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, - {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, - {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, - {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, - {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, - {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, - {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, - {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, - {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, - {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, - {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, - {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, - {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, - {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, - {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, - {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, - {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, - {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, - {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = 
"pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = 
"sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] +typing = ["typing-extensions"] +xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2737,13 +2713,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.6.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, ] [package.dependencies] @@ -2803,8 +2779,6 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -2847,7 +2821,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2856,8 +2829,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2933,43 +2904,43 @@ files = [ [[package]] name = "pycryptodome" -version = "3.19.0" +version = "3.20.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"}, - {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"}, - {file = 
"pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"}, - {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"}, - {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, - {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, - {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, - {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, - {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, - {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, - {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, - {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, - {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, - {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, - {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = 
"sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] [[package]] @@ -3028,13 +2999,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyfakefs" -version = "5.3.0" +version = "5.3.2" description = "pyfakefs implements a fake file system that mocks the Python file system modules." 
optional = false python-versions = ">=3.7" files = [ - {file = "pyfakefs-5.3.0-py3-none-any.whl", hash = "sha256:33c1f891078c727beec465e75cb314120635e2298456493cc2cc0539e2130cbb"}, - {file = "pyfakefs-5.3.0.tar.gz", hash = "sha256:e3e35f65ce55ee8ecc5e243d55cfdbb5d0aa24938f6e04e19f0fab062f255020"}, + {file = "pyfakefs-5.3.2-py3-none-any.whl", hash = "sha256:5a62194cfa24542a3c9080b66ce65d78b2e977957edfd3cd6fe98e8349bcca32"}, + {file = "pyfakefs-5.3.2.tar.gz", hash = "sha256:a83776a3c1046d4d103f2f530029aa6cdff5f0386dffd59c15ee16926135493c"}, ] [[package]] @@ -3156,13 +3127,13 @@ requests = ["requests"] [[package]] name = "pymarc" -version = "5.1.0" +version = "5.1.1" description = "Read, write and modify MARC bibliographic data" optional = false python-versions = ">=3.7" files = [ - {file = "pymarc-5.1.0-py3-none-any.whl", hash = "sha256:dfbfdc7368df487fd190316eeeed7c59bf3823cdb64da0e0e9f3664f6fed5f2d"}, - {file = "pymarc-5.1.0.tar.gz", hash = "sha256:9ab6fcce9f04ee22c3e149379ad2206dc2a5df4674d00279eb867da2f7081308"}, + {file = "pymarc-5.1.1-py3-none-any.whl", hash = "sha256:e7b0c4f0c7a53208150c1f93229a2c654f1d3c985f6dc465890201aff1e28a62"}, + {file = "pymarc-5.1.1.tar.gz", hash = "sha256:9fa3d5f80d35d82e7f4ad353809bb8e5325eacd4400dea4323e6fe4cba7484c8"}, ] [[package]] @@ -3268,24 +3239,24 @@ files = [ [[package]] name = "pyspellchecker" -version = "0.7.2" +version = "0.8.0" description = "Pure python spell checker based on work by Peter Norvig" optional = false python-versions = ">=3.6" files = [ - {file = "pyspellchecker-0.7.2-py3-none-any.whl", hash = "sha256:b5ef23437702b8d03626f814b9646779b572d378b325ad252d8a8e616b3d76db"}, - {file = "pyspellchecker-0.7.2.tar.gz", hash = "sha256:bc51ffb2c18ba26eaa1340756ebf96d0d886ed6a31d6f8e7a0094ad49d24550a"}, + {file = "pyspellchecker-0.8.0-py3-none-any.whl", hash = "sha256:6a06129c38ff23ae2e250d4a3e7a7cebb990496a3c0fe60b28cc4e8c09312167"}, + {file = "pyspellchecker-0.8.0.tar.gz", hash = "sha256:0c13f129a18fb13dd028d1da9f3197f838cb6ec68b67a89092fe8406b2ec3170"}, ] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -3437,7 +3408,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = 
"PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3445,16 +3415,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3471,7 +3433,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3479,7 +3440,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3895,20 +3855,20 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.7.0" +version = "0.9.0" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, - {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, + {file = "s3transfer-0.9.0-py3-none-any.whl", hash = "sha256:01d4d2c35a016db8cb14f9a4d5e84c1f8c96e7ffc211422555eed45c11fa7eb1"}, + {file = "s3transfer-0.9.0.tar.gz", hash = "sha256:9e1b186ec8bb5907a1e82b51237091889a9973a2bb799a924bcd9f301ff79d3d"}, ] [package.dependencies] -botocore = ">=1.12.36,<2.0a.0" +botocore = ">=1.33.2,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "sgmllib3k" @@ -3944,57 +3904,57 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.50" +version = "1.4.51" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.50-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:54138aa80d2dedd364f4e8220eef284c364d3270aaef621570aa2bd99902e2e8"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, - {file = 
"SQLAlchemy-1.4.50-cp310-cp310-win32.whl", hash = "sha256:6c78e3fb4a58e900ec433b6b5f4efe1a0bf81bbb366ae7761c6e0051dd310ee3"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-win_amd64.whl", hash = "sha256:d55f7a33e8631e15af1b9e67c9387c894fedf6deb1a19f94be8731263c51d515"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:324b1fdd50e960a93a231abb11d7e0f227989a371e3b9bd4f1259920f15d0304"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-win32.whl", hash = "sha256:8bdab03ff34fc91bfab005e96f672ae207d87e0ac7ee716d74e87e7046079d8b"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-win_amd64.whl", hash = "sha256:52e01d60b06f03b0a5fc303c8aada405729cbc91a56a64cead8cb7c0b9b13c1a"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:77fde9bf74f4659864c8e26ac08add8b084e479b9a18388e7db377afc391f926"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-win32.whl", hash = "sha256:e86c920b7d362cfa078c8b40e7765cbc34efb44c1007d7557920be9ddf138ec7"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-win_amd64.whl", hash = "sha256:6b3df20fbbcbcd1c1d43f49ccf3eefb370499088ca251ded632b8cbaee1d497d"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:fb9adc4c6752d62c6078c107d23327aa3023ef737938d0135ece8ffb67d07030"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-win32.whl", hash = "sha256:1b9c4359d3198f341480e57494471201e736de459452caaacf6faa1aca852bd8"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-win_amd64.whl", hash = "sha256:35e4520f7c33c77f2636a1e860e4f8cafaac84b0b44abe5de4c6c8890b6aaa6d"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f5b1fb2943d13aba17795a770d22a2ec2214fc65cff46c487790192dda3a3ee7"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, - {file = 
"SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-win32.whl", hash = "sha256:7b4396452273aedda447e5aebe68077aa7516abf3b3f48408793e771d696f397"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-win_amd64.whl", hash = "sha256:752f9df3dddbacb5f42d8405b2d5885675a93501eb5f86b88f2e47a839cf6337"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:35c7ed095a4b17dbc8813a2bfb38b5998318439da8e6db10a804df855e3a9e3a"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-win32.whl", hash = "sha256:8a219688297ee5e887a93ce4679c87a60da4a5ce62b7cb4ee03d47e9e767f558"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-win_amd64.whl", hash = "sha256:a648770db002452703b729bdcf7d194e904aa4092b9a4d6ab185b48d13252f63"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4be4da121d297ce81e1ba745a0a0521c6cf8704634d7b520e350dce5964c71ac"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-win32.whl", hash = "sha256:a7c9b9dca64036008962dd6b0d9fdab2dfdbf96c82f74dbd5d86006d8d24a30f"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-win_amd64.whl", hash = "sha256:df200762efbd672f7621b253721644642ff04a6ff957236e0e2fe56d9ca34d2c"}, - {file = "SQLAlchemy-1.4.50.tar.gz", hash = 
"sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:1a09d5bd1a40d76ad90e5570530e082ddc000e1d92de495746f6257dc08f166b"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-win32.whl", hash = "sha256:7af40425ac535cbda129d9915edcaa002afe35d84609fd3b9d6a8c46732e02ee"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-win_amd64.whl", hash = "sha256:8d1d7d63e5d2f4e92a39ae1e897a5d551720179bb8d1254883e7113d3826d43c"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eaeeb2464019765bc4340214fca1143081d49972864773f3f1e95dba5c7edc7d"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-win32.whl", hash = "sha256:50e074aea505f4427151c286955ea025f51752fa42f9939749336672e0674c81"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-win_amd64.whl", hash = "sha256:3b0cd89a7bd03f57ae58263d0f828a072d1b440c8c2949f38f3b446148321171"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a33cb3f095e7d776ec76e79d92d83117438b6153510770fcd57b9c96f9ef623d"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-win32.whl", hash = "sha256:8e702e7489f39375601c7ea5a0bef207256828a2bc5986c65cb15cd0cf097a87"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-win_amd64.whl", hash = "sha256:0525c4905b4b52d8ccc3c203c9d7ab2a80329ffa077d4bacf31aefda7604dc65"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:1980e6eb6c9be49ea8f89889989127daafc43f0b1b6843d71efab1514973cca0"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-win32.whl", hash = "sha256:d0a83afab5e062abffcdcbcc74f9d3ba37b2385294dd0927ad65fc6ebe04e054"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-win_amd64.whl", hash = "sha256:a61184c7289146c8cff06b6b41807c6994c6d437278e72cf00ff7fe1c7a263d1"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:3f0ef620ecbab46e81035cf3dedfb412a7da35340500ba470f9ce43a1e6c423b"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-win32.whl", hash = "sha256:f2e5b6f5cf7c18df66d082604a1d9c7a2d18f7d1dbe9514a2afaccbb51cc4fc3"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-win_amd64.whl", hash = "sha256:5e180fff133d21a800c4f050733d59340f40d42364fcb9d14f6a67764bdc48d2"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7d8139ca0b9f93890ab899da678816518af74312bb8cd71fb721436a93a93298"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-win32.whl", hash = "sha256:cecb66492440ae8592797dd705a0cbaa6abe0555f4fa6c5f40b078bd2740fc6b"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-win_amd64.whl", hash = "sha256:39b02b645632c5fe46b8dd30755682f629ffbb62ff317ecc14c998c21b2896ff"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b03850c290c765b87102959ea53299dc9addf76ca08a06ea98383348ae205c99"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-win32.whl", hash = "sha256:b00cf0471888823b7a9f722c6c41eb6985cf34f077edcf62695ac4bed6ec01ee"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-win_amd64.whl", hash = "sha256:a055ba17f4675aadcda3005df2e28a86feb731fdcc865e1f6b4f209ed1225cba"}, + {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, ] [package.dependencies] @@ -4004,7 +3964,7 @@ sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\ [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] @@ -4014,14 +3974,14 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy2-stubs" @@ -4064,30 +4024,30 @@ files = [ [[package]] name = "tox" -version = "4.11.3" +version = "4.12.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"}, - {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"}, + {file = "tox-4.12.0-py3-none-any.whl", hash = "sha256:c94bf5852ba41f3d9f1e3470ccf3390e0b7bdc938095be3cd96dce25ab5062a0"}, + {file = "tox-4.12.0.tar.gz", hash = "sha256:76adc53a3baff7bde80d6ad7f63235735cfc5bc42e8cb6fccfbf62cb5ffd4d92"}, ] [package.dependencies] -cachetools = ">=5.3.1" +cachetools = ">=5.3.2" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.12.3" -packaging = ">=23.1" -platformdirs = ">=3.10" +filelock = ">=3.13.1" +packaging = ">=23.2" +platformdirs = ">=4.1" pluggy = ">=1.3" pyproject-api = ">=1.6.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.24.3" +virtualenv = ">=20.25" [package.extras] -docs = ["furo (>=2023.8.19)", "sphinx (>=7.2.4)", 
"sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] [[package]] name = "tox-docker" @@ -4107,13 +4067,13 @@ tox = ">=3.0.0,<5.0" [[package]] name = "tox-gh-actions" -version = "3.1.3" +version = "3.2.0" description = "Seamless integration of tox into GitHub Actions" optional = false python-versions = ">=3.7" files = [ - {file = "tox-gh-actions-3.1.3.tar.gz", hash = "sha256:ffd4151fe8b62c6f401a2fc5a01317835d7ab380923f6e0d063c300750308328"}, - {file = "tox_gh_actions-3.1.3-py2.py3-none-any.whl", hash = "sha256:5954766fe2ed0e284f3cdc87535dfdf68d0f803f1011b17ff8cf52ed3156e6c1"}, + {file = "tox-gh-actions-3.2.0.tar.gz", hash = "sha256:ac6fa3b8da51bc90dd77985fd55f09e746c6558c55910c0a93d643045a2b0ccc"}, + {file = "tox_gh_actions-3.2.0-py2.py3-none-any.whl", hash = "sha256:821b66a4751a788fa3e9617bd796d696507b08c6e1d929ee4faefba06b73b694"}, ] [package.dependencies] @@ -4166,13 +4126,13 @@ files = [ [[package]] name = "types-flask-cors" -version = "4.0.0.1" +version = "4.0.0.20240106" description = "Typing stubs for Flask-Cors" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-Flask-Cors-4.0.0.1.tar.gz", hash = "sha256:68dbf195dec2a21932690751f6e7669f9eb7def6b15bfaaf507f96cc04bef5ec"}, - {file = "types_Flask_Cors-4.0.0.1-py3-none-any.whl", hash = "sha256:2846ceaceaf6dbbc023442ff85a15c9c1dbdf3c65fe3ed4b331f5c6b58f81c21"}, + {file = "types-Flask-Cors-4.0.0.20240106.tar.gz", hash = "sha256:e49b29c4ba1de9496f7f43aab90f32393dd2599f87fab448cdebc481facca1a3"}, + {file = "types_Flask_Cors-4.0.0.20240106-py3-none-any.whl", hash = "sha256:bc07707c8eae86b04d177bc19f4cbb128af6fc490de9c4ba4a4348e45c77406f"}, ] [package.dependencies] @@ -4180,13 +4140,13 @@ Flask = ">=2.0.0" [[package]] name = "types-jsonschema" -version = "4.19.0.4" +version = "4.20.0.20240105" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.19.0.4.tar.gz", hash = "sha256:994feb6632818259c4b5dbd733867824cb475029a6abc2c2b5201a2268b6e7d2"}, - {file = "types_jsonschema-4.19.0.4-py3-none-any.whl", hash = "sha256:b73c3f4ba3cd8108602d1198a438e2698d5eb6b9db206ed89a33e24729b0abe7"}, + {file = "types-jsonschema-4.20.0.20240105.tar.gz", hash = "sha256:4a71af7e904498e7ad055149f6dc1eee04153b59a99ad7dd17aa3769c9bc5982"}, + {file = 
"types_jsonschema-4.20.0.20240105-py3-none-any.whl", hash = "sha256:26706cd70a273e59e718074c4e756608a25ba61327a7f9a4493ebd11941e5ad4"}, ] [package.dependencies] @@ -4205,35 +4165,35 @@ files = [ [[package]] name = "types-pillow" -version = "10.1.0.2" +version = "10.2.0.20240111" description = "Typing stubs for Pillow" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.1.0.2.tar.gz", hash = "sha256:525c1c5ee67b0ac1721c40d2bc618226ef2123c347e527e14e05b920721a13b9"}, - {file = "types_Pillow-10.1.0.2-py3-none-any.whl", hash = "sha256:131078ffa547bf9a201d39ffcdc65633e108148085f4f1b07d4647fcfec6e923"}, + {file = "types-Pillow-10.2.0.20240111.tar.gz", hash = "sha256:e8d359bfdc5a149a3c90a7e153cb2d0750ddf7fc3508a20dfadabd8a9435e354"}, + {file = "types_Pillow-10.2.0.20240111-py3-none-any.whl", hash = "sha256:1f4243b30c143b56b0646626f052e4269123e550f9096cdfb5fbd999daee7dbb"}, ] [[package]] name = "types-psycopg2" -version = "2.9.21.16" +version = "2.9.21.20240106" description = "Typing stubs for psycopg2" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"}, - {file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"}, + {file = "types-psycopg2-2.9.21.20240106.tar.gz", hash = "sha256:0d0a350449714ba28448c4f10a0a3aec36e9e3efd1450730e227e17b704a4bea"}, + {file = "types_psycopg2-2.9.21.20240106-py3-none-any.whl", hash = "sha256:c20cf8236757f8ca4519068548f0c6c159158c9262cc7264c3f2f67f1f511b61"}, ] [[package]] name = "types-pyopenssl" -version = "23.3.0.0" +version = "23.3.0.20240106" description = "Typing stubs for pyOpenSSL" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, - {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, + {file = "types-pyOpenSSL-23.3.0.20240106.tar.gz", hash = "sha256:3d6f3462bec0c260caadf93fbb377225c126661b779c7d9ab99b6dad5ca10db9"}, + {file = "types_pyOpenSSL-23.3.0.20240106-py3-none-any.whl", hash = "sha256:47a7eedbd18b7bcad17efebf1c53416148f5a173918a6d75027e75e32fe039ae"}, ] [package.dependencies] @@ -4241,13 +4201,13 @@ cryptography = ">=35.0.0" [[package]] name = "types-python-dateutil" -version = "2.8.19.14" +version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, - {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, + {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, + {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, ] [[package]] @@ -4310,13 +4270,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = 
">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -4368,22 +4328,22 @@ files = [ [[package]] name = "virtualenv" -version = "20.24.3" +version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"}, - {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] @@ -4472,16 +4432,6 @@ files = [ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecee4132c6cd2ce5308e21672015ddfed1ff975ad0ac8d27168ea82e71413f55"}, - {file = "wrapt-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2020f391008ef874c6d9e208b24f28e31bcb85ccff4f335f15a3251d222b92d9"}, - {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2feecf86e1f7a86517cab34ae6c2f081fd2d0dac860cb0c0ded96d799d20b335"}, - {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:240b1686f38ae665d1b15475966fe0472f78e71b1b4903c143a842659c8e4cb9"}, - {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9008dad07d71f68487c91e96579c8567c98ca4c3881b9b113bc7b33e9fd78b8"}, - {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6447e9f3ba72f8e2b985a1da758767698efa72723d5b59accefd716e9e8272bf"}, - {file = 
"wrapt-1.14.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:acae32e13a4153809db37405f5eba5bac5fbe2e2ba61ab227926a22901051c0a"}, - {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49ef582b7a1152ae2766557f0550a9fcbf7bbd76f43fbdc94dd3bf07cc7168be"}, - {file = "wrapt-1.14.1-cp311-cp311-win32.whl", hash = "sha256:358fe87cc899c6bb0ddc185bf3dbfa4ba646f05b1b0b9b5a27c2cb92c2cea204"}, - {file = "wrapt-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:26046cd03936ae745a502abf44dac702a5e6880b2b01c29aea8ddf3353b68224"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, @@ -4552,22 +4502,7 @@ files = [ [package.dependencies] lxml = ">=3.8" -[[package]] -name = "zipp" -version = "3.11.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, - {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - [metadata] lock-version = "2.0" -python-versions = ">=3.8,<4" -content-hash = "d28aa66dac9948e18d5d89e17291ed4b9b0efede33068efcc14c8c1f1af56ca4" +python-versions = ">=3.10,<4" +content-hash = "bcd28f5925d0af63b549c2ccda1a61527913a206f8ddb9854b5818349d4ad8de" diff --git a/pyproject.toml b/pyproject.toml index 665ff2111..7954a673e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ exclude_also = [ [tool.coverage.run] branch = true relative_files = true -source = ["api", "core"] +source = ["api", "core", "scripts"] [tool.isort] known_first_party = ["api", "core", "customlists"] @@ -67,25 +67,34 @@ disallow_untyped_defs = true module = [ "api.admin.announcement_list_validator", "api.admin.config", + "api.admin.controller.catalog_services", + "api.admin.controller.collection_self_tests", + "api.admin.controller.collection_settings", "api.admin.controller.discovery_service_library_registrations", "api.admin.controller.discovery_services", "api.admin.controller.integration_settings", "api.admin.controller.library_settings", + "api.admin.controller.patron_auth_service_self_tests", "api.admin.controller.patron_auth_services", + "api.admin.dashboard_stats", "api.admin.form_data", "api.admin.model.dashboard_statistics", "api.adobe_vendor_id", "api.axis", "api.circulation", + "api.controller.marc", "api.discovery.*", "api.enki", "api.integration.*", "api.lcp.hash", + "api.marc", "api.odl", "api.odl2", "api.opds_for_distributors", "core.feed.*", "core.integration.*", + "core.marc", + "core.migration.*", "core.model.announcements", "core.model.collection", "core.model.hassessioncache", @@ -99,12 +108,15 @@ module = [ "core.util.authentication_for_opds", "core.util.base64", 
"core.util.cache", + "core.util.log", "core.util.notifications", "core.util.problem_detail", "core.util.string_helpers", + "core.util.uuid", "core.util.worker_pools", "core.util.xmlparser", "tests.fixtures.authenticator", + "tests.fixtures.webserver", "tests.migration.*", ] no_implicit_reexport = true @@ -183,12 +195,12 @@ click = "^8.1.3" contextlib2 = "21.6.0" dependency-injector = "^4.41" expiringdict = "1.2.2" -feedparser = "6.0.10" +feedparser = "6.0.11" firebase-admin = "^6.0.1" Flask = "^3.0" Flask-Babel = "^4.0" Flask-Cors = "4.0.0" -flask-pydantic-spec = "^0.5.0" +flask-pydantic-spec = "^0.6.0" fuzzywuzzy = "0.18.0" # fuzzywuzzy is for author name manipulations html-sanitizer = "^2.1.0" isbnlib = "^3.10.14" @@ -210,11 +222,11 @@ pydantic = {version = "^1.10.9", extras = ["dotenv", "email"]} pyinstrument = "^4.6" PyJWT = "^2.8" PyLD = "2.0.3" -pymarc = "5.1.0" +pymarc = "5.1.1" pyOpenSSL = "^23.1.0" pyparsing = "3.1.1" -pyspellchecker = "0.7.2" -python = ">=3.8,<4" +pyspellchecker = "0.8.0" +python = ">=3.10,<4" python-dateutil = "2.8.2" python3-saml = "^1.16" # python-saml is required for SAML authentication pytz = "^2023.3" @@ -226,7 +238,6 @@ textblob = "0.17.1" types-pyopenssl = "^23.1.0.3" types-pyyaml = "^6.0.12.9" # We import typing_extensions, so we can use new annotation features. -# - ParamSpec (Python 3.10) # - Self (Python 3.11) typing_extensions = {version = "^4.5.0", python = "<3.11"} unicodecsv = "0.14.1" # this is used, but can probably be removed on py3 @@ -246,7 +257,7 @@ tox-gh-actions = "^3.0" [tool.poetry.group.dev.dependencies] boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "logs", "s3"]} -freezegun = "~1.2.2" +freezegun = "~1.4.0" Jinja2 = "^3.1.2" mypy = "^1.4.1" psycopg2-binary = "~2.9.5" diff --git a/scripts.py b/scripts.py index 1fa5e8257..1a99db4b7 100644 --- a/scripts.py +++ b/scripts.py @@ -1,14 +1,17 @@ import argparse +import datetime import logging import os import sys import time +from collections.abc import Sequence from datetime import timedelta from pathlib import Path -from typing import Optional +from typing import Any -from sqlalchemy import inspect +from sqlalchemy import inspect, select from sqlalchemy.engine import Connection +from sqlalchemy.exc import NoResultFound from sqlalchemy.orm import Session from alembic import command, config @@ -20,7 +23,6 @@ from api.config import CannotLoadConfiguration, Configuration from api.lanes import create_default_lanes from api.local_analytics_exporter import LocalAnalyticsExporter -from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI from api.nyt import NYTBestSellerAPI from api.opds_for_distributors import ( @@ -30,21 +32,27 @@ ) from api.overdrive import OverdriveAPI from core.external_search import ExternalSearchIndex +from core.integration.goals import Goals from core.lane import Lane -from core.marc import MARCExporter +from core.marc import Annotator as MarcAnnotator +from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings from core.model import ( LOCK_ID_DB_INIT, - CachedMARCFile, CirculationEvent, + Collection, ConfigurationSetting, Contribution, DataSource, + DiscoveryServiceRegistration, Edition, - ExternalIntegration, Hold, Identifier, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + Library, LicensePool, Loan, + MarcFile, Patron, SessionManager, get_one, @@ -52,7 +60,6 @@ ) from core.scripts import ( IdentifierInputScript, - LaneSweeperScript, LibraryInputScript, 
OPDSImportScript, PatronInputScript, @@ -144,20 +151,14 @@ def q(self): ) -class CacheMARCFiles(LaneSweeperScript): +class CacheMARCFiles(LibraryInputScript): """Generate and cache MARC files for each input library.""" name = "Cache MARC files" @classmethod - def arg_parser(cls, _db): - parser = LaneSweeperScript.arg_parser(_db) - parser.add_argument( - "--max-depth", - help="Stop processing lanes once you reach this depth.", - type=int, - default=0, - ) + def arg_parser(cls, _db: Session) -> argparse.ArgumentParser: # type: ignore[override] + parser = super().arg_parser(_db) parser.add_argument( "--force", help="Generate new MARC files even if MARC files have already been generated recently enough", @@ -166,97 +167,179 @@ def arg_parser(cls, _db): ) return parser - def __init__(self, _db=None, cmd_args=None, *args, **kwargs): + def __init__( + self, + _db: Session | None = None, + cmd_args: Sequence[str] | None = None, + exporter: MARCExporter | None = None, + *args: Any, + **kwargs: Any, + ) -> None: super().__init__(_db, *args, **kwargs) + self.force = False self.parse_args(cmd_args) + self.storage_service = self.services.storage.public() + + self.cm_base_url = ConfigurationSetting.sitewide( + self._db, Configuration.BASE_URL_KEY + ).value - def parse_args(self, cmd_args=None): + self.exporter = exporter or MARCExporter(self._db, self.storage_service) + + def parse_args(self, cmd_args: Sequence[str] | None = None) -> argparse.Namespace: parser = self.arg_parser(self._db) parsed = parser.parse_args(cmd_args) - self.max_depth = parsed.max_depth self.force = parsed.force return parsed - def should_process_library(self, library): - integration = ExternalIntegration.lookup( - self._db, - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - library, + def settings( + self, library: Library + ) -> tuple[MarcExporterSettings, MarcExporterLibrarySettings]: + integration_query = ( + select(IntegrationLibraryConfiguration) + .join(IntegrationConfiguration) + .where( + IntegrationConfiguration.goal == Goals.CATALOG_GOAL, + IntegrationConfiguration.protocol == MARCExporter.__name__, + IntegrationLibraryConfiguration.library == library, + ) ) - return integration is not None + integration = self._db.execute(integration_query).scalar_one() - def process_library(self, library): - if self.should_process_library(library): - super().process_library(library) - self.log.info("Processed library %s" % library.name) - - def should_process_lane(self, lane): - if isinstance(lane, Lane): - if self.max_depth is not None and lane.depth > self.max_depth: - return False - if lane.size == 0: - return False - return True - - def process_lane(self, lane, exporter=None): - # Generate a MARC file for this lane, if one has not been generated recently enough. 
- if isinstance(lane, Lane): - library = lane.library - else: - library = lane.get_library(self._db) - - annotator = MARCLibraryAnnotator(library) - exporter = exporter or MARCExporter.from_config(library) - - update_frequency = ConfigurationSetting.for_library_and_externalintegration( - self._db, MARCExporter.UPDATE_FREQUENCY, library, exporter.integration - ).int_value - if update_frequency is None: - update_frequency = MARCExporter.DEFAULT_UPDATE_FREQUENCY - - last_update = None - files_q = ( - self._db.query(CachedMARCFile) - .filter(CachedMARCFile.library == library) - .filter( - CachedMARCFile.lane == (lane if isinstance(lane, Lane) else None), + library_settings = MARCExporter.library_settings_load(integration) + settings = MARCExporter.settings_load(integration.parent) + + return settings, library_settings + + def process_libraries(self, libraries: Sequence[Library]) -> None: + if not self.storage_service: + self.log.info("No storage service was found.") + return + + super().process_libraries(libraries) + + def get_collections(self, library: Library) -> Sequence[Collection]: + return self._db.scalars( + select(Collection).where( + Collection.libraries.contains(library), + Collection.export_marc_records == True, ) - .order_by(CachedMARCFile.end_time.desc()) + ).all() + + def get_web_client_urls( + self, library: Library, url: str | None = None + ) -> list[str]: + """Find web client URLs configured by the registry for this library.""" + urls = [ + s.web_client + for s in self._db.execute( + select(DiscoveryServiceRegistration.web_client).where( + DiscoveryServiceRegistration.library == library, + DiscoveryServiceRegistration.web_client != None, + ) + ).all() + ] + + if url: + urls.append(url) + + return urls + + def process_library( + self, library: Library, annotator_cls: type[MarcAnnotator] = MarcAnnotator + ) -> None: + try: + settings, library_settings = self.settings(library) + except NoResultFound: + return + + self.log.info("Processing library %s" % library.name) + + update_frequency = int(settings.update_frequency) + + # Find the collections for this library. + collections = self.get_collections(library) + + # Find web client URLs configured by the registry for this library. + web_client_urls = self.get_web_client_urls( + library, library_settings.web_client_url ) - if files_q.count() > 0: - last_update = files_q.first().end_time + annotator = annotator_cls( + self.cm_base_url, + library.short_name or "", + web_client_urls, + library_settings.organization_code, + library_settings.include_summary, + library_settings.include_genres, + ) + + # We set the creation time to be the start of the batch. Any updates that happen during the batch will be + # included in the next batch. 
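+ # A single batch timestamp also keeps the full and delta exports below consistent across all of the library's collections.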
+ creation_time = utc_now() + + for collection in collections: + self.process_collection( + library, + collection, + annotator, + update_frequency, + creation_time, + ) + + def last_updated( + self, library: Library, collection: Collection + ) -> datetime.datetime | None: + """Find the most recent MarcFile creation time.""" + last_updated_file = self._db.execute( + select(MarcFile.created) + .where( + MarcFile.library == library, + MarcFile.collection == collection, + ) + .order_by(MarcFile.created.desc()) + ).first() + + return last_updated_file.created if last_updated_file else None + + def process_collection( + self, + library: Library, + collection: Collection, + annotator: MarcAnnotator, + update_frequency: int, + creation_time: datetime.datetime, + ) -> None: + last_update = self.last_updated(library, collection) + if ( not self.force and last_update - and (last_update > utc_now() - timedelta(days=update_frequency)) + and (last_update > creation_time - timedelta(days=update_frequency)) ): self.log.info( - "Skipping lane %s because last update was less than %d days ago" - % (lane.display_name, update_frequency) + f"Skipping collection {collection.name} because last update was less than {update_frequency} days ago" ) return - # Find the storage service - storage_service = self.services.storage.public() - if not storage_service: - self.log.info("No storage service was found.") - return - # First update the file with ALL the records. - records = exporter.records(lane, annotator, storage_service) + self.exporter.records( + library, collection, annotator, creation_time=creation_time + ) # Then create a new file with changes since the last update. - start_time = None if last_update: - # Allow one day of overlap to ensure we don't miss anything due to script timing. - start_time = last_update - timedelta(days=1) - - records = exporter.records( - lane, annotator, storage_service, start_time=start_time + self.exporter.records( + library, + collection, + annotator, + creation_time=creation_time, + since_time=last_update, ) + self._db.commit() + self.log.info("Processed collection %s" % collection.name) + class AdobeAccountIDResetScript(PatronInputScript): @classmethod @@ -403,7 +486,7 @@ class InstanceInitializationScript: """ def __init__(self) -> None: - self._log: Optional[logging.Logger] = None + self._log: logging.Logger | None = None self._container = container_instance() # Call init_resources() to initialize the logging configuration. 
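For orientation, the skip logic that `CacheMARCFiles.process_collection` applies above can be read in isolation as the short sketch below. This is illustrative only — `should_skip` is a hypothetical helper, not part of this patch — but the comparison against the batch start time minus `update_frequency` days matches the code.

    from datetime import datetime, timedelta, timezone

    def should_skip(last_update: datetime | None, batch_start: datetime,
                    update_frequency: int, force: bool = False) -> bool:
        # Skip only when not forced and a MARC file already exists that is
        # newer than the update window measured back from the batch start.
        return (
            not force
            and last_update is not None
            and last_update > batch_start - timedelta(days=update_frequency)
        )

    now = datetime.now(tz=timezone.utc)
    assert should_skip(now - timedelta(days=3), now, update_frequency=7)  # recent file: skip
    assert not should_skip(now - timedelta(days=10), now, update_frequency=7)  # stale: regenerate
    assert not should_skip(None, now, update_frequency=7)  # never exported: regenerate

When a collection is not skipped, the exporter runs twice: once for a full export stamped with the batch's `creation_time`, then, if a previous file exists, once more for a delta export passing `since_time=last_update`.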
diff --git a/tests/api/admin/controller/test_catalog_services.py b/tests/api/admin/controller/test_catalog_services.py index 6a6038ad9..fa8f5d76d 100644 --- a/tests/api/admin/controller/test_catalog_services.py +++ b/tests/api/admin/controller/test_catalog_services.py @@ -1,7 +1,9 @@ import json +from contextlib import nullcontext import flask import pytest +from flask import Response from werkzeug.datastructures import ImmutableMultiDict from api.admin.exceptions import AdminNotAuthorized @@ -9,201 +11,206 @@ CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, MISSING_SERVICE, + MISSING_SERVICE_NAME, MULTIPLE_SERVICES_FOR_LIBRARY, + NO_PROTOCOL_FOR_NEW_SERVICE, UNKNOWN_PROTOCOL, ) -from core.marc import MARCExporter -from core.model import ( - AdminRole, - ConfigurationSetting, - ExternalIntegration, - create, - get_one, -) -from tests.fixtures.api_admin import SettingsControllerFixture +from api.integration.registry.catalog_services import CatalogServicesRegistry +from core.integration.goals import Goals +from core.marc import MARCExporter, MarcExporterLibrarySettings +from core.model import AdminRole, IntegrationConfiguration, get_one +from core.util.problem_detail import ProblemDetail +from tests.fixtures.api_admin import AdminControllerFixture class TestCatalogServicesController: def test_catalog_services_get_with_no_services( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - with settings_ctrl_fixture.request_context_with_admin("/"): + with admin_ctrl_fixture.request_context_with_admin("/"): + pytest.raises( + AdminNotAuthorized, + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services, + ) + + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) - assert response.get("catalog_services") == [] - protocols = response.get("protocols") + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + assert data.get("catalog_services") == [] + protocols = data.get("protocols") + assert isinstance(protocols, list) assert 1 == len(protocols) - assert MARCExporter.NAME == protocols[0].get("name") - assert "settings" in protocols[0] - assert "library_settings" in protocols[0] - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - settings_ctrl_fixture.ctrl.db.session.flush() - pytest.raises( - AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services, + assert protocols[0].get("name") == CatalogServicesRegistry().get_protocol( + MARCExporter ) + assert "settings" in protocols[0] + assert "library_settings" in protocols[0] def test_catalog_services_get_with_marc_exporter( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - integration, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.MARC_EXPORT, - goal=ExternalIntegration.CATALOG_GOAL, + db = admin_ctrl_fixture.ctrl.db + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + library_settings = MarcExporterLibrarySettings( + include_summary=True, include_genres=True, organization_code="US-MaBoDPL" + ) + + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol 
is not None + integration = db.integration_configuration( + protocol, + Goals.CATALOG_GOAL, name="name", ) - integration.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - MARCExporter.MARC_ORGANIZATION_CODE, - settings_ctrl_fixture.ctrl.db.default_library(), - integration, - ).value = "US-MaBoDPL" - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - MARCExporter.INCLUDE_SUMMARY, - settings_ctrl_fixture.ctrl.db.default_library(), - integration, - ).value = "false" - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - MARCExporter.INCLUDE_SIMPLIFIED_GENRES, - settings_ctrl_fixture.ctrl.db.default_library(), - integration, - ).value = "true" - with settings_ctrl_fixture.request_context_with_admin("/"): + integration.libraries += [db.default_library()] + library_settings_integration = integration.for_library(db.default_library()) + MARCExporter.library_settings_update( + library_settings_integration, library_settings + ) + + with admin_ctrl_fixture.request_context_with_admin("/"): response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) - [service] = response.get("catalog_services") + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + services = data.get("catalog_services") + assert isinstance(services, list) + assert len(services) == 1 + service = services[0] assert integration.id == service.get("id") assert integration.name == service.get("name") assert integration.protocol == service.get("protocol") [library] = service.get("libraries") assert ( - settings_ctrl_fixture.ctrl.db.default_library().short_name + admin_ctrl_fixture.ctrl.db.default_library().short_name == library.get("short_name") ) - assert "US-MaBoDPL" == library.get(MARCExporter.MARC_ORGANIZATION_CODE) - assert "false" == library.get(MARCExporter.INCLUDE_SUMMARY) - assert "true" == library.get(MARCExporter.INCLUDE_SIMPLIFIED_GENRES) + assert "US-MaBoDPL" == library.get("organization_code") + assert library.get("include_summary") is True + assert library.get("include_genres") is True + @pytest.mark.parametrize( + "post_data,expected,admin,raises", + [ + pytest.param({}, None, False, AdminNotAuthorized, id="not admin"), + pytest.param({}, NO_PROTOCOL_FOR_NEW_SERVICE, True, None, id="no protocol"), + pytest.param( + {"protocol": "Unknown"}, + UNKNOWN_PROTOCOL, + True, + None, + id="unknown protocol", + ), + pytest.param( + {"protocol": "MARCExporter", "id": "123"}, + MISSING_SERVICE, + True, + None, + id="unknown id", + ), + pytest.param( + {"protocol": "MARCExporter", "id": ""}, + CANNOT_CHANGE_PROTOCOL, + True, + None, + id="cannot change protocol", + ), + pytest.param( + {"protocol": "MARCExporter"}, + MISSING_SERVICE_NAME, + True, + None, + id="no name", + ), + pytest.param( + {"protocol": "MARCExporter", "name": "existing integration"}, + INTEGRATION_NAME_ALREADY_IN_USE, + True, + None, + id="name already in use", + ), + pytest.param( + { + "protocol": "MARCExporter", + "name": "new name", + "libraries": json.dumps([{"short_name": "default"}]), + }, + MULTIPLE_SERVICES_FOR_LIBRARY, + True, + None, + id="multiple services for library", + ), + ], + ) def test_catalog_services_post_errors( - self, 
settings_ctrl_fixture: SettingsControllerFixture + self, + admin_ctrl_fixture: AdminControllerFixture, + post_data: dict[str, str], + expected: ProblemDetail | None, + admin: bool, + raises: type[Exception] | None, ): - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("protocol", "Unknown"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == UNKNOWN_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", "123"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == MISSING_SERVICE - - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol="fake protocol", - goal=ExternalIntegration.CATALOG_GOAL, - name="name", - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(service.id)), - ("protocol", ExternalIntegration.MARC_EXPORT), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == CANNOT_CHANGE_PROTOCOL + if admin: + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", str(service.name)), - ("protocol", ExternalIntegration.MARC_EXPORT), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == INTEGRATION_NAME_ALREADY_IN_USE + context_manager = pytest.raises(raises) if raises is not None else nullcontext() - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.MARC_EXPORT, - goal=ExternalIntegration.CATALOG_GOAL, + db = admin_ctrl_fixture.ctrl.db + service = db.integration_configuration( + "fake protocol", + Goals.CATALOG_GOAL, + name="existing integration", ) + service.libraries += [db.default_library()] - # This should be the last test to check since rolling back database - # changes in the test can cause it to crash. 
- service.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + if post_data.get("id") == "": + post_data["id"] = str(service.id) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - ME = MARCExporter - flask.request.form = ImmutableMultiDict( - [ - ("name", "new name"), - ("protocol", ME.NAME), - ( - "libraries", - json.dumps( - [ - { - "short_name": settings_ctrl_fixture.ctrl.db.default_library().short_name, - ME.INCLUDE_SUMMARY: "false", - ME.INCLUDE_SIMPLIFIED_GENRES: "true", - } - ] - ), - ), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response.uri == MULTIPLE_SERVICES_FOR_LIBRARY.uri + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): + flask.request.form = ImmutableMultiDict(post_data) + with context_manager: + response = ( + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + ) + assert isinstance(response, ProblemDetail) + assert isinstance(expected, ProblemDetail) + assert response.uri == expected.uri + assert response.status_code == expected.status_code + assert response.title == expected.title def test_catalog_services_post_create( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - ME = MARCExporter + db = admin_ctrl_fixture.ctrl.db + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "exporter name"), - ("protocol", ME.NAME), + ("protocol", protocol), ( "libraries", json.dumps( [ { - "short_name": settings_ctrl_fixture.ctrl.db.default_library().short_name, - ME.INCLUDE_SUMMARY: "false", - ME.INCLUDE_SIMPLIFIED_GENRES: "true", + "short_name": db.default_library().short_name, + "include_summary": "false", + "include_genres": "true", } ] ), @@ -211,67 +218,55 @@ def test_catalog_services_post_create( ] ) response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) + assert isinstance(response, Response) assert response.status_code == 201 service = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - goal=ExternalIntegration.CATALOG_GOAL, + db.session, + IntegrationConfiguration, + goal=Goals.CATALOG_GOAL, ) - assert isinstance(service, ExternalIntegration) + assert isinstance(service, IntegrationConfiguration) - assert service.id == int(response.get_data()) - assert ME.NAME == service.protocol - assert "exporter name" == service.name - assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries - assert ( - "false" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SUMMARY, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) - assert ( - "true" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SIMPLIFIED_GENRES, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) + assert int(response.get_data()) == service.id + assert 
service.protocol == protocol + assert service.name == "exporter name" + assert service.libraries == [db.default_library()] + + settings = MARCExporter.library_settings_load(service.library_configurations[0]) + assert settings.include_summary is False + assert settings.include_genres is True def test_catalog_services_post_edit( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - ME = MARCExporter + db = admin_ctrl_fixture.ctrl.db + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ME.NAME, - goal=ExternalIntegration.CATALOG_GOAL, + service = db.integration_configuration( + protocol, + Goals.CATALOG_GOAL, name="name", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "exporter name"), ("id", str(service.id)), - ("protocol", ME.NAME), + ("protocol", protocol), ( "libraries", json.dumps( [ { - "short_name": settings_ctrl_fixture.ctrl.db.default_library().short_name, - ME.INCLUDE_SUMMARY: "false", - ME.INCLUDE_SIMPLIFIED_GENRES: "true", + "short_name": db.default_library().short_name, + "include_summary": "true", + "include_genres": "false", } ] ), @@ -279,60 +274,44 @@ def test_catalog_services_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) + assert isinstance(response, Response) assert response.status_code == 200 - assert service.id == int(response.get_data()) - assert ME.NAME == service.protocol - assert "exporter name" == service.name - assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries - assert ( - "false" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SUMMARY, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) - assert ( - "true" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SIMPLIFIED_GENRES, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) + assert int(response.get_data()) == service.id + assert service.protocol == protocol + assert service.name == "exporter name" + assert service.libraries == [db.default_library()] - def test_catalog_services_delete( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - ME = MARCExporter - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ME.NAME, - goal=ExternalIntegration.CATALOG_GOAL, + settings = MARCExporter.library_settings_load(service.library_configurations[0]) + assert settings.include_summary is True + assert settings.include_genres is False + + def test_catalog_services_delete(self, admin_ctrl_fixture: AdminControllerFixture): + db = admin_ctrl_fixture.ctrl.db + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + + service = db.integration_configuration( + protocol, + Goals.CATALOG_GOAL, name="name", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) 
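+        # The fixture's admin starts with no roles, so the first
+        # process_delete call below is expected to raise AdminNotAuthorized;
+        # SYSTEM_ADMIN is granted afterwards to exercise the successful path.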
+ with admin_ctrl_fixture.request_context_with_admin("/", method="DELETE"): pytest.raises( AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_delete, + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_delete, service.id, ) - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - response = settings_ctrl_fixture.manager.admin_catalog_services_controller.process_delete( + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + response = admin_ctrl_fixture.manager.admin_catalog_services_controller.process_delete( service.id ) + assert isinstance(response, Response) assert response.status_code == 200 - none_service = get_one( - settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, id=service.id - ) + none_service = get_one(db.session, IntegrationConfiguration, id=service.id) assert none_service is None diff --git a/tests/api/admin/controller/test_collection_self_tests.py b/tests/api/admin/controller/test_collection_self_tests.py index 8dd7e0a5f..f48a3c12a 100644 --- a/tests/api/admin/controller/test_collection_self_tests.py +++ b/tests/api/admin/controller/test_collection_self_tests.py @@ -1,174 +1,179 @@ -from flask_babel import lazy_gettext as _ - -from api.admin.problem_details import * -from api.axis import Axis360API +from unittest.mock import MagicMock + +import pytest +from _pytest.monkeypatch import MonkeyPatch + +from api.admin.controller.collection_self_tests import CollectionSelfTestsController +from api.admin.problem_details import ( + FAILED_TO_RUN_SELF_TESTS, + MISSING_IDENTIFIER, + MISSING_SERVICE, + UNKNOWN_PROTOCOL, +) +from api.integration.registry.license_providers import LicenseProvidersRegistry from api.selftest import HasCollectionSelfTests -from core.opds_import import OPDSImportMonitor +from core.selftest import HasSelfTestsIntegrationConfiguration +from core.util.problem_detail import ProblemDetail, ProblemError from tests.api.mockapi.axis import MockAxis360API -from tests.fixtures.api_admin import SettingsControllerFixture +from tests.fixtures.database import DatabaseTransactionFixture + + +@pytest.fixture +def controller(db: DatabaseTransactionFixture) -> CollectionSelfTestsController: + return CollectionSelfTestsController(db.session) class TestCollectionSelfTests: def test_collection_self_tests_with_no_identifier( - self, settings_ctrl_fixture: SettingsControllerFixture + self, controller: CollectionSelfTestsController ): - with settings_ctrl_fixture.request_context_with_admin("/"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - None - ) - assert response.title == MISSING_IDENTIFIER.title - assert response.detail == MISSING_IDENTIFIER.detail - assert response.status_code == 400 + response = controller.process_collection_self_tests(None) + assert isinstance(response, ProblemDetail) + assert response.title == MISSING_IDENTIFIER.title + assert response.detail == MISSING_IDENTIFIER.detail + assert response.status_code == 400 def test_collection_self_tests_with_no_collection_found( - self, settings_ctrl_fixture: SettingsControllerFixture + self, controller: CollectionSelfTestsController ): - with settings_ctrl_fixture.request_context_with_admin("/"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - -1 - ) - assert response == NO_SUCH_COLLECTION - assert response.status_code == 404 + with pytest.raises(ProblemError) as excinfo: + 
controller.self_tests_process_get(-1) + assert excinfo.value.problem_detail == MISSING_SERVICE - def test_collection_self_tests_test_get( - self, settings_ctrl_fixture: SettingsControllerFixture + def test_collection_self_tests_with_unknown_protocol( + self, db: DatabaseTransactionFixture, controller: CollectionSelfTestsController ): - old_prior_test_results = HasCollectionSelfTests.prior_test_results - setattr( - HasCollectionSelfTests, - "prior_test_results", - settings_ctrl_fixture.mock_prior_test_results, + collection = db.collection(protocol="test") + assert collection.integration_configuration.id is not None + with pytest.raises(ProblemError) as excinfo: + controller.self_tests_process_get(collection.integration_configuration.id) + assert excinfo.value.problem_detail == UNKNOWN_PROTOCOL + + def test_collection_self_tests_with_unsupported_protocol( + self, db: DatabaseTransactionFixture, controller: CollectionSelfTestsController + ): + registry = LicenseProvidersRegistry() + registry.register(object, canonical="mock_api") # type: ignore[arg-type] + collection = db.collection(protocol="mock_api") + controller = CollectionSelfTestsController(db.session, registry) + assert collection.integration_configuration.id is not None + result = controller.self_tests_process_get( + collection.integration_configuration.id ) + + assert result.status_code == 200 + assert isinstance(result.json, dict) + assert result.json["self_test_results"]["self_test_results"] == { + "disabled": True, + "exception": "Self tests are not supported for this integration.", + } + + def test_collection_self_tests_test_get( + self, + db: DatabaseTransactionFixture, + controller: CollectionSelfTestsController, + monkeypatch: MonkeyPatch, + ): collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), + db.session, + db.default_library(), + ) + + self_test_results = dict( + duration=0.9, + start="2018-08-08T16:04:05Z", + end="2018-08-08T16:05:05Z", + results=[], + ) + mock = MagicMock(return_value=self_test_results) + monkeypatch.setattr( + HasSelfTestsIntegrationConfiguration, "load_self_test_results", mock ) # Make sure that HasSelfTest.prior_test_results() was called and that # it is in the response's collection object. 
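+        # (In the refactored test the prior results come from the mocked
+        # HasSelfTestsIntegrationConfiguration.load_self_test_results above,
+        # rather than from HasCollectionSelfTests.prior_test_results.)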
- with settings_ctrl_fixture.request_context_with_admin("/"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - responseCollection = response.get("self_test_results") + assert collection.integration_configuration.id is not None + response = controller.self_tests_process_get( + collection.integration_configuration.id + ) - assert responseCollection.get("id") == collection.id - assert responseCollection.get("name") == collection.name - assert responseCollection.get("protocol") == collection.protocol - assert ( - responseCollection.get("self_test_results") - == settings_ctrl_fixture.self_test_results - ) + data = response.json + assert isinstance(data, dict) + test_results = data.get("self_test_results") + assert isinstance(test_results, dict) - setattr(HasCollectionSelfTests, "prior_test_results", old_prior_test_results) + assert test_results.get("id") == collection.integration_configuration.id + assert test_results.get("name") == collection.name + assert test_results.get("protocol") == collection.protocol + assert test_results.get("self_test_results") == self_test_results + assert mock.call_count == 1 def test_collection_self_tests_failed_post( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + db: DatabaseTransactionFixture, + controller: CollectionSelfTestsController, + monkeypatch: MonkeyPatch, ): - # This makes HasSelfTests.run_self_tests return no values - old_run_self_tests = HasCollectionSelfTests.run_self_tests - setattr( - HasCollectionSelfTests, - "run_self_tests", - settings_ctrl_fixture.mock_failed_run_self_tests, + collection = MockAxis360API.mock_collection( + db.session, + db.default_library(), ) - collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), + # This makes HasSelfTests.run_self_tests return no values + self_test_results = (None, None) + mock = MagicMock(return_value=self_test_results) + monkeypatch.setattr( + HasSelfTestsIntegrationConfiguration, "run_self_tests", mock ) # Failed to run self tests - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) + assert collection.integration_configuration.id is not None - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.failed_run_self_tests_called_with - assert response.title == FAILED_TO_RUN_SELF_TESTS.title - assert response.detail == "Failed to run self tests for this collection." 
- assert response.status_code == 400 + with pytest.raises(ProblemError) as excinfo: + controller.self_tests_process_post(collection.integration_configuration.id) - setattr(HasCollectionSelfTests, "run_self_tests", old_run_self_tests) + assert excinfo.value.problem_detail == FAILED_TO_RUN_SELF_TESTS + + def test_collection_self_tests_run_self_tests_unsupported_collection( + self, + db: DatabaseTransactionFixture, + ): + registry = LicenseProvidersRegistry() + registry.register(object, canonical="mock_api") # type: ignore[arg-type] + collection = db.collection(protocol="mock_api") + controller = CollectionSelfTestsController(db.session, registry) + response = controller.run_self_tests(collection.integration_configuration) + assert response is None def test_collection_self_tests_post( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + db: DatabaseTransactionFixture, ): - old_run_self_tests = HasCollectionSelfTests.run_self_tests - setattr( - HasCollectionSelfTests, - "run_self_tests", - settings_ctrl_fixture.mock_run_self_tests, - ) + mock = MagicMock() - collection = settings_ctrl_fixture.ctrl.db.collection() - # Successfully ran new self tests for the OPDSImportMonitor provider API - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.run_self_tests_called_with - assert response.response == _("Successfully ran new self tests") - assert response._status == "200 OK" - - # The provider API class and the collection should be passed to - # the run_self_tests method of the provider API class. - assert run_self_tests_args[1] == OPDSImportMonitor - assert run_self_tests_args[3] == collection + class MockApi(HasCollectionSelfTests): + def __new__(cls, *args, **kwargs): + nonlocal mock + return mock(*args, **kwargs) - collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), - ) - # Successfully ran new self tests - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.run_self_tests_called_with - assert response.response == _("Successfully ran new self tests") - assert response._status == "200 OK" - - # The provider API class and the collection should be passed to - # the run_self_tests method of the provider API class. 
- assert run_self_tests_args[1] == Axis360API - assert run_self_tests_args[3] == collection + @property + def collection(self) -> None: + return None - collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), + registry = LicenseProvidersRegistry() + registry.register(MockApi, canonical="Foo") # type: ignore[arg-type] + + collection = db.collection(protocol="Foo") + controller = CollectionSelfTestsController(db.session, registry) + + assert collection.integration_configuration.id is not None + response = controller.self_tests_process_post( + collection.integration_configuration.id ) - collection.protocol = "Non existing protocol" - # clearing out previous call to mocked run_self_tests - settings_ctrl_fixture.run_self_tests_called_with = (None, None) - - # No protocol found so run_self_tests was not called - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.run_self_tests_called_with - assert response.title == FAILED_TO_RUN_SELF_TESTS.title - assert response.detail == "Failed to run self tests for this collection." - assert response.status_code == 400 - - # The method returns None but it was not called - assert run_self_tests_args == None - - setattr(HasCollectionSelfTests, "run_self_tests", old_run_self_tests) + + assert response.get_data(as_text=True) == "Successfully ran new self tests" + assert response.status_code == 200 + + mock.assert_called_once_with(db.session, collection) + mock()._run_self_tests.assert_called_once_with(db.session) + assert mock().store_self_test_results.call_count == 1 diff --git a/tests/api/admin/controller/test_collection_settings.py b/tests/api/admin/controller/test_collection_settings.py deleted file mode 100644 index c8f342f2a..000000000 --- a/tests/api/admin/controller/test_collection_settings.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import Any -from unittest.mock import PropertyMock, create_autospec, patch - -from api.admin.controller.collection_settings import CollectionSettingsController -from api.controller import CirculationManager -from core.integration.goals import Goals -from core.integration.registry import IntegrationRegistry -from tests.fixtures.database import DatabaseTransactionFixture - - -class TestCollectionSettingsController: - def test_duplicate_protocol_settings(self, db: DatabaseTransactionFixture): - """Dedupe protocol settings using the last settings of the same value""" - manager = create_autospec(spec=CirculationManager) - manager._db = PropertyMock(return_value=db.session) - - class MockProviderAPI: - NAME = "NAME" - SETTINGS = [ - dict(key="k1", value="v1"), - dict(key="k2", value="v2"), # This should get overwritten - dict(key="k2", value="v3"), # Only this should remain - ] - - controller = CollectionSettingsController(manager) - with patch.object( - controller, "registry", IntegrationRegistry[Any](Goals.LICENSE_GOAL) - ) as registry: - registry.register(MockProviderAPI, canonical=MockProviderAPI.NAME) - protocols = controller._get_collection_protocols() - - k2_list = list(filter(lambda x: x["key"] == "k2", protocols[0]["settings"])) - assert len(k2_list) == 1 - assert k2_list[0]["value"] == "v3" - - class MockProviderAPIMulti: - NAME = "NAME" - SETTINGS = [ - dict(key="k1", value="v0"), # This should get 
overwritten - dict(key="k1", value="v1"), # Only this should remain - dict(key="k2", value="v1"), # This should get overwritten - dict(key="k2", value="v2"), # This should get overwritten - dict(key="k2", value="v4"), # Only this should remain - ] - - with patch.object( - controller, "registry", IntegrationRegistry[Any](Goals.LICENSE_GOAL) - ) as registry: - registry.register(MockProviderAPIMulti, canonical=MockProviderAPIMulti.NAME) - protocols = controller._get_collection_protocols() - - k2_list = list(filter(lambda x: x["key"] == "k2", protocols[0]["settings"])) - assert len(k2_list) == 1 - assert k2_list[0]["value"] == "v4" - - k1_list = list(filter(lambda x: x["key"] == "k1", protocols[0]["settings"])) - assert len(k1_list) == 1 - assert k1_list[0]["value"] == "v1" diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index 244494454..6685cbdb8 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -2,24 +2,24 @@ import flask import pytest +from flask import Response from werkzeug.datastructures import ImmutableMultiDict from api.admin.exceptions import AdminNotAuthorized from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, CANNOT_DELETE_COLLECTION_WITH_CHILDREN, - COLLECTION_NAME_ALREADY_IN_USE, INCOMPLETE_CONFIGURATION, - MISSING_COLLECTION, - MISSING_COLLECTION_NAME, + INTEGRATION_NAME_ALREADY_IN_USE, MISSING_PARENT, + MISSING_SERVICE, + MISSING_SERVICE_NAME, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, UNKNOWN_PROTOCOL, ) from api.integration.registry.license_providers import LicenseProvidersRegistry -from api.selftest import HasCollectionSelfTests from core.model import ( Admin, AdminRole, @@ -28,87 +28,87 @@ create, get_one, ) -from core.selftest import HasSelfTests -from tests.fixtures.api_admin import SettingsControllerFixture +from core.util.problem_detail import ProblemDetail +from tests.fixtures.api_admin import AdminControllerFixture from tests.fixtures.database import DatabaseTransactionFixture class TestCollectionSettings: def test_collections_get_with_no_collections( - self, settings_ctrl_fixture: SettingsControllerFixture - ): + self, admin_ctrl_fixture: AdminControllerFixture + ) -> None: + db = admin_ctrl_fixture.ctrl.db # Delete any existing collections created by the test setup. 
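+        # (Only the fixture's default collection needs to be removed here;
+        # the assertion below expects an empty "collections" list.)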
- for collection in settings_ctrl_fixture.ctrl.db.session.query(Collection): - settings_ctrl_fixture.ctrl.db.session.delete(collection) + db.session.delete(db.default_collection()) - with settings_ctrl_fixture.request_context_with_admin("/"): + with admin_ctrl_fixture.request_context_with_admin("/"): response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) - assert response.get("collections") == [] + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + assert data.get("collections") == [] - names = {p.get("name") for p in response.get("protocols")} + names = {p.get("name") for p in data.get("protocols", {})} expected_names = {k for k, v in LicenseProvidersRegistry()} assert names == expected_names def test_collections_get_collections_with_multiple_collections( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - old_prior_test_results = HasSelfTests.prior_test_results - setattr( - HasCollectionSelfTests, - "prior_test_results", - settings_ctrl_fixture.mock_prior_test_results, - ) - session = settings_ctrl_fixture.ctrl.db.session + self, admin_ctrl_fixture: AdminControllerFixture + ) -> None: + session = admin_ctrl_fixture.ctrl.db.session + db = admin_ctrl_fixture.ctrl.db - [c1] = settings_ctrl_fixture.ctrl.db.default_library().collections + [c1] = db.default_library().collections - c2 = settings_ctrl_fixture.ctrl.db.collection( + c2 = db.collection( name="Collection 2", protocol=ExternalIntegration.OVERDRIVE, + external_account_id="1234", + settings=dict( + overdrive_client_secret="b", + overdrive_client_key="user", + overdrive_website_id="100", + ), ) - c2.external_account_id = "1234" - DatabaseTransactionFixture.set_settings( - c2.integration_configuration, - overdrive_client_secret="b", - overdrive_client_key="user", - overdrive_website_id="100", - ) - - c3 = settings_ctrl_fixture.ctrl.db.collection( + c3 = db.collection( name="Collection 3", protocol=ExternalIntegration.OVERDRIVE, + external_account_id="5678", ) - c3.external_account_id = "5678" c3.parent = c2 - l1 = settings_ctrl_fixture.ctrl.db.library(short_name="L1") - c3.libraries += [l1, settings_ctrl_fixture.ctrl.db.default_library()] + l1 = db.library(short_name="L1") + c3.libraries += [l1, db.default_library()] assert isinstance(l1.id, int) - l1_config = c3.integration_configuration.for_library(l1.id, create=True) + l1_config = c3.integration_configuration.for_library(l1.id) + assert l1_config is not None DatabaseTransactionFixture.set_settings(l1_config, ebook_loan_duration="14") # Commit the config changes session.commit() - l1_librarian, ignore = create( - settings_ctrl_fixture.ctrl.db.session, Admin, email="admin@l1.org" - ) + l1_librarian, ignore = create(session, Admin, email="admin@l1.org") l1_librarian.add_role(AdminRole.LIBRARIAN, l1) - with settings_ctrl_fixture.request_context_with_admin("/"): - controller = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller - ) + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + + with admin_ctrl_fixture.request_context_with_admin("/"): + controller = admin_ctrl_fixture.manager.admin_collection_settings_controller response = controller.process_collections() + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) # The system admin can see all collections. 
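+            # (Each "id" in the response is now the collection's
+            # IntegrationConfiguration id, not the Collection id.)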
coll2, coll3, coll1 = sorted( - response.get("collections"), key=lambda c: c.get("name") + data.get("collections", []), key=lambda c: c.get("name", "") ) - assert c1.id == coll1.get("id") - assert c2.id == coll2.get("id") - assert c3.id == coll3.get("id") + assert c1.integration_configuration.id == coll1.get("id") + assert c2.integration_configuration.id == coll2.get("id") + assert c3.integration_configuration.id == coll3.get("id") assert c1.name == coll1.get("name") assert c2.name == coll2.get("name") @@ -118,29 +118,21 @@ def test_collections_get_collections_with_multiple_collections( assert c2.protocol == coll2.get("protocol") assert c3.protocol == coll3.get("protocol") - assert settings_ctrl_fixture.self_test_results == coll1.get( - "self_test_results" - ) - assert settings_ctrl_fixture.self_test_results == coll2.get( - "self_test_results" - ) - assert settings_ctrl_fixture.self_test_results == coll3.get( - "self_test_results" - ) - settings1 = coll1.get("settings", {}) settings2 = coll2.get("settings", {}) settings3 = coll3.get("settings", {}) - assert c1.external_account_id == settings1.get("external_account_id") - assert c2.external_account_id == settings2.get("external_account_id") - assert c3.external_account_id == settings3.get("external_account_id") + assert ( + settings1.get("external_account_id") == "http://opds.example.com/feed" + ) + assert settings2.get("external_account_id") == "1234" + assert settings3.get("external_account_id") == "5678" assert c2.integration_configuration.settings_dict[ "overdrive_client_secret" ] == settings2.get("overdrive_client_secret") - assert c2.id == coll3.get("parent_id") + assert c2.integration_configuration.id == coll3.get("parent_id") coll3_libraries = coll3.get("libraries") assert 2 == len(coll3_libraries) @@ -149,218 +141,179 @@ def test_collections_get_collections_with_multiple_collections( ) assert "L1" == coll3_l1.get("short_name") assert "14" == coll3_l1.get("ebook_loan_duration") - assert ( - settings_ctrl_fixture.ctrl.db.default_library().short_name - == coll3_default.get("short_name") - ) + assert db.default_library().short_name == coll3_default.get("short_name") - with settings_ctrl_fixture.request_context_with_admin("/", admin=l1_librarian): + with admin_ctrl_fixture.request_context_with_admin("/", admin=l1_librarian): # A librarian only sees collections associated with their library. 
response = controller.process_collections() - [coll3] = response.get("collections") - assert c3.id == coll3.get("id") + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + [coll3] = data.get("collections", []) + assert c3.integration_configuration.id == coll3.get("id") coll3_libraries = coll3.get("libraries") assert 1 == len(coll3_libraries) assert "L1" == coll3_libraries[0].get("short_name") assert "14" == coll3_libraries[0].get("ebook_loan_duration") - setattr(HasCollectionSelfTests, "prior_test_results", old_prior_test_results) - + @pytest.mark.parametrize( + "post_data,expected,detailed", + [ + pytest.param( + {"protocol": "Overdrive"}, + MISSING_SERVICE_NAME, + False, + id="missing_name", + ), + pytest.param( + {"name": "collection"}, + NO_PROTOCOL_FOR_NEW_SERVICE, + False, + id="missing_protocol", + ), + pytest.param( + {"name": "collection", "protocol": "Unknown"}, + UNKNOWN_PROTOCOL, + False, + id="unknown_protocol", + ), + pytest.param( + {"id": "123456789", "name": "collection", "protocol": "Bibliotheca"}, + MISSING_SERVICE, + False, + id="missing_service", + ), + pytest.param( + {"name": "Collection 1", "protocol": "Bibliotheca"}, + INTEGRATION_NAME_ALREADY_IN_USE, + False, + id="name_in_use", + ), + pytest.param( + {"id": "", "name": "Collection 1", "protocol": "Bibliotheca"}, + CANNOT_CHANGE_PROTOCOL, + False, + id="change_protocol", + ), + pytest.param( + { + "name": "Collection 2", + "protocol": "Bibliotheca", + "parent_id": "1234", + }, + PROTOCOL_DOES_NOT_SUPPORT_PARENTS, + False, + id="protocol_does_not_support_parents", + ), + pytest.param( + {"name": "Collection 2", "protocol": "Overdrive", "parent_id": "1234"}, + MISSING_PARENT, + False, + id="missing_parent", + ), + pytest.param( + { + "name": "collection", + "protocol": "OPDS Import", + "external_account_id": "http://url.test", + "data_source": "test", + "libraries": json.dumps([{"short_name": "nosuchlibrary"}]), + }, + NO_SUCH_LIBRARY, + True, + id="no_such_library", + ), + pytest.param( + {"name": "collection", "protocol": "OPDS Import"}, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_opds", + ), + pytest.param( + { + "name": "collection", + "protocol": "Overdrive", + "external_account_id": "1234", + "overdrive_client_key": "user", + "overdrive_client_secret": "password", + }, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_overdrive", + ), + pytest.param( + { + "name": "collection", + "protocol": "Bibliotheca", + "external_account_id": "1234", + "password": "password", + }, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_bibliotheca", + ), + pytest.param( + { + "name": "collection", + "protocol": "Axis 360", + "username": "user", + "password": "password", + }, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_axis", + ), + ], + ) def test_collections_post_errors( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + admin_ctrl_fixture: AdminControllerFixture, + post_data: dict[str, str], + expected: ProblemDetail, + detailed: bool, ): - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("protocol", "Overdrive"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_COLLECTION_NAME - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection"), - ] - ) - 
response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == NO_PROTOCOL_FOR_NEW_SERVICE - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection"), - ("protocol", "Unknown"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == UNKNOWN_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", "123456789"), - ("name", "collection"), - ("protocol", "Bibliotheca"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_COLLECTION + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - collection = settings_ctrl_fixture.ctrl.db.collection( + collection = admin_ctrl_fixture.ctrl.db.collection( name="Collection 1", protocol=ExternalIntegration.OVERDRIVE ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Collection 1"), - ("protocol", "Bibliotheca"), - ] - ) + if "id" in post_data and post_data["id"] == "": + post_data["id"] = str(collection.integration_configuration.id) + + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): + flask.request.form = ImmutableMultiDict(post_data) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) - assert response == COLLECTION_NAME_ALREADY_IN_USE - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + if detailed: + assert isinstance(response, ProblemDetail) + assert response.status_code == expected.status_code + assert response.uri == expected.uri + else: + assert response == expected + + def test_collections_post_errors_no_permissions( + self, admin_ctrl_fixture: AdminControllerFixture + ): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), ("name", "Collection 1"), ("protocol", "Overdrive"), ] ) pytest.raises( AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections, + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections, ) - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(collection.id)), - ("name", "Collection 1"), - ("protocol", "Bibliotheca"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == CANNOT_CHANGE_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Collection 2"), - ("protocol", "Bibliotheca"), - ("parent_id", "1234"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == PROTOCOL_DOES_NOT_SUPPORT_PARENTS - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = 
ImmutableMultiDict( - [ - ("name", "Collection 2"), - ("protocol", "Overdrive"), - ("parent_id", "1234"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_PARENT - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection"), - ("protocol", "OPDS Import"), - ("external_account_id", "http://url.test"), - ("data_source", "test"), - ("libraries", json.dumps([{"short_name": "nosuchlibrary"}])), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == NO_SUCH_LIBRARY.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "OPDS Import"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "Overdrive"), - ("external_account_id", "1234"), - ("overdrive_client_key", "user"), - ("overdrive_client_secret", "password"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "Bibliotheca"), - ("external_account_id", "1234"), - ("password", "password"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "Axis 360"), - ("username", "user"), - ("password", "password"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - def test_collections_post_create( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - db = settings_ctrl_fixture.ctrl.db + def test_collections_post_create(self, admin_ctrl_fixture: AdminControllerFixture): + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + db = admin_ctrl_fixture.ctrl.db l1 = db.library( name="Library 1", short_name="L1", @@ -374,7 +327,7 @@ def test_collections_post_create( short_name="L3", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "New Collection"), @@ -395,18 +348,20 @@ def test_collections_post_create( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) + assert isinstance(response, Response) assert response.status_code == 201 # The collection was created and configured properly. 
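+        # (The 201 response body carries the new IntegrationConfiguration id,
+        # which the assertions below read back with int(response.get_data()).)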
- collection = get_one( - settings_ctrl_fixture.ctrl.db.session, Collection, name="New Collection" - ) + collection = Collection.by_name(db.session, name="New Collection") assert isinstance(collection, Collection) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) assert "New Collection" == collection.name - assert "acctid" == collection.external_account_id + assert ( + "acctid" + == collection.integration_configuration.settings_dict["external_account_id"] + ) assert ( "username" == collection.integration_configuration.settings_dict[ @@ -442,12 +397,12 @@ def test_collections_post_create( assert "l2_ils" == l2_settings.settings_dict["ils_name"] # This collection will be a child of the first collection. - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "Child Collection"), ("protocol", "Overdrive"), - ("parent_id", str(collection.id)), + ("parent_id", str(collection.integration_configuration.id)), ( "libraries", json.dumps([{"short_name": "L3", "ils_name": "l3_ils"}]), @@ -456,18 +411,20 @@ def test_collections_post_create( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) + assert isinstance(response, Response) assert response.status_code == 201 # The collection was created and configured properly. - child = get_one( - settings_ctrl_fixture.ctrl.db.session, Collection, name="Child Collection" - ) + child = Collection.by_name(db.session, name="Child Collection") assert isinstance(child, Collection) - assert child.id == int(response.response[0]) + assert child.integration_configuration.id == int(response.get_data()) assert "Child Collection" == child.name - assert "child-acctid" == child.external_account_id + assert ( + "child-acctid" + == child.integration_configuration.settings_dict["external_account_id"] + ) # The settings that are inherited from the parent weren't set. assert "username" not in child.integration_configuration.settings_dict @@ -481,23 +438,23 @@ def test_collections_post_create( assert l3_settings is not None assert "l3_ils" == l3_settings.settings_dict["ils_name"] - def test_collections_post_edit( - self, settings_ctrl_fixture: SettingsControllerFixture - ): + def test_collections_post_edit(self, admin_ctrl_fixture: AdminControllerFixture): # The collection exists. 
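+        # (Edits now address the collection by its integration_configuration
+        # id; the form's "id" field below carries that value.)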
- collection = settings_ctrl_fixture.ctrl.db.collection( + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + db = admin_ctrl_fixture.ctrl.db + collection = db.collection( name="Collection 1", protocol=ExternalIntegration.OVERDRIVE ) - l1 = settings_ctrl_fixture.ctrl.db.library( + l1 = db.library( name="Library 1", short_name="L1", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.OVERDRIVE), ("external_account_id", "1234"), @@ -512,11 +469,12 @@ def test_collections_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The collection has been changed. assert "user2" == collection.integration_configuration.settings_dict.get( @@ -540,10 +498,10 @@ def test_collections_post_edit( assert l1_settings is not None assert "the_ils" == l1_settings.settings_dict.get("ils_name") - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.OVERDRIVE), ("external_account_id", "1234"), @@ -554,11 +512,12 @@ def test_collections_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The collection is the same. assert "user2" == collection.integration_configuration.settings_dict.get( @@ -573,42 +532,41 @@ def test_collections_post_edit( # have been deleted. 
assert collection.integration_configuration.library_configurations == [] - parent = settings_ctrl_fixture.ctrl.db.collection( - name="Parent", protocol=ExternalIntegration.OVERDRIVE - ) + parent = db.collection(name="Parent", protocol=ExternalIntegration.OVERDRIVE) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.OVERDRIVE), - ("parent_id", str(parent.id)), + ("parent_id", str(parent.integration_configuration.id)), ("external_account_id", "1234"), ("libraries", json.dumps([])), ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The collection now has a parent. assert parent == collection.parent - library = settings_ctrl_fixture.ctrl.db.default_library() - collection2 = settings_ctrl_fixture.ctrl.db.collection( + library = db.default_library() + collection2 = db.collection( name="Collection 2", protocol=ExternalIntegration.ODL ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection2.id)), + ("id", str(collection2.integration_configuration.id)), ("name", "Collection 2"), ("protocol", ExternalIntegration.ODL), - ("external_account_id", "1234"), + ("external_account_id", "http://test.com/feed"), ("username", "user"), ("password", "password"), ("data_source", "datasource"), @@ -628,11 +586,12 @@ def test_collections_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - settings_ctrl_fixture.ctrl.db.session.refresh(collection2) + admin_ctrl_fixture.ctrl.db.session.refresh(collection2) assert len(collection2.integration_configuration.library_configurations) == 1 # The library configuration value was correctly coerced to int assert ( @@ -642,35 +601,25 @@ def test_collections_post_edit( == 200 ) - def _base_collections_post_request(self, collection): - """A template for POST requests to the collections controller.""" - return [ - ("id", str(collection.id)), - ("name", "Collection 1"), - ("protocol", ExternalIntegration.AXIS_360), - ("external_account_id", "1234"), - ("username", "user2"), - ("password", "password"), - ("url", "http://axis.test/"), - ] - def test_collections_post_edit_library_specific_configuration( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): # The collection exists. 
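+        # (The admin is granted SYSTEM_ADMIN explicitly here, since the
+        # AdminControllerFixture does not appear to pre-assign any roles.)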
- collection = settings_ctrl_fixture.ctrl.db.collection( + db = admin_ctrl_fixture.ctrl.db + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + collection = db.collection( name="Collection 1", protocol=ExternalIntegration.AXIS_360 ) - l1 = settings_ctrl_fixture.ctrl.db.library( + l1 = db.library( name="Library 1", short_name="L1", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.AXIS_360), ("external_account_id", "1234"), @@ -684,7 +633,7 @@ def test_collections_post_edit_library_specific_configuration( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 @@ -695,10 +644,10 @@ def test_collections_post_edit_library_specific_configuration( assert "14" == l1_settings.settings_dict.get("ebook_loan_duration") # Remove the connection between collection and library. - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.AXIS_360), ("external_account_id", "1234"), @@ -709,58 +658,58 @@ def test_collections_post_edit_library_specific_configuration( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The settings associated with the collection+library were removed # when the connection between collection and library was deleted. 
assert isinstance(l1.id, int) - assert None == collection.integration_configuration.for_library(l1.id) + assert collection.integration_configuration.for_library(l1.id) is None assert [] == collection.libraries - def test_collection_delete(self, settings_ctrl_fixture: SettingsControllerFixture): - collection = settings_ctrl_fixture.ctrl.db.collection() - assert False == collection.marked_for_deletion + def test_collection_delete(self, admin_ctrl_fixture: AdminControllerFixture): + db = admin_ctrl_fixture.ctrl.db + collection = db.collection() + assert collection.marked_for_deletion is False - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) + with admin_ctrl_fixture.request_context_with_admin("/", method="DELETE"): pytest.raises( AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_delete, - collection.id, + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_delete, + collection.integration_configuration.id, ) - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - response = settings_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( - collection.id + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + assert collection.integration_configuration.id is not None + response = admin_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( + collection.integration_configuration.id ) assert response.status_code == 200 + assert isinstance(response, Response) # The collection should still be available because it is not immediately deleted. # The collection will be deleted in the background by a script, but it is # now marked for deletion - fetchedCollection = get_one( - settings_ctrl_fixture.ctrl.db.session, Collection, id=collection.id - ) - assert collection == fetchedCollection - assert True == fetchedCollection.marked_for_deletion + fetched_collection = get_one(db.session, Collection, id=collection.id) + assert fetched_collection == collection + assert fetched_collection.marked_for_deletion is True def test_collection_delete_cant_delete_parent( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - parent = settings_ctrl_fixture.ctrl.db.collection( - protocol=ExternalIntegration.OVERDRIVE - ) - child = settings_ctrl_fixture.ctrl.db.collection( - protocol=ExternalIntegration.OVERDRIVE - ) + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + db = admin_ctrl_fixture.ctrl.db + parent = db.collection(protocol=ExternalIntegration.OVERDRIVE) + child = db.collection(protocol=ExternalIntegration.OVERDRIVE) child.parent = parent - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - response = settings_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( - parent.id + with admin_ctrl_fixture.request_context_with_admin("/", method="DELETE"): + assert parent.integration_configuration.id is not None + response = admin_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( + parent.integration_configuration.id ) - assert CANNOT_DELETE_COLLECTION_WITH_CHILDREN == response + assert response == CANNOT_DELETE_COLLECTION_WITH_CHILDREN diff --git a/tests/api/admin/controller/test_custom_lists.py b/tests/api/admin/controller/test_custom_lists.py index 14ee6b90a..eff19c8ce 100644 --- a/tests/api/admin/controller/test_custom_lists.py +++ 
b/tests/api/admin/controller/test_custom_lists.py @@ -1,5 +1,4 @@ import json -from typing import Optional from unittest import mock import feedparser @@ -894,16 +893,16 @@ def test_custom_list_delete_errors( @define class ShareLocallySetup: - shared_with: Optional[Library] = None - primary_library: Optional[Library] = None - collection1: Optional[Collection] = None - list: Optional[CustomList] = None + shared_with: Library | None = None + primary_library: Library | None = None + collection1: Collection | None = None + list: CustomList | None = None def _setup_share_locally(self, admin_librarian_fixture: AdminLibrarianFixture): shared_with = admin_librarian_fixture.ctrl.db.library("shared_with") primary_library = admin_librarian_fixture.ctrl.db.library("primary") collection1 = admin_librarian_fixture.ctrl.db.collection("c1") - primary_library.collections.append(collection1) + collection1.libraries.append(primary_library) data_source = DataSource.lookup( admin_librarian_fixture.ctrl.db.session, DataSource.LIBRARY_STAFF @@ -949,7 +948,7 @@ def test_share_locally_success( self, admin_librarian_fixture: AdminLibrarianFixture ): s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) response = self._share_locally( s.list, s.primary_library, admin_librarian_fixture ) @@ -970,11 +969,11 @@ def test_share_locally_with_invalid_entries( self, admin_librarian_fixture: AdminLibrarianFixture ): s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) # Second collection with work in list collection2 = admin_librarian_fixture.ctrl.db.collection() - s.primary_library.collections.append(collection2) + collection2.libraries.append(s.primary_library) w = admin_librarian_fixture.ctrl.db.work(collection=collection2) s.list.add_entry(w) @@ -987,7 +986,7 @@ def test_share_locally_with_invalid_entries( def test_share_locally_get(self, admin_librarian_fixture: AdminLibrarianFixture): """Does the GET method fetch shared lists""" s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) resp = self._share_locally(s.list, s.primary_library, admin_librarian_fixture) assert resp["successes"] == 1 @@ -1022,7 +1021,7 @@ def test_share_locally_get(self, admin_librarian_fixture: AdminLibrarianFixture) def test_share_locally_delete(self, admin_librarian_fixture: AdminLibrarianFixture): """Test the deleting of a lists shared status""" s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) resp = self._share_locally(s.list, s.primary_library, admin_librarian_fixture) assert resp["successes"] == 1 diff --git a/tests/api/admin/controller/test_lanes.py b/tests/api/admin/controller/test_lanes.py index 6d54d245b..577d5429e 100644 --- a/tests/api/admin/controller/test_lanes.py +++ b/tests/api/admin/controller/test_lanes.py @@ -38,7 +38,7 @@ class TestLanesController: def test_lanes_get(self, alm_fixture: AdminLibraryManagerFixture): library = alm_fixture.ctrl.db.library() collection = alm_fixture.ctrl.db.collection() - library.collections += [collection] + collection.libraries.append(library) english = alm_fixture.ctrl.db.lane( "English", library=library, languages=["eng"] diff --git a/tests/api/admin/controller/test_library.py 
b/tests/api/admin/controller/test_library.py index 096d1703d..6cc878577 100644 --- a/tests/api/admin/controller/test_library.py +++ b/tests/api/admin/controller/test_library.py @@ -4,7 +4,6 @@ import datetime import json from io import BytesIO -from typing import Dict, List from unittest.mock import MagicMock import flask @@ -52,10 +51,10 @@ def logo_properties(self): } def library_form( - self, library: Library, fields: Dict[str, str | List[str]] | None = None + self, library: Library, fields: dict[str, str | list[str]] | None = None ): fields = fields or {} - defaults: Dict[str, str | List[str]] = { + defaults: dict[str, str | list[str]] = { "uuid": str(library.uuid), "name": "The New York Public Library", "short_name": str(library.short_name), diff --git a/tests/api/admin/controller/test_patron.py b/tests/api/admin/controller/test_patron.py index d41662c52..75efd3f00 100644 --- a/tests/api/admin/controller/test_patron.py +++ b/tests/api/admin/controller/test_patron.py @@ -1,5 +1,3 @@ -from typing import Optional - import flask import pytest from werkzeug.datastructures import ImmutableMultiDict @@ -126,7 +124,7 @@ def test_reset_adobe_id(self, patron_controller_fixture: PatronControllerFixture # This PatronController will always return a specific # PatronData object, no matter what is asked for. class MockPatronController(PatronController): - mock_patrondata: Optional[PatronData] = None + mock_patrondata: PatronData | None = None def _load_patrondata(self, authenticator): self.called_with = authenticator diff --git a/tests/api/admin/controller/test_patron_auth.py b/tests/api/admin/controller/test_patron_auth.py index 38a382ad9..6d03b7a03 100644 --- a/tests/api/admin/controller/test_patron_auth.py +++ b/tests/api/admin/controller/test_patron_auth.py @@ -1,7 +1,8 @@ from __future__ import annotations import json -from typing import TYPE_CHECKING, Any, Callable, List, Tuple +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock import flask @@ -17,8 +18,8 @@ INTEGRATION_NAME_ALREADY_IN_USE, INVALID_CONFIGURATION_OPTION, INVALID_LIBRARY_IDENTIFIER_RESTRICTION_REGULAR_EXPRESSION, - MISSING_PATRON_AUTH_NAME, MISSING_SERVICE, + MISSING_SERVICE_NAME, MULTIPLE_BASIC_AUTH_SERVICES, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, @@ -36,6 +37,7 @@ from core.integration.goals import Goals from core.model import AdminRole, Library, get_one from core.model.integration import IntegrationConfiguration +from core.problem_details import INVALID_INPUT from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: @@ -84,7 +86,7 @@ def post(form: ImmutableMultiDict[str, str]) -> Response | ProblemDetail: @pytest.fixture -def common_args() -> List[Tuple[str, str]]: +def common_args() -> list[tuple[str, str]]: return [ ("test_identifier", "user"), ("test_password", "pass"), @@ -106,7 +108,6 @@ def test_patron_auth_services_get_with_no_services( protocols = response.get("protocols") assert isinstance(protocols, list) assert 8 == len(protocols) - assert SimpleAuthenticationProvider.__module__ == protocols[0].get("name") assert "settings" in protocols[0] assert "library_settings" in protocols[0] @@ -366,9 +367,9 @@ def test_patron_auth_services_post_missing_patron_auth_name( ) response = post_response(form) assert isinstance(response, ProblemDetail) - assert response.uri == MISSING_PATRON_AUTH_NAME.uri + assert response == MISSING_SERVICE_NAME - def test_patron_auth_services_post_missing_patron_auth_no_such_library( + def 
test_patron_auth_services_post_no_such_library( self, post_response: Callable[..., Response | ProblemDetail], common_args: list[tuple[str, str]], @@ -385,6 +386,24 @@ def test_patron_auth_services_post_missing_patron_auth_no_such_library( assert isinstance(response, ProblemDetail) assert response.uri == NO_SUCH_LIBRARY.uri + def test_patron_auth_services_post_missing_short_name( + self, + post_response: Callable[..., Response | ProblemDetail], + common_args: list[tuple[str, str]], + ): + form = ImmutableMultiDict( + [ + ("name", "testing auth name"), + ("protocol", SimpleAuthenticationProvider.__module__), + ("libraries", json.dumps([{}])), + ] + + common_args + ) + response = post_response(form) + assert isinstance(response, ProblemDetail) + assert response.uri == INVALID_INPUT.uri + assert response.detail == "Invalid library settings, missing short_name." + def test_patron_auth_services_post_missing_patron_auth_multiple_basic( self, post_response: Callable[..., Response | ProblemDetail], @@ -448,7 +467,7 @@ def test_patron_auth_services_post_invalid_library_identifier_restriction_regex( def test_patron_auth_services_post_not_authorized( self, - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], settings_ctrl_fixture: SettingsControllerFixture, post_response: Callable[..., Response | ProblemDetail], ): @@ -463,7 +482,7 @@ def test_patron_auth_services_post_not_authorized( def test_patron_auth_services_post_create( self, - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], default_library: Library, post_response: Callable[..., Response | ProblemDetail], db: DatabaseTransactionFixture, @@ -504,6 +523,7 @@ def test_patron_auth_services_post_create( assert settings.test_password == "pass" [library_config] = auth_service.library_configurations assert library_config.library == default_library + assert "short_name" not in library_config.settings_dict assert ( library_config.settings_dict["library_identifier_restriction_criteria"] == "^1234" @@ -543,7 +563,7 @@ def test_patron_auth_services_post_create( def test_patron_auth_services_post_edit( self, post_response: Callable[..., Response | ProblemDetail], - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], settings_ctrl_fixture: SettingsControllerFixture, create_simple_auth_integration: SimpleAuthIntegrationFixture, db: DatabaseTransactionFixture, @@ -607,7 +627,7 @@ def test_patron_auth_services_post_edit( def test_patron_auth_service_delete( self, - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], settings_ctrl_fixture: SettingsControllerFixture, create_simple_auth_integration: SimpleAuthIntegrationFixture, ): diff --git a/tests/api/admin/controller/test_quicksight.py b/tests/api/admin/controller/test_quicksight.py index 5240ae494..db2bc4f68 100644 --- a/tests/api/admin/controller/test_quicksight.py +++ b/tests/api/admin/controller/test_quicksight.py @@ -1,8 +1,10 @@ +import uuid +from typing import cast from unittest import mock import pytest -from core.model import create +from core.model import Library, create from core.model.admin import Admin, AdminRole from core.util.problem_detail import ProblemError from tests.fixtures.api_admin import AdminControllerFixture @@ -54,8 +56,9 @@ def test_generate_quicksight_url( ) generate_method.return_value = {"Status": 201, "EmbedUrl": "https://embed"} + random_uuid = str(uuid.uuid4()) with quicksight_fixture.request_context_with_admin( - f"/?library_ids={default.id},{library1.id},30000", + 
f"/?library_uuids={default.uuid},{library1.uuid},{random_uuid}", admin=system_admin, ) as ctx: response = ctrl.generate_quicksight_url("primary") @@ -75,8 +78,10 @@ def test_generate_quicksight_url( }, SessionTags=[ dict( - Key="library_name", - Value="|".join([str(library1.name), str(default.name)]), + Key="library_short_name_0", + Value="|".join( + [str(library1.short_name), str(default.short_name)] + ), ) ], ) @@ -86,7 +91,7 @@ def test_generate_quicksight_url( admin1.add_role(AdminRole.LIBRARY_MANAGER, library1) with quicksight_fixture.request_context_with_admin( - f"/?library_ids=1,{library1.id}", + f"/?library_uuids={default.uuid},{library1.uuid}", admin=admin1, ) as ctx: generate_method.reset_mock() @@ -100,7 +105,79 @@ def test_generate_quicksight_url( "Dashboard": {"InitialDashboardId": "uuid2"} }, SessionTags=[ - dict(Key="library_name", Value="|".join([str(library1.name)])) + dict( + Key="library_short_name_0", + Value="|".join([str(library1.short_name)]), + ) + ], + ) + + def test_generate_quicksight_url_with_a_large_number_of_libraries( + self, quicksight_fixture: QuickSightControllerFixture + ): + ctrl = quicksight_fixture.manager.admin_quicksight_controller + db = quicksight_fixture.ctrl.db + + system_admin, _ = create(db.session, Admin, email="admin@email.com") + system_admin.add_role(AdminRole.SYSTEM_ADMIN) + default = db.default_library() + + libraries: list[Library] = [] + for x in range(0, 37): + libraries.append(db.library(short_name="TL" + str(x).zfill(4))) + + with mock.patch( + "api.admin.controller.quicksight.boto3" + ) as mock_boto, mock.patch( + "api.admin.controller.quicksight.Configuration.quicksight_authorized_arns" + ) as mock_qs_arns: + arns = dict( + primary=[ + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid1", + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid2", + ], + ) + mock_qs_arns.return_value = arns + generate_method: mock.MagicMock = ( + mock_boto.client().generate_embed_url_for_anonymous_user + ) + generate_method.return_value = {"Status": 201, "EmbedUrl": "https://embed"} + + random_uuid = str(uuid.uuid4()) + with quicksight_fixture.request_context_with_admin( + f"/?library_uuids={','.join(cast(list[str], [x.uuid for x in libraries ]))}", + admin=system_admin, + ) as ctx: + response = ctrl.generate_quicksight_url("primary") + + # Assert the right client was created, with a region + assert mock_boto.client.call_args == mock.call( + "quicksight", region_name="us-west-1" + ) + # Assert the reqest and response formats + assert response["embedUrl"] == "https://embed" + assert generate_method.call_args == mock.call( + AwsAccountId="aws-account-id", + Namespace="default", + AuthorizedResourceArns=arns["primary"], + ExperienceConfiguration={ + "Dashboard": {"InitialDashboardId": "uuid1"} + }, + SessionTags=[ + dict( + Key="library_short_name_0", + Value="|".join( + cast(list[str], [x.short_name for x in libraries[0:36]]) + ), + ), + dict( + Key="library_short_name_1", + Value="|".join( + cast( + list[str], [x.short_name for x in libraries[36:37]] + ) + ), + ), ], ) @@ -129,7 +206,7 @@ def test_generate_quicksight_url_errors( mock_qs_arns.return_value = arns with quicksight_fixture.request_context_with_admin( - f"/?library_ids={library.id}", + f"/?library_uuids={library.uuid}", admin=admin, ) as ctx: with pytest.raises(ProblemError) as raised: @@ -148,7 +225,7 @@ def test_generate_quicksight_url_errors( ) with quicksight_fixture.request_context_with_admin( - f"/?library_ids={library_not_allowed.id}", + 
f"/?library_uuids={library_not_allowed.uuid}", admin=admin, ) as ctx: mock_qs_arns.return_value = arns @@ -160,7 +237,7 @@ def test_generate_quicksight_url_errors( ) with quicksight_fixture.request_context_with_admin( - f"/?library_ids={library.id}", + f"/?library_uuids={library.uuid}", admin=admin, ) as ctx: # Bad response from boto @@ -171,7 +248,7 @@ def test_generate_quicksight_url_errors( ctrl.generate_quicksight_url("primary") assert ( raised.value.problem_detail.detail - == "Error while fetching the Quisksight Embed url." + == "Error while fetching the Quicksight Embed url." ) # 200 status, but no url @@ -182,7 +259,7 @@ def test_generate_quicksight_url_errors( ctrl.generate_quicksight_url("primary") assert ( raised.value.problem_detail.detail - == "Error while fetching the Quisksight Embed url." + == "Error while fetching the Quicksight Embed url." ) # Boto threw an error @@ -193,7 +270,7 @@ def test_generate_quicksight_url_errors( ctrl.generate_quicksight_url("primary") assert ( raised.value.problem_detail.detail - == "Error while fetching the Quisksight Embed url." + == "Error while fetching the Quicksight Embed url." ) def test_get_dashboard_names(self, quicksight_fixture: QuickSightControllerFixture): diff --git a/tests/api/admin/controller/test_search_services.py b/tests/api/admin/controller/test_search_services.py index 8b6fcebdf..ebd470ea0 100644 --- a/tests/api/admin/controller/test_search_services.py +++ b/tests/api/admin/controller/test_search_services.py @@ -135,7 +135,7 @@ def test_search_services_post_errors(self, settings_ctrl_fixture): session, ExternalIntegration, protocol="test", - goal=ExternalIntegration.LICENSE_GOAL, + goal="test", name="name", ) diff --git a/tests/api/admin/controller/test_settings.py b/tests/api/admin/controller/test_settings.py index 12da7a33d..12f380ab6 100644 --- a/tests/api/admin/controller/test_settings.py +++ b/tests/api/admin/controller/test_settings.py @@ -304,8 +304,8 @@ def settings_class(cls): == "Required field 'key' is missing." 
) - config = _set_configuration_library( + result = _set_configuration_library( config, dict(short_name="short-name", key="key", value="value"), Protocol1 ) - assert config.library == library - assert config.settings_dict == dict(key="key", value="value") + assert result.library == library + assert result.settings_dict == dict(key="key", value="value") diff --git a/tests/api/admin/test_config.py b/tests/api/admin/test_config.py index 1780cc698..fb4a5a845 100644 --- a/tests/api/admin/test_config.py +++ b/tests/api/admin/test_config.py @@ -1,6 +1,5 @@ import logging import os -from typing import Optional from unittest.mock import MagicMock, patch import pytest @@ -12,7 +11,7 @@ class TestAdminUI: @staticmethod - def _set_env(monkeypatch, key: str, value: Optional[str]): + def _set_env(monkeypatch, key: str, value: str | None): if value: monkeypatch.setenv(key, value) elif key in os.environ: @@ -47,9 +46,9 @@ def test_package_version_cached(self, monkeypatch): def test_env_package_version( self, monkeypatch, - package_version: Optional[str], + package_version: str | None, resolves: bool, - expected_result: Optional[str], + expected_result: str | None, ): with patch( "api.admin.config.Configuration.resolve_package_version" @@ -112,8 +111,8 @@ def test_resolve_package_version(self, caplog): def test_package_url( self, monkeypatch, - package_name: Optional[str], - package_version: Optional[str], + package_name: str | None, + package_version: str | None, mode: OperationalMode, expected_result_startswith: str, ): @@ -145,8 +144,8 @@ def test_package_url( def test_package_development_directory( self, monkeypatch, - package_name: Optional[str], - package_version: Optional[str], + package_name: str | None, + package_version: str | None, expected_result: str, ): self._set_env(monkeypatch, "TPP_CIRCULATION_ADMIN_PACKAGE_NAME", package_name) diff --git a/tests/api/admin/test_dashboard_stats.py b/tests/api/admin/test_dashboard_stats.py index 95b2de774..edbef2fe7 100644 --- a/tests/api/admin/test_dashboard_stats.py +++ b/tests/api/admin/test_dashboard_stats.py @@ -95,7 +95,7 @@ def test_stats_patrons(admin_statistics_session: AdminStatisticsSessionFixture): assert 1 == patron_data.loans assert 1 == patron_data.holds - # These patrons are in a different library.. + # These patrons are in a different library. 
l2 = db.library() patron4 = db.patron(library=l2) pool.loan_to(patron4, end=utc_now() + timedelta(days=5)) @@ -156,7 +156,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture for inventory_data in [library_inventory, summary_inventory]: assert 0 == inventory_data.titles assert 0 == inventory_data.available_titles - assert 0 == inventory_data.self_hosted_titles assert 0 == inventory_data.open_access_titles assert 0 == inventory_data.licensed_titles assert 0 == inventory_data.unlimited_license_titles @@ -193,7 +192,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture for inventory_data in [library_inventory, summary_inventory]: assert 2 == inventory_data.titles assert 1 == inventory_data.available_titles - assert 0 == inventory_data.self_hosted_titles assert 0 == inventory_data.open_access_titles assert 2 == inventory_data.licensed_titles assert 0 == inventory_data.unlimited_license_titles @@ -215,7 +213,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture summary_inventory = response.inventory_summary assert 2 == library_inventory.titles assert 1 == library_inventory.available_titles - assert 0 == library_inventory.self_hosted_titles assert 0 == library_inventory.open_access_titles assert 2 == library_inventory.licensed_titles assert 0 == library_inventory.unlimited_license_titles @@ -225,7 +222,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture assert 3 == summary_inventory.titles assert 2 == summary_inventory.available_titles - assert 0 == summary_inventory.self_hosted_titles assert 0 == summary_inventory.open_access_titles assert 3 == summary_inventory.licensed_titles assert 0 == summary_inventory.unlimited_license_titles @@ -245,7 +241,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture for inventory_data in [library_inventory, summary_inventory]: assert 2 == inventory_data.titles assert 1 == inventory_data.available_titles - assert 0 == inventory_data.self_hosted_titles assert 0 == inventory_data.open_access_titles assert 2 == inventory_data.licensed_titles assert 0 == inventory_data.unlimited_license_titles @@ -265,7 +260,6 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu assert empty_inventory == InventoryStatistics( titles=0, available_titles=0, - self_hosted_titles=0, open_access_titles=0, licensed_titles=0, unlimited_license_titles=0, @@ -287,7 +281,6 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu assert new_metered_inventory == InventoryStatistics( titles=2, available_titles=2, - self_hosted_titles=0, open_access_titles=0, licensed_titles=2, unlimited_license_titles=0, @@ -301,6 +294,8 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu # Initially, there is no inventory. 
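+ # The new summary fields start out empty as well: no per-medium inventory, no libraries.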
response = session.get_statistics() assert response.inventory_summary == empty_inventory + assert {} == response.inventory_by_medium + assert 0 == len(response.libraries) default_library = db.library("Default Library", "default") default_collection = db.collection(name="Default Collection") @@ -325,27 +320,48 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu expected_summary_inventory = expected_library_inventory.copy() response = session.get_statistics() - assert ( - expected_library_inventory - == response.libraries_by_key.get(default_library.short_name).inventory_summary - ) + library_stats_data = response.libraries_by_key.get(default_library.short_name) + assert 1 == len(response.collections) + assert 1 == len(response.inventory_by_medium) + assert 1 == len(library_stats_data.collection_ids) + assert 1 == len(library_stats_data.inventory_by_medium) assert expected_summary_inventory == response.inventory_summary + assert "Book" in response.inventory_by_medium + assert expected_summary_inventory == response.inventory_by_medium.get("Book") + assert expected_library_inventory == library_stats_data.inventory_summary + assert "Book" in library_stats_data.inventory_by_medium + assert expected_library_inventory == library_stats_data.inventory_by_medium.get( + "Book" + ) c2 = db.collection() c3 = db.collection() c3.libraries += [default_library] # c2 adds a 5/10 metered license title. - _, pool = db.edition( + edition, pool = db.edition( with_license_pool=True, with_open_access_download=False, data_source_name=DataSource.OVERDRIVE, collection=c2, ) + edition.medium = "Audio" pool.open_access = False pool.licenses_owned = 10 pool.licenses_available = 5 + # We currently have active BiblioBoard editions with no (null) medium, + # so let's add one of those to make sure we handle those. + edition, pool = db.edition( + with_license_pool=True, + with_open_access_download=False, + collection=c2, + ) + edition.medium = None + pool.open_access = False + pool.licenses_owned = 2 + pool.licenses_available = 0 + # c3 does not add a title, since no licenses owned. 
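+ # (A pool with zero licenses owned contributes nothing to the inventory counts.)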
_, pool = db.edition( with_license_pool=True, @@ -379,7 +395,8 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu pool.licenses_owned = 5 pool.licenses_available = 5 - added_library_inventory = empty_inventory.copy( + c1_previous_book_inventory = expected_library_inventory + c1_added_book_inventory = empty_inventory.copy( update={ "titles": 1, "available_titles": 0, @@ -389,18 +406,52 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu "metered_licenses_available": 0, } ) - added_summary_inventory = empty_inventory.copy( + + c2_audio_inventory = empty_inventory.copy( update={ - "titles": 3, - "available_titles": 2, - "licensed_titles": 3, - "metered_license_titles": 3, - "metered_licenses_owned": 18, - "metered_licenses_available": 10, + "titles": 1, + "available_titles": 1, + "licensed_titles": 1, + "metered_license_titles": 1, + "metered_licenses_owned": 10, + "metered_licenses_available": 5, } ) - expected_library_inventory += added_library_inventory - expected_summary_inventory += added_summary_inventory + c2_book_inventory = empty_inventory.copy( + update={ + "titles": 1, + "available_titles": 1, + "licensed_titles": 1, + "metered_license_titles": 1, + "metered_licenses_owned": 5, + "metered_licenses_available": 5, + } + ) + c2_no_medium_inventory = empty_inventory.copy( + update={ + "titles": 1, + "available_titles": 0, + "licensed_titles": 1, + "metered_license_titles": 1, + "metered_licenses_owned": 2, + "metered_licenses_available": 0, + } + ) + + c3_book_inventory = empty_inventory.copy() + + # All collections are included in summaries, since our admin is a sysadmin. + expected_library_inventory = ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + ) + expected_summary_inventory = ( + c1_previous_book_inventory + + c1_added_book_inventory + + c3_book_inventory + + c2_audio_inventory + + c2_book_inventory + + c2_no_medium_inventory + ) response = session.get_statistics() library_stats_data = response.libraries_by_key.get(default_library.short_name) @@ -408,59 +459,135 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu library_collections_by_id = { id_: all_collections_by_id[id_] for id_ in library_stats_data.collection_ids } - assert expected_library_inventory == library_stats_data.inventory_summary + assert 3 == len(response.collections) + assert expected_summary_inventory == response.inventory_summary + assert 3 == len(response.inventory_by_medium) + assert "Audio" in response.inventory_by_medium + assert "Book" in response.inventory_by_medium + assert "None" in response.inventory_by_medium + assert c2_audio_inventory == response.inventory_by_medium.get("Audio") + assert c2_no_medium_inventory == response.inventory_by_medium.get("None") + assert ( + c1_previous_book_inventory + + c1_added_book_inventory + + c2_book_inventory + + c3_book_inventory + == response.inventory_by_medium.get("Book") + ) + assert expected_summary_inventory == ( + response.inventory_by_medium.get("Audio") + + response.inventory_by_medium.get("Book") + + response.inventory_by_medium.get("None") + ) + + assert expected_library_inventory == library_stats_data.inventory_summary assert 2 == len(library_stats_data.collection_ids) - assert 3 == len(response.collections) + assert 1 == len(library_stats_data.inventory_by_medium) + assert "Book" in library_stats_data.inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + == 
library_stats_data.inventory_by_medium.get("Book") + ) + assert expected_library_inventory == library_stats_data.inventory_by_medium.get( + "Book" + ) for collections in [library_collections_by_id, all_collections_by_id]: - default_inventory = collections[default_collection.id].inventory - c3_inventory = collections[c3.id].inventory - assert 1 == default_inventory.licensed_titles - assert 1 == default_inventory.open_access_titles - assert 3 == default_inventory.metered_licenses_owned - assert 0 == default_inventory.metered_licenses_available - - assert 0 == c3_inventory.licensed_titles - assert 0 == c3_inventory.open_access_titles - assert 0 == c3_inventory.metered_licenses_owned - assert 0 == c3_inventory.metered_licenses_available - - # assert None == library_collections_data.get(c2.name) - # c2_data = total_collections_data.get(c2.name) + default_stats = collections[default_collection.id] + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_stats.inventory + ) + + default_inventory_by_medium = default_stats.inventory_by_medium + assert "Audio" not in default_inventory_by_medium + assert "Book" in default_inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_inventory_by_medium["Book"] + ) + + c3_stats = collections[c3.id] + assert c3_book_inventory == c3_stats.inventory + + c3_inventory_by_medium = c3_stats.inventory_by_medium + assert "Book" not in c3_inventory_by_medium + assert "Audio" not in c3_inventory_by_medium + assert library_collections_by_id.get(c2.id) is None - c2_inventory = all_collections_by_id[c2.id].inventory - assert 2 == c2_inventory.licensed_titles - assert 0 == c2_inventory.open_access_titles - assert 15 == c2_inventory.metered_licenses_owned - assert 10 == c2_inventory.metered_licenses_available + + c2_stats = all_collections_by_id[c2.id] + assert ( + c2_audio_inventory + c2_book_inventory + c2_no_medium_inventory + == c2_stats.inventory + ) + + c2_inventory_by_medium = c2_stats.inventory_by_medium + assert "Book" in c2_inventory_by_medium + assert "Audio" in c2_inventory_by_medium + assert "None" in c2_inventory_by_medium + assert c2_audio_inventory == c2_inventory_by_medium["Audio"] + assert c2_book_inventory == c2_inventory_by_medium["Book"] + assert c2_no_medium_inventory == c2_inventory_by_medium["None"] admin.remove_role(AdminRole.SYSTEM_ADMIN) admin.add_role(AdminRole.LIBRARY_MANAGER, default_library) # c2 is no longer included in the totals since the admin user's # library is not associated with it. 
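+ # Only the default collection and c3 remain, so the library and summary totals now match.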
+ expected_library_inventory = ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + ) + expected_summary_inventory = expected_library_inventory + response = session.get_statistics() library_stats_data = response.libraries_by_key.get(default_library.short_name) all_collections_by_id = {c.id: c for c in response.collections} library_collections_by_id = { id: all_collections_by_id[id] for id in library_stats_data.collection_ids } + assert 2 == len(response.collections) + + assert expected_summary_inventory == response.inventory_summary + assert 1 == len(response.inventory_by_medium) + assert "Book" in response.inventory_by_medium + assert expected_summary_inventory == response.inventory_by_medium.get("Book") + + assert expected_library_inventory == library_stats_data.inventory_summary + assert 2 == len(library_stats_data.collection_ids) + assert 1 == len(library_stats_data.inventory_by_medium) + assert "Book" in library_stats_data.inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + == library_stats_data.inventory_by_medium.get("Book") + ) + assert expected_library_inventory == library_stats_data.inventory_by_medium.get( + "Book" + ) + for collections in [library_collections_by_id, all_collections_by_id]: assert 2 == len(collections) assert collections.get(c2.id) is None - default_inventory = collections[default_collection.id].inventory - assert 1 == default_inventory.licensed_titles - assert 1 == default_inventory.open_access_titles - assert 3 == default_inventory.metered_licenses_owned - assert 0 == default_inventory.metered_licenses_available - - c3_inventory = collections[c3.id].inventory - assert 0 == c3_inventory.licensed_titles - assert 0 == c3_inventory.open_access_titles - assert 0 == c3_inventory.metered_licenses_owned - assert 0 == c3_inventory.metered_licenses_available + default_stats = collections[default_collection.id] + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_stats.inventory + ) + + default_inventory_by_medium = default_stats.inventory_by_medium + assert 1 == len(default_inventory_by_medium) + assert "Book" in default_inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_inventory_by_medium["Book"] + ) + + c3_stats = collections[c3.id] + assert c3_book_inventory == c3_stats.inventory + + assert 0 == len(c3_stats.inventory_by_medium) def test_stats_parent_collection_permissions( diff --git a/tests/api/admin/test_form_data.py b/tests/api/admin/test_form_data.py index 7241f4ca7..7cfb3a706 100644 --- a/tests/api/admin/test_form_data.py +++ b/tests/api/admin/test_form_data.py @@ -1,5 +1,3 @@ -from typing import List, Optional - from werkzeug.datastructures import ImmutableMultiDict from api.admin.form_data import ProcessFormData @@ -12,21 +10,21 @@ class MockSettings(BaseSettings): - field1: List[str] = FormField( + field1: list[str] = FormField( [], form=ConfigurationFormItem( label="Field 1", type=ConfigurationFormItemType.LIST, ), ) - field2: List[str] = FormField( + field2: list[str] = FormField( [], form=ConfigurationFormItem( label="Field 2", type=ConfigurationFormItemType.MENU, ), ) - field3: Optional[str] = FormField( + field3: str | None = FormField( None, form=ConfigurationFormItem( label="Field 3", diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 717cda2b4..0ae8ffde5 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -1,6 +1,7 @@ import logging 
+from collections.abc import Generator from pathlib import Path -from typing import Any, Generator, Optional +from typing import Any import flask import pytest @@ -11,7 +12,7 @@ from api.admin import routes from api.admin.controller import setup_admin_controllers from api.admin.problem_details import * -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.util.problem_detail import ProblemDetail, ProblemError from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.api_controller import ControllerFixture @@ -93,8 +94,8 @@ def __init__( self.original_api_app = self.api_routes.app self.resolver = self.original_app.url_map.bind("", "/") - self.controller: Optional[CirculationManagerController] = None - self.real_controller: Optional[CirculationManagerController] = None + self.controller: CirculationManagerController | None = None + self.real_controller: CirculationManagerController | None = None self.routes.app = app # type: ignore # Need to also mock the route app from /api/routes. diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 109f98c01..d24165c8e 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,3 +1,4 @@ +from freezegun.config import configure as fg_configure from pytest import register_assert_rewrite register_assert_rewrite("tests.fixtures.database") @@ -39,3 +40,8 @@ "tests.fixtures.tls_server", "tests.fixtures.vendor_id", ] + +# Make sure if we are using pyinstrument to profile tests, that +# freezegun doesn't interfere with it. +# See: https://github.com/spulec/freezegun#ignore-packages +fg_configure(extend_ignore_list=["pyinstrument"]) diff --git a/tests/finland/__init__.py b/tests/api/controller/__init__.py similarity index 100% rename from tests/finland/__init__.py rename to tests/api/controller/__init__.py diff --git a/tests/api/test_controller_analytics.py b/tests/api/controller/test_analytics.py similarity index 100% rename from tests/api/test_controller_analytics.py rename to tests/api/controller/test_analytics.py diff --git a/tests/api/test_controller_annotation.py b/tests/api/controller/test_annotation.py similarity index 99% rename from tests/api/test_controller_annotation.py rename to tests/api/controller/test_annotation.py index 413a80f4b..5a7b7b6c4 100644 --- a/tests/api/test_controller_annotation.py +++ b/tests/api/controller/test_annotation.py @@ -1,7 +1,6 @@ import datetime import json from time import mktime -from typing import Union from wsgiref.handlers import format_date_time import pytest @@ -139,7 +138,7 @@ def test_get_container_for_work(self, annotation_fixture: AnnotationFixture): assert expected_time == response.headers["Last-Modified"] def test_post_to_container(self, annotation_fixture: AnnotationFixture): - data: dict[str, Union[str, dict]] = dict() + data: dict[str, str | dict] = dict() data["@context"] = AnnotationWriter.JSONLD_CONTEXT data["type"] = "Annotation" data["motivation"] = Annotation.IDLING diff --git a/tests/api/test_controller_base.py b/tests/api/controller/test_base.py similarity index 99% rename from tests/api/test_controller_base.py rename to tests/api/controller/test_base.py index 3e27c146d..bafee80e2 100644 --- a/tests/api/test_controller_base.py +++ b/tests/api/controller/test_base.py @@ -86,7 +86,7 @@ def test_authenticated_patron_from_request( # No authorization header -> 401 error. 
with patch( - "api.base_controller.BaseCirculationManagerController.authorization_header", + "api.controller.base.BaseCirculationManagerController.authorization_header", lambda x: None, ): with circulation_fixture.request_context_with_library("/"): @@ -101,7 +101,7 @@ def remote_failure(self, header): raise RemoteInitiatedServerError("argh", "service") with patch( - "api.base_controller.BaseCirculationManagerController.authenticated_patron", + "api.controller.base.BaseCirculationManagerController.authenticated_patron", remote_failure, ): with circulation_fixture.request_context_with_library( @@ -118,7 +118,7 @@ def remote_failure(self, header): # Credentials provided but don't identify anyone in particular # -> 401 error. with patch( - "api.base_controller.BaseCirculationManagerController.authenticated_patron", + "api.controller.base.BaseCirculationManagerController.authenticated_patron", lambda self, x: None, ): with circulation_fixture.request_context_with_library( @@ -274,7 +274,7 @@ def test_load_licensepools(self, circulation_fixture: CirculationControllerFixtu library = circulation_fixture.library [c1] = library.collections c2 = circulation_fixture.db.collection() - library.collections.append(c2) + c2.libraries.append(library) # Here's a Collection not affiliated with any Library. c3 = circulation_fixture.db.collection() diff --git a/tests/api/test_controller_crawlfeed.py b/tests/api/controller/test_crawlfeed.py similarity index 96% rename from tests/api/test_controller_crawlfeed.py rename to tests/api/controller/test_crawlfeed.py index 03b7994b4..a868ac55e 100644 --- a/tests/api/test_controller_crawlfeed.py +++ b/tests/api/controller/test_crawlfeed.py @@ -242,7 +242,7 @@ def works(self, _db, facets, pagination, *args, **kwargs): # Good pagination data -> feed_class.page() is called. sort_key = ["sort", "pagination", "key"] - with circulation_fixture.app.test_request_context( + with circulation_fixture.request_context_with_library( "/?size=23&key=%s" % json.dumps(sort_key) ): response = circulation_fixture.manager.opds_feeds._crawlable_feed( @@ -288,7 +288,7 @@ def works(self, _db, facets, pagination, *args, **kwargs): # If a custom Annotator is passed in to _crawlable_feed, it's # propagated to the page() call. mock_annotator = object() - with circulation_fixture.app.test_request_context("/"): + with circulation_fixture.request_context_with_library("/"): response = circulation_fixture.manager.opds_feeds._crawlable_feed( annotator=mock_annotator, **in_kwargs ) @@ -306,3 +306,11 @@ def works(self, _db, facets, pagination, *args, **kwargs): # There is one entry with the expected title. [entry] = feed["entries"] assert entry["title"] == work.title + + # The feed has the expected facet groups. 
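+ # Crawlable feeds advertise only the Collection Name and Distributor facet groups.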
+ facet_groups = { + l["facetgroup"] + for l in feed["feed"]["links"] + if l["rel"] == "http://opds-spec.org/facet" + } + assert facet_groups == {"Collection Name", "Distributor"} diff --git a/tests/api/test_controller_fixture.py b/tests/api/controller/test_fixture.py similarity index 100% rename from tests/api/test_controller_fixture.py rename to tests/api/controller/test_fixture.py diff --git a/tests/api/test_controller_index.py b/tests/api/controller/test_index.py similarity index 100% rename from tests/api/test_controller_index.py rename to tests/api/controller/test_index.py diff --git a/tests/api/test_controller_loan.py b/tests/api/controller/test_loan.py similarity index 99% rename from tests/api/test_controller_loan.py rename to tests/api/controller/test_loan.py index 7231409dd..110cab527 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/controller/test_loan.py @@ -1,7 +1,6 @@ import datetime import urllib.parse from decimal import Decimal -from typing import Optional from unittest.mock import MagicMock, patch import feedparser @@ -872,7 +871,7 @@ def test_fulfill_without_single_item_feed(self, loan_fixture: LoanFixture): authenticated = controller.authenticated_patron_from_request() loan_fixture.pool.loan_to(authenticated) with patch( - "api.controller.OPDSAcquisitionFeed.single_entry_loans_feed" + "api.controller.opds_feed.OPDSAcquisitionFeed.single_entry_loans_feed" ) as feed, patch.object(circulation, "fulfill") as fulfill: # Complex setup # The fulfillmentInfo should not be have response type @@ -900,7 +899,7 @@ def test_no_drm_fulfill(self, loan_fixture: LoanFixture): with_license_pool=True, data_source_name=DataSource.OVERDRIVE ) - pool_opt: Optional[LicensePool] = work.active_license_pool() + pool_opt: LicensePool | None = work.active_license_pool() assert pool_opt is not None pool: LicensePool = pool_opt pool.loan_to(patron) @@ -1442,19 +1441,18 @@ def test_loan_duration_settings_impact_on_loans_and_borrow_response( data_source_name=collection_data_source_name, ) + collection.libraries.append(loan_fixture.db.default_library()) if collection_default_loan_period: - library_id = loan_fixture.db.default_library().id - assert isinstance(library_id, int) + lib_config = collection.integration_configuration.for_library( + loan_fixture.db.default_library() + ) + assert lib_config is not None DatabaseTransactionFixture.set_settings( - collection.integration_configuration.for_library( - library_id, create=True - ), + lib_config, collection.loan_period_key(), collection_default_loan_period, ) - loan_fixture.db.default_library().collections.append(collection) - def create_work_and_return_license_pool_and_loan_info(**kwargs): loan_start = kwargs.pop("loan_start", utc_now()) loan_end = kwargs.pop("loan_end", None) diff --git a/tests/api/controller/test_marc.py b/tests/api/controller/test_marc.py new file mode 100644 index 000000000..d4665cfe8 --- /dev/null +++ b/tests/api/controller/test_marc.py @@ -0,0 +1,291 @@ +from __future__ import annotations + +import datetime +from unittest.mock import MagicMock + +import pytest +from flask import Response + +from api.controller.marc import MARCRecordController +from core.integration.goals import Goals +from core.marc import MARCExporter +from core.model import Collection, Library, MarcFile, create +from core.service.storage.s3 import S3Service +from core.util.datetime_helpers import utc_now +from tests.fixtures.database import DatabaseTransactionFixture + + +class MARCRecordControllerFixture: + def __init__(self, db: 
DatabaseTransactionFixture): + self.db = db + self.mock_s3_service = MagicMock(spec=S3Service) + self.mock_s3_service.generate_url = lambda x: "http://s3.url/" + x + self.controller = MARCRecordController(self.mock_s3_service) + self.library = db.default_library() + self.collection = db.default_collection() + self.collection.export_marc_records = True + + # stub out the library function to return the default library, + # since we don't have a request context + self.controller.library = lambda: self.library + + def integration(self, library: Library | None = None): + library = library or self.library + return self.db.integration_configuration( + MARCExporter.__name__, + Goals.CATALOG_GOAL, + libraries=[library], + ) + + def file( + self, + library: Library | None = None, + collection: Collection | None = None, + key: str | None = None, + created: datetime.datetime | None = None, + since: datetime.datetime | None = None, + ): + key = key or self.db.fresh_str() + created = created or utc_now() + library = library or self.library + collection = collection or self.collection + + return create( + self.db.session, + MarcFile, + library=library, + collection=collection, + created=created, + since=since, + key=key, + ) + + def get_response_html(self, response: Response) -> str: + assert response.status_code == 200 + assert response.content_type == "text/html" + html = response.get_data(as_text=True) + assert ("Download MARC files for %s" % self.library.name) in html + return html + + +@pytest.fixture +def marc_record_controller_fixture( + db: DatabaseTransactionFixture, +) -> MARCRecordControllerFixture: + return MARCRecordControllerFixture(db) + + +class TestMARCRecordController: + def test_has_integration( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + # No integration is configured. + assert not marc_record_controller_fixture.controller.has_integration( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + + # An integration is configured, but not for this library. + other_library = marc_record_controller_fixture.db.library() + marc_record_controller_fixture.integration(library=other_library) + assert not marc_record_controller_fixture.controller.has_integration( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + + # An integration is configured for this library. + marc_record_controller_fixture.integration() + assert marc_record_controller_fixture.controller.has_integration( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + + def test_get_files_no_files( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + assert ( + marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + == {} + ) + + def test_get_files_one_collection( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + + # Only a single full file is given, the most recent one. Even + # though there are older full files, they are ignored. + marc_record_controller_fixture.file(created=now) + marc_record_controller_fixture.file(created=yesterday) + + # There are multiple delta files, and they are all returned. 
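+ # (Unlike full files, deltas are never filtered down to the most recent one.)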
+ marc_record_controller_fixture.file(created=now, since=yesterday) + marc_record_controller_fixture.file(created=last_week, since=yesterday) + + files = marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + + assert len(files) == 1 + assert files["Default Collection"].full is not None + assert files["Default Collection"].full.created == now + + assert len(files["Default Collection"].deltas) == 2 + + # The delta files are sorted by their created date, so the latest + # delta file is first. + [delta_now, delta_last_week] = files["Default Collection"].deltas + assert delta_now.created == now + assert delta_now.since == yesterday + assert delta_last_week.created == last_week + assert delta_last_week.since == yesterday + + def test_get_files_collection_removed_from_library( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.file(created=utc_now()) + files = marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + assert len(files) == 1 + + # The collection is removed from the library, so it's not returned. + marc_record_controller_fixture.collection.libraries = [] + + files = marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + assert len(files) == 0 + + def test_get_files_multiple_collections( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + db = marc_record_controller_fixture.db + now = utc_now() + last_week = now - datetime.timedelta(days=7) + + # Add a full file to the default collection. + collection_1 = marc_record_controller_fixture.collection + marc_record_controller_fixture.file(collection=collection_1, created=last_week) + + # Create a second collection, with a full file and a delta. + collection_2 = db.collection(name="Second Collection") + collection_2.export_marc_records = True + collection_2.libraries = [marc_record_controller_fixture.library] + marc_record_controller_fixture.file(collection=collection_2, created=now) + marc_record_controller_fixture.file( + collection=collection_2, created=now, since=last_week + ) + + # Create a third collection that doesn't export MARC records. + collection_3 = db.collection() + collection_3.export_marc_records = False + collection_3.libraries = [marc_record_controller_fixture.library] + marc_record_controller_fixture.file(collection=collection_3, created=now) + + # Create a fourth collection that doesn't belong to the library. + collection_4 = db.collection() + collection_4.export_marc_records = True + collection_4.libraries = [] + marc_record_controller_fixture.file(collection=collection_4, created=now) + + files = marc_record_controller_fixture.controller.get_files( + db.session, + marc_record_controller_fixture.library, + ) + + assert len(files) == 2 + + # The returned collections are sorted by name. 
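+ # ("Default Collection" sorts alphabetically before "Second Collection".)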
+ assert list(files.keys()) == [collection_1.name, collection_2.name] + + [collection_1_result, collection_2_result] = files.values() + + assert collection_1_result.full is not None + assert collection_1_result.full.created == last_week + assert len(collection_1_result.deltas) == 0 + + assert collection_2_result.full is not None + assert collection_2_result.full.created == now + assert len(collection_2_result.deltas) == 1 + + def test_download_page_with_full_and_delta( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + + marc_record_controller_fixture.integration() + marc_record_controller_fixture.file(key="full", created=now) + marc_record_controller_fixture.file(key="old_full", created=yesterday) + marc_record_controller_fixture.file(key="delta_1", created=now, since=yesterday) + marc_record_controller_fixture.file( + key="delta_2", created=yesterday, since=last_week + ) + + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + + assert ( + '<a href="http://s3.url/full">Full file - last updated %s</a>' + % now.strftime("%B %-d, %Y") + in html + ) + assert '<a href="http://s3.url/old_full">' not in html + assert "<h3>Update-only files</h3>" in html + assert ( + '<a href="http://s3.url/delta_1">Updates from %s to %s</a>' + % (yesterday.strftime("%B %-d, %Y"), now.strftime("%B %-d, %Y")) + in html + ) + assert ( + '<a href="http://s3.url/delta_2">Updates from %s to %s</a>' + % (last_week.strftime("%B %-d, %Y"), yesterday.strftime("%B %-d, %Y")) + in html + ) + + def test_download_page_with_exporter_but_no_collection( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.integration() + marc_record_controller_fixture.collection.export_marc_records = False + + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "No collections are configured to export MARC records" in html + + def test_download_page_with_exporter_but_no_files( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.integration() + + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "MARC files aren't ready" in html + + def test_download_page_no_exporter( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "No MARC exporter is currently configured" in html + + def test_download_page_no_storage_service( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.integration() + controller = marc_record_controller_fixture.controller + controller.storage_service = None + + response = controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "No storage service is currently configured" in html diff --git a/tests/api/test_controller_multilib.py b/tests/api/controller/test_multilib.py similarity index 76% rename from tests/api/test_controller_multilib.py rename to tests/api/controller/test_multilib.py index 60488f1c5..d3c4acbf8 100644 --- a/tests/api/test_controller_multilib.py +++ b/tests/api/controller/test_multilib.py @@ -1,4 +1,4 @@ -from core.model import Collection, ExternalIntegration, get_one_or_create +from core.model import Collection, ExternalIntegration from core.opds_import import OPDSAPI from tests.fixtures.api_controller import ( CirculationControllerFixture, @@ -16,19 +16,15 @@ def make_default_libraries(_db): return [controller_fixture.db.library() for x in range(2)] def make_default_collection(_db, library): - collection, ignore = get_one_or_create( - controller_fixture.db.session, - Collection, - name=f"{controller_fixture.db.fresh_str()} (for multi-library test)", - ) - collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) - integration = collection.create_integration_configuration( - ExternalIntegration.OPDS_IMPORT + collection, _ = Collection.by_name_and_protocol( + _db, + f"{controller_fixture.db.fresh_str()} (for multi-library test)", + ExternalIntegration.OPDS_IMPORT, ) settings = OPDSAPI.settings_class()( external_account_id="http://url.com", data_source="OPDS" ) - OPDSAPI.settings_update(integration, settings) + OPDSAPI.settings_update(collection.integration_configuration, settings) library.collections.append(collection) return collection diff --git a/tests/api/test_controller_odl_notify.py b/tests/api/controller/test_odl_notify.py similarity index 86% rename from tests/api/test_controller_odl_notify.py rename to tests/api/controller/test_odl_notify.py index
4f855c8f2..de510c1ce 100644 --- a/tests/api/test_controller_odl_notify.py +++ b/tests/api/controller/test_odl_notify.py @@ -7,7 +7,7 @@ from api.odl import ODLAPI from api.odl2 import ODL2API from api.problem_details import INVALID_LOAN_FOR_ODL_NOTIFICATION, NO_ACTIVE_LOAN -from core.model import Collection, get_one_or_create +from core.model import Collection from tests.fixtures.api_controller import ControllerFixture from tests.fixtures.database import DatabaseTransactionFixture @@ -18,27 +18,17 @@ def __init__(self, db: DatabaseTransactionFixture): self.library = self.db.default_library() """Create a mock ODL collection to use in tests.""" - self.collection, ignore = get_one_or_create( - self.db.session, - Collection, - name="Test ODL Collection", - create_method_kwargs=dict( - external_account_id="http://odl", - ), + self.collection, _ = Collection.by_name_and_protocol( + self.db.session, "Test ODL Collection", ODLAPI.label() ) - integration = self.collection.create_external_integration( - protocol=self.integration_protocol() - ) - config = self.collection.create_integration_configuration( - self.integration_protocol() - ) - config.settings_dict = { + self.collection.integration_configuration.settings_dict = { "username": "a", "password": "b", "url": "http://metadata", + "external_integration_id": "http://odl", Collection.DATA_SOURCE_NAME_SETTING: "Feedbooks", } - self.library.collections.append(self.collection) + self.collection.libraries.append(self.library) self.work = self.db.work(with_license_pool=True, collection=self.collection) def setup(self, available, concurrency, left=None, expires=None): diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/controller/test_opds_feed.py similarity index 99% rename from tests/api/test_controller_opdsfeed.py rename to tests/api/controller/test_opds_feed.py index e237c72e6..4ed4885b8 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/controller/test_opds_feed.py @@ -1,12 +1,12 @@ import json -from typing import Any, Dict +from typing import Any from unittest.mock import MagicMock from urllib.parse import quote_plus import feedparser from flask import url_for -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from api.lanes import HasSeriesFacets, JackpotFacets, JackpotWorkList from api.problem_details import REMOTE_INTEGRATION_FAILED from core.app_server import load_facets_from_request @@ -116,7 +116,7 @@ def test_feed( # But the rest of the feed looks good. links = feed["feed"]["links"] - by_rel: Dict[str, Any] = dict() + by_rel: dict[str, Any] = dict() # Put the links into a data structure based on their rel values. 
for i in links: diff --git a/tests/api/test_controller_patron_access_token.py b/tests/api/controller/test_patron_access_token.py similarity index 96% rename from tests/api/test_controller_patron_access_token.py rename to tests/api/controller/test_patron_access_token.py index ef4cca8d7..9c4a51dff 100644 --- a/tests/api/test_controller_patron_access_token.py +++ b/tests/api/controller/test_patron_access_token.py @@ -8,7 +8,7 @@ from tests.fixtures.database import DatabaseTransactionFixture if TYPE_CHECKING: - from api.controller import PatronAuthTokenController + from api.controller.patron_auth_token import PatronAuthTokenController class PatronAuthTokenControllerFixture(CirculationControllerFixture): diff --git a/tests/api/test_controller_playtime_entries.py b/tests/api/controller/test_playtime_entries.py similarity index 99% rename from tests/api/test_controller_playtime_entries.py rename to tests/api/controller/test_playtime_entries.py index 86a7f7b87..36ab25ae8 100644 --- a/tests/api/test_controller_playtime_entries.py +++ b/tests/api/controller/test_playtime_entries.py @@ -241,7 +241,7 @@ def test_api_validation(self, circulation_fixture: CirculationControllerFixture) assert response.detail == "Collection was not found in the Library." # Identifier not part of collection - library.collections.append(collection) + collection.libraries.append(library) response = circulation_fixture.manager.playtime_entries.track_playtimes( collection.id, identifier.type, identifier.identifier ) diff --git a/tests/api/test_controller_profile.py b/tests/api/controller/test_profile.py similarity index 93% rename from tests/api/test_controller_profile.py rename to tests/api/controller/test_profile.py index fd9119bea..7830dcbb3 100644 --- a/tests/api/test_controller_profile.py +++ b/tests/api/controller/test_profile.py @@ -82,16 +82,7 @@ def test_put(self, profile_fixture: ProfileFixture): ) assert request_patron.synchronize_annotations is None - # This means we can't create annotations for them. - pytest.raises( - ValueError, - Annotation.get_one_or_create, - profile_fixture.db.session, - patron=request_patron, - identifier=identifier, - ) - - # But by sending a PUT request... + # By sending a PUT request... profile_fixture.manager.profiles.protocol() # ...we can change synchronize_annotations to True. diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/controller/test_scopedsession.py similarity index 95% rename from tests/api/test_controller_scopedsession.py rename to tests/api/controller/test_scopedsession.py index f5addd55f..66b7ef50b 100644 --- a/tests/api/test_controller_scopedsession.py +++ b/tests/api/controller/test_scopedsession.py @@ -55,19 +55,15 @@ def make_default_collection(self, session: Session, library): """We need to create a test collection that uses the scoped session. 
""" - collection, ignore = create( + collection, _ = Collection.by_name_and_protocol( session, - Collection, - name=self.fresh_id() + " (collection for scoped session)", - ) - collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) - integration = collection.create_integration_configuration( - ExternalIntegration.OPDS_IMPORT + self.fresh_id() + " (collection for scoped session)", + ExternalIntegration.OPDS_IMPORT, ) settings = OPDSAPI.settings_class()( external_account_id="http://url.com", data_source="OPDS" ) - OPDSAPI.settings_update(integration, settings) + OPDSAPI.settings_update(collection.integration_configuration, settings) library.collections.append(collection) return collection diff --git a/tests/api/test_controller_staticfile.py b/tests/api/controller/test_staticfile.py similarity index 100% rename from tests/api/test_controller_staticfile.py rename to tests/api/controller/test_staticfile.py diff --git a/tests/api/test_controller_urn_lookup.py b/tests/api/controller/test_urn_lookup.py similarity index 100% rename from tests/api/test_controller_urn_lookup.py rename to tests/api/controller/test_urn_lookup.py diff --git a/tests/api/test_controller_work.py b/tests/api/controller/test_work.py similarity index 99% rename from tests/api/test_controller_work.py rename to tests/api/controller/test_work.py index 29f9dc289..70796b177 100644 --- a/tests/api/test_controller_work.py +++ b/tests/api/controller/test_work.py @@ -1,7 +1,7 @@ import datetime import json import urllib.parse -from typing import Any, Dict +from typing import Any from unittest.mock import MagicMock import feedparser @@ -327,7 +327,7 @@ def test_permalink_does_not_return_fulfillment_links_for_authenticated_patrons_w patron2_loan, _ = pool.loan_to(patron_2) # We want to make sure that the feed doesn't contain any fulfillment links. 
- active_loans_by_work: Dict[Any, Any] = {} + active_loans_by_work: dict[Any, Any] = {} annotator = LibraryAnnotator( None, None, diff --git a/tests/api/discovery/test_opds_registration.py b/tests/api/discovery/test_opds_registration.py index 94ee5ecda..60e67f23a 100644 --- a/tests/api/discovery/test_opds_registration.py +++ b/tests/api/discovery/test_opds_registration.py @@ -1,9 +1,10 @@ import base64 import json import os +from collections.abc import Callable from dataclasses import dataclass from functools import partial -from typing import Any, Callable, List, Optional +from typing import Any from unittest.mock import MagicMock import pytest @@ -56,7 +57,7 @@ def __init__( ) def create_registration( - self, library: Optional[Library] = None + self, library: Library | None = None ) -> DiscoveryServiceRegistration: obj, _ = create( self.db.session, @@ -719,7 +720,7 @@ class Processed: url_for: Callable[..., str] class Mock(LibraryRegistrationScript): - processed: List[Processed] = [] + processed: list[Processed] = [] def process_library( # type: ignore[override] self, diff --git a/tests/api/feed/fixtures.py b/tests/api/feed/fixtures.py index 002d806ba..03990a3fe 100644 --- a/tests/api/feed/fixtures.py +++ b/tests/api/feed/fixtures.py @@ -1,7 +1,8 @@ import urllib +from collections.abc import Callable from dataclasses import dataclass from functools import partial -from typing import Any, Callable +from typing import Any from unittest.mock import patch import pytest diff --git a/tests/api/feed/test_annotators.py b/tests/api/feed/test_annotators.py index c09d8e90c..3e000914c 100644 --- a/tests/api/feed/test_annotators.py +++ b/tests/api/feed/test_annotators.py @@ -197,7 +197,7 @@ def test_detailed_author(self, db: DatabaseTransactionFixture): work.presentation_edition.add_contributor(c, Contributor.PRIMARY_AUTHOR_ROLE) [same_tag] = VerboseAnnotator.authors(work.presentation_edition)["authors"] - assert same_tag.dict() == author.dict() + assert same_tag.asdict() == author.asdict() def test_duplicate_author_names_are_ignored(self, db: DatabaseTransactionFixture): session = db.session diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 5eec64126..f9b1dff34 100644 --- a/tests/api/feed/test_library_annotator.py +++ b/tests/api/feed/test_library_annotator.py @@ -1,6 +1,5 @@ import datetime from collections import defaultdict -from typing import List from unittest.mock import create_autospec, patch import dateutil @@ -338,7 +337,7 @@ def test_adobe_id_tags_when_vendor_id_configured( # object that renders to the same data. same_tag = annotator_fixture.annotator.adobe_id_tags(patron_identifier) assert same_tag is not element - assert same_tag["drm_licensor"].dict() == element["drm_licensor"].dict() + assert same_tag["drm_licensor"].asdict() == element["drm_licensor"].asdict() # If the Adobe Vendor ID configuration is present but # incomplete, adobe_id_tags does nothing. @@ -712,7 +711,7 @@ def assert_link_on_entry( def get_link_by_rel(rel): if isinstance(entry, WorkEntry): links = entry.computed.other_links + entry.computed.acquisition_links - elif isinstance(entry, List): + elif isinstance(entry, list): links = [e.link for e in entry] else: links = [entry.link] @@ -1427,9 +1426,9 @@ def test_drm_device_registration_feed_tags( # If we remove that attribute, the feed-level tag is the same as the # generic tag. 
- assert feed_tag["drm_licensor"].dict() != generic_tag["drm_licensor"].dict() + assert feed_tag["drm_licensor"].asdict() != generic_tag["drm_licensor"].asdict() delattr(feed_tag["drm_licensor"], "scheme") - assert feed_tag["drm_licensor"].dict() == generic_tag["drm_licensor"].dict() + assert feed_tag["drm_licensor"].asdict() == generic_tag["drm_licensor"].asdict() def test_borrow_link_raises_unfulfillable_work( self, annotator_fixture: LibraryAnnotatorFixture diff --git a/tests/api/feed/test_loan_and_hold_annotator.py b/tests/api/feed/test_loan_and_hold_annotator.py index 79df7ed50..1af8c79ce 100644 --- a/tests/api/feed/test_loan_and_hold_annotator.py +++ b/tests/api/feed/test_loan_and_hold_annotator.py @@ -211,6 +211,7 @@ def test_annotate_work_entry(self, db: DatabaseTransactionFixture): protocol=ExternalIntegration.OPDS_FOR_DISTRIBUTORS ) work = db.work(with_license_pool=True, collection=opds_for_distributors) + work.active_license_pool().should_track_playtime = True edition = work.presentation_edition edition.medium = EditionConstants.AUDIO_MEDIUM edition.primary_identifier = identifier diff --git a/tests/api/feed/test_opds2_serializer.py b/tests/api/feed/test_opds2_serializer.py index 961926802..ee2a73f60 100644 --- a/tests/api/feed/test_opds2_serializer.py +++ b/tests/api/feed/test_opds2_serializer.py @@ -146,6 +146,11 @@ def test_serialize_work_entry(self): assert metadata["narrator"] == dict(name="narrator2") def test__serialize_acquisition_link(self): + drm_licensor = FeedEntryType() + drm_licensor.add_attributes( + {"vendor": "vendor_name", "clientToken": FeedEntryType(text="token_value")} + ) + serializer = OPDS2Serializer() acquisition = Acquisition( href="http://acquisition", @@ -164,6 +169,7 @@ def test__serialize_acquisition_link(self): ], ), ], + drm_licensor=drm_licensor, ) result = serializer._serialize_acquisition_link(acquisition) @@ -184,6 +190,7 @@ def test__serialize_acquisition_link(self): } ], lcp_hashed_passphrase="LCPPassphrase", + licensor={"clientToken": "token_value", "vendor": "vendor_name"}, ) # Test availability states diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py index 0d4222aa3..a8231bb82 100644 --- a/tests/api/feed/test_opds_acquisition_feed.py +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -1,7 +1,8 @@ import datetime import logging from collections import defaultdict -from typing import Any, Callable, Generator, List, Type +from collections.abc import Callable, Generator +from typing import Any from unittest.mock import MagicMock, patch import pytest @@ -986,10 +987,10 @@ class TestEntrypointLinkInsertionFixture: db: DatabaseTransactionFixture mock: Any no_eps: WorkList - entrypoints: List[MediumEntryPoint] + entrypoints: list[MediumEntryPoint] wl: WorkList lane: Lane - annotator: Type[MockAnnotator] + annotator: type[MockAnnotator] old_add_entrypoint_links: Callable diff --git a/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds b/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds index 5c3269795..ee945e193 100644 --- a/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds +++ b/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds @@ -31,6 +31,26 @@ History remembers Guinevere’s sin, but it was Arthur who transgressed first.Fo 2016-01-01T00:00:00Z
+Camelot's Queen (Volume 2) Audiobook + + + + + +Nicole Evelina + +urn:uuid:04377e87-ab69-41c8-a2a4-812d55dc0953 + +History remembers Guinevere’s sin, but it was Arthur who transgressed first.Forced into a marriage she neither anticipated nor desired, Guinevere finds herself High Queen, ruling and fighting alongside Arthur as they try to subdue the Saxons, Irish and Picts who threaten Britain from every direction. Though her heart still longs for her lost love, Guinevere slowly grows to care for her husband as they join together to defeat their enemies. Meanwhile, within the walls of Camelot their closest allies plot against them. One schemes to make Guinevere his own, another seeks revenge for past transgressions, while a third fixes her eyes on the throne. When the unthinkable happens and Guinevere is feared dead, Arthur installs a new woman in her place, one who will poison his affections toward her, threatening Guinevere’s fragile sanity and eventually driving her into the arms of her champion. Amid this tension a new challenge arises for the king and queen of Camelot: finding the Holy Grail, a sacred relic that promises lasting unity. But peace, as they will soon learn, can be just as dangerous as war. As the court begins to turn on itself, it becomes clear that the quest that was to be Arthur’s lasting legacy may end in the burning fires of condemnation.This highly anticipated sequel to Daughter of Destiny proves there is much more to Guinevere’s story than her marriage and an affair. See the legend you think you know through her eyes and live the adventure of Camelot’s golden days yourself – but be prepared to suffer its downfall as well. + +Copyright held by content provider +Lawson Gartner Pubishing +audio/mpeg +04377e87-ab69-41c8-a2a4-812d55dc0953 +en +2016-01-01T00:00:00Z + + Southern Spirits (Volume 1) diff --git a/tests/api/finland/__init__.py b/tests/api/finland/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/finland/test_ekirjasto.py b/tests/api/finland/test_ekirjasto.py similarity index 99% rename from tests/finland/test_ekirjasto.py rename to tests/api/finland/test_ekirjasto.py index 27ca1e9c0..8c981b58c 100644 --- a/tests/finland/test_ekirjasto.py +++ b/tests/api/finland/test_ekirjasto.py @@ -1,8 +1,8 @@ import datetime import uuid from base64 import b64encode +from collections.abc import Callable from functools import partial -from typing import Callable import jwt import pytest @@ -687,7 +687,7 @@ def test_authenticated_patron_delegate_token_expired( assert patron is None - def test_authenticated_patron_ekirjasto_token_invald( + def test_authenticated_patron_ekirjasto_token_invalid( self, create_provider: Callable[..., MockEkirjastoAuthenticationAPI], controller_fixture: ControllerFixture, diff --git a/tests/finland/test_loan_excel_export.py b/tests/api/finland/test_loan_excel_export.py similarity index 100% rename from tests/finland/test_loan_excel_export.py rename to tests/api/finland/test_loan_excel_export.py diff --git a/tests/finland/test_opensearch_analytics_provider.py b/tests/api/finland/test_opensearch_analytics_provider.py similarity index 74% rename from tests/finland/test_opensearch_analytics_provider.py rename to tests/api/finland/test_opensearch_analytics_provider.py index 014fcae2d..a1034182f 100644 --- a/tests/finland/test_opensearch_analytics_provider.py +++ b/tests/api/finland/test_opensearch_analytics_provider.py @@ -1,14 +1,17 @@ +from unittest.mock import MagicMock, patch + from api.opensearch_analytics_provider import 
OpenSearchAnalyticsProvider from core.analytics import Analytics from core.local_analytics_provider import LocalAnalyticsProvider # The test set is based on core/test_analytics.py -MOCK_PROTOCOL = "../core/mock_analytics_provider" +MOCK_PROTOCOL = "../../core/mock_analytics_provider" class TestOpenSearchAnalytics: - def test_init_opensource_analytics(self): + @patch("api.opensearch_analytics_provider.OpenSearch") + def test_init_opensource_analytics(self, mock_opensearch=MagicMock()): analytics = Analytics( opensearch_analytics_enabled=True, opensearch_analytics_index_prefix="circulation-events", diff --git a/tests/finland/test_opensearch_analytics_search.py b/tests/api/finland/test_opensearch_analytics_search.py similarity index 100% rename from tests/finland/test_opensearch_analytics_search.py rename to tests/api/finland/test_opensearch_analytics_search.py diff --git a/tests/api/mockapi/axis.py b/tests/api/mockapi/axis.py index 0b8456fe5..39adbbe62 100644 --- a/tests/api/mockapi/axis.py +++ b/tests/api/mockapi/axis.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import Session from api.axis import Axis360API -from core.model import Library, get_one_or_create +from core.model import Library from core.model.collection import Collection from core.model.configuration import ExternalIntegration from core.util.http import HTTP @@ -13,27 +13,17 @@ def mock_collection( cls, _db: Session, library: Library, name: str = "Test Axis 360 Collection" ) -> Collection: """Create a mock Axis 360 collection for use in tests.""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict( - external_account_id="c", - ), + collection, _ = Collection.by_name_and_protocol( + _db, name, ExternalIntegration.AXIS_360 ) - integration = collection.create_external_integration( - protocol=ExternalIntegration.AXIS_360 - ) - config = collection.create_integration_configuration( - ExternalIntegration.AXIS_360 - ) - config.settings_dict = { + collection.integration_configuration.settings_dict = { "username": "a", "password": "b", "url": "http://axis.test/", + "external_account_id": "c", } - config.for_library(library.id, create=True) - library.collections.append(collection) + if library not in collection.libraries: + collection.libraries.append(library) return collection def __init__(self, _db, collection, with_token=True, **kwargs): diff --git a/tests/api/mockapi/bibliotheca.py b/tests/api/mockapi/bibliotheca.py index 36486512d..bbeb73472 100644 --- a/tests/api/mockapi/bibliotheca.py +++ b/tests/api/mockapi/bibliotheca.py @@ -3,7 +3,7 @@ from sqlalchemy.orm import Session from api.bibliotheca import BibliothecaAPI -from core.model import Library, get_one_or_create +from core.model import Library from core.model.collection import Collection from core.model.configuration import ExternalIntegration from core.util.http import HTTP @@ -16,26 +16,16 @@ def mock_collection( self, _db: Session, library: Library, name: str = "Test Bibliotheca Collection" ) -> Collection: """Create a mock Bibliotheca collection for use in tests.""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict( - external_account_id="c", - ), + collection, _ = Collection.by_name_and_protocol( + _db, name=name, protocol=ExternalIntegration.BIBLIOTHECA ) - integration = collection.create_external_integration( - protocol=ExternalIntegration.BIBLIOTHECA - ) - config = collection.create_integration_configuration( - ExternalIntegration.BIBLIOTHECA - ) - config.settings_dict = { 
+ collection.integration_configuration.settings_dict = { "username": "a", "password": "b", + "external_account_id": "c", } - config.for_library(library.id, create=True) - library.collections.append(collection) + if library not in collection.libraries: + collection.libraries.append(library) return collection def __init__(self, _db, collection, *args, **kwargs): diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index a6181c421..eb693495d 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -1,6 +1,5 @@ from abc import ABC from collections import defaultdict -from typing import Type from sqlalchemy.orm import Session @@ -11,7 +10,7 @@ LoanInfo, PatronActivityCirculationAPI, ) -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from core.external_search import ExternalSearchIndex from core.integration.settings import BaseSettings from core.model import DataSource, Hold, Loan, get_one_or_create @@ -29,11 +28,11 @@ def description(cls) -> str: return "" @classmethod - def settings_class(cls) -> Type[BaseSettings]: + def settings_class(cls) -> type[BaseSettings]: return BaseSettings @classmethod - def library_settings_class(cls) -> Type[BaseSettings]: + def library_settings_class(cls) -> type[BaseSettings]: return BaseSettings diff --git a/tests/api/mockapi/enki.py b/tests/api/mockapi/enki.py index e73477677..6eed0ed85 100644 --- a/tests/api/mockapi/enki.py +++ b/tests/api/mockapi/enki.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional +from typing import Any from sqlalchemy.orm import Session @@ -12,10 +12,10 @@ class MockEnkiAPI(EnkiAPI): def __init__( - self, _db: Session, library: Library, collection: Optional[Collection] = None + self, _db: Session, library: Library, collection: Collection | None = None ) -> None: - self.responses: List[MockRequestsResponse] = [] - self.requests: List[List[Any]] = [] + self.responses: list[MockRequestsResponse] = [] + self.requests: list[list[Any]] = [] if not collection: collection, ignore = Collection.by_name_and_protocol( @@ -24,14 +24,14 @@ def __init__( assert collection is not None collection.protocol = EnkiAPI.ENKI if collection not in library.collections: - library.collections.append(collection) + collection.libraries.append(library) # Set the "Enki library ID" variable between the default library # and this Enki collection. 
- assert library.id is not None + library_config = collection.integration_configuration.for_library(library) + assert library_config is not None DatabaseTransactionFixture.set_settings( - collection.integration_configuration.for_library(library.id, create=True), - **{self.ENKI_LIBRARY_ID_KEY: "c"} + library_config, **{self.ENKI_LIBRARY_ID_KEY: "c"} ) _db.commit() @@ -40,7 +40,7 @@ def __init__( def queue_response(self, status_code, headers={}, content=None): self.responses.insert(0, MockRequestsResponse(status_code, headers, content)) - def _request(self, method, url, headers, data, params, **kwargs): + def _request(self, url, method, headers, data, params, **kwargs): """Override EnkiAPI._request to pull responses from a queue instead of making real HTTP requests """ diff --git a/tests/api/mockapi/opds_for_distributors.py b/tests/api/mockapi/opds_for_distributors.py index 038bb5de0..d27417a17 100644 --- a/tests/api/mockapi/opds_for_distributors.py +++ b/tests/api/mockapi/opds_for_distributors.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import Session from api.opds_for_distributors import OPDSForDistributorsAPI -from core.model import Library, get_one_or_create +from core.model import Library from core.model.collection import Collection from core.util.http import HTTP from tests.core.mock import MockRequestsResponse @@ -20,25 +20,17 @@ def mock_collection( :param _db: Database session. :param name: A name for the collection. """ - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict( - external_account_id="http://opds", - ), + collection, _ = Collection.by_name_and_protocol( + _db, name=name, protocol=OPDSForDistributorsAPI.label() ) - integration = collection.create_external_integration( - protocol=OPDSForDistributorsAPI.label() + collection.integration_configuration.settings_dict = dict( + username="a", + password="b", + data_source="data_source", + external_account_id="http://opds", ) - config = collection.create_integration_configuration( - OPDSForDistributorsAPI.label() - ) - config.settings_dict = dict( - username="a", password="b", data_source="data_source" - ) - config.for_library(library.id, create=True) - library.collections.append(collection) + if library not in collection.libraries: + collection.libraries.append(library) return collection def __init__(self, _db, collection, *args, **kwargs): diff --git a/tests/api/mockapi/overdrive.py b/tests/api/mockapi/overdrive.py index e96c927ee..b5bf23fff 100644 --- a/tests/api/mockapi/overdrive.py +++ b/tests/api/mockapi/overdrive.py @@ -2,13 +2,12 @@ from sqlalchemy.orm import Session -from api.overdrive import OverdriveAPI, OverdriveConstants -from core.model import Library, get_one_or_create +from api.overdrive import OverdriveAPI, OverdriveLibrarySettings, OverdriveSettings +from core.model import Library from core.model.collection import Collection from core.model.configuration import ExternalIntegration from core.util.http import HTTP from tests.core.mock import MockRequestsResponse -from tests.fixtures.database import DatabaseTransactionFixture class MockOverdriveResponse: @@ -51,28 +50,24 @@ def mock_collection( ils_name: str = "e", ): """Create a mock Overdrive collection for use in tests.""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict(external_account_id=library_id), + collection, _ = Collection.by_name_and_protocol( + _db, name=name, protocol=ExternalIntegration.OVERDRIVE ) - integration = 
collection.create_external_integration( - protocol=ExternalIntegration.OVERDRIVE + settings = OverdriveSettings( + external_account_id=library_id, + overdrive_website_id=website_id, + overdrive_client_key=client_key, + overdrive_client_secret=client_secret, ) - config = collection.create_integration_configuration( - ExternalIntegration.OVERDRIVE + OverdriveAPI.settings_update(collection.integration_configuration, settings) + if library not in collection.libraries: + collection.libraries.append(library) + library_settings = OverdriveLibrarySettings( + ils_name=ils_name, ) - config.settings_dict = { - OverdriveConstants.OVERDRIVE_CLIENT_KEY: client_key, - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: client_secret, - OverdriveConstants.OVERDRIVE_WEBSITE_ID: website_id, - } - library.collections.append(collection) - db = DatabaseTransactionFixture - assert library.id is not None - db.set_settings(config.for_library(library.id, create=True), ils_name=ils_name) - _db.refresh(config) + library_config = collection.integration_configuration.for_library(library.id) + assert library_config is not None + OverdriveAPI.library_settings_update(library_config, library_settings) return collection def queue_collection_token(self): diff --git a/tests/api/saml/configuration/test_model.py b/tests/api/saml/configuration/test_model.py index 82c090115..be8797e95 100644 --- a/tests/api/saml/configuration/test_model.py +++ b/tests/api/saml/configuration/test_model.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from datetime import datetime -from typing import Callable from unittest.mock import MagicMock, call, create_autospec import pytest diff --git a/tests/api/saml/conftest.py b/tests/api/saml/conftest.py index 7a93fad70..4d99e2d2b 100644 --- a/tests/api/saml/conftest.py +++ b/tests/api/saml/conftest.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Callable from functools import partial -from typing import TYPE_CHECKING, Callable, List, Optional +from typing import TYPE_CHECKING from unittest.mock import MagicMock import pytest @@ -57,8 +58,8 @@ def create_mock_onelogin_configuration( ) -> Callable[..., SAMLOneLoginConfiguration]: def _create_mock( service_provider: SAMLServiceProviderMetadata, - identity_providers: List[SAMLIdentityProviderMetadata], - configuration: Optional[SAMLWebSSOAuthSettings] = None, + identity_providers: list[SAMLIdentityProviderMetadata], + configuration: SAMLWebSSOAuthSettings | None = None, ): if configuration is None: configuration = create_saml_configuration() diff --git a/tests/api/saml/metadata/federations/test_validator.py b/tests/api/saml/metadata/federations/test_validator.py index 30cac7c42..006cebcad 100644 --- a/tests/api/saml/metadata/federations/test_validator.py +++ b/tests/api/saml/metadata/federations/test_validator.py @@ -1,6 +1,5 @@ import datetime import os -from typing import Optional, Type, Union import pytest from freezegun import freeze_time @@ -136,8 +135,8 @@ def test_validate( self, _, current_time: datetime.datetime, - metadata: Union[str, bytes], - expected_exception: Optional[Type[Exception]], + metadata: str | bytes, + expected_exception: type[Exception] | None, ): # Arrange validator = SAMLFederatedMetadataExpirationValidator() diff --git a/tests/api/saml/metadata/test_parser.py b/tests/api/saml/metadata/test_parser.py index 419072624..487387c1a 100644 --- a/tests/api/saml/metadata/test_parser.py +++ b/tests/api/saml/metadata/test_parser.py @@ -1,4 +1,3 @@ -from typing import Dict, Union from unittest.mock import 
MagicMock, create_autospec import pytest @@ -38,7 +37,7 @@ class TestSAMLMetadataParser: ], ) def test_parse_raises_exception_when_xml_metadata_has_incorrect_format( - self, _, incorrect_xml: Union[str, bytes] + self, _, incorrect_xml: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -63,7 +62,7 @@ def test_parse_raises_exception_when_xml_metadata_has_incorrect_format( def test_parse_raises_exception_when_idp_metadata_does_not_contain_sso_service( self, _, - incorrect_xml_with_one_idp_metadata_without_sso_service: Union[str, bytes], + incorrect_xml_with_one_idp_metadata_without_sso_service: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -90,9 +89,9 @@ def test_parse_raises_exception_when_idp_metadata_does_not_contain_sso_service( def test_parse_raises_exception_when_idp_metadata_contains_sso_service_with_wrong_binding( self, _, - incorrect_xml_with_one_idp_metadata_with_sso_service_with_wrong_binding: Union[ - str, bytes - ], + incorrect_xml_with_one_idp_metadata_with_sso_service_with_wrong_binding: ( + str | bytes + ), ): # Arrange metadata_parser = SAMLMetadataParser() @@ -119,7 +118,7 @@ def test_parse_raises_exception_when_idp_metadata_contains_sso_service_with_wron def test_parse_does_not_raise_exception_when_xml_metadata_does_not_have_display_names( self, _, - correct_xml_with_one_idp_metadata_without_display_names: Union[str, bytes], + correct_xml_with_one_idp_metadata_without_display_names: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -167,7 +166,7 @@ def test_parse_does_not_raise_exception_when_xml_metadata_does_not_have_display_ ], ) def test_parse_correctly_parses_one_idp_metadata( - self, _, correct_xml_with_idp_1: Union[str, bytes] + self, _, correct_xml_with_idp_1: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -269,7 +268,7 @@ def test_parse_correctly_parses_one_idp_metadata( ], ) def test_parse_correctly_parses_idp_metadata_without_name_id_format( - self, _, correct_xml_with_idp_1: Union[str, bytes] + self, _, correct_xml_with_idp_1: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -376,7 +375,7 @@ def test_parse_correctly_parses_idp_metadata_without_name_id_format( def test_parse_correctly_parses_idp_metadata_with_one_certificate( self, _, - correct_xml_with_one_idp_metadata_with_one_certificate: Union[str, bytes], + correct_xml_with_one_idp_metadata_with_one_certificate: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -483,7 +482,7 @@ def test_parse_correctly_parses_idp_metadata_with_one_certificate( ], ) def test_parse_correctly_parses_metadata_with_multiple_descriptors( - self, _, correct_xml_with_multiple_idps: Union[str, bytes] + self, _, correct_xml_with_multiple_idps: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -631,7 +630,7 @@ def test_parse_correctly_parses_metadata_with_multiple_descriptors( def test_parse_raises_exception_when_sp_metadata_does_not_contain_acs_service( self, _, - incorrect_xml_with_one_sp_metadata_without_acs_service: Union[str, bytes], + incorrect_xml_with_one_sp_metadata_without_acs_service: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -653,7 +652,7 @@ def test_parse_raises_exception_when_sp_metadata_does_not_contain_acs_service( ], ) def test_parse_correctly_parses_one_sp_metadata( - self, _, correct_xml_with_one_sp: Union[str, bytes] + self, _, correct_xml_with_one_sp: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -888,7 +887,7 @@ def test_parse( name_id_nq: 
str, name_id_spnq: str, name_id: str, - attributes: Dict[str, Dict], + attributes: dict[str, dict], expected_result: SAMLSubject, ): # Arrange diff --git a/tests/api/saml/test_auth.py b/tests/api/saml/test_auth.py index 25a71779e..305d5598d 100644 --- a/tests/api/saml/test_auth.py +++ b/tests/api/saml/test_auth.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from copy import copy -from typing import Callable from unittest.mock import MagicMock, create_autospec, patch from urllib.parse import parse_qs, urlsplit diff --git a/tests/api/saml/test_provider.py b/tests/api/saml/test_provider.py index 9eb814185..01328f4e1 100644 --- a/tests/api/saml/test_provider.py +++ b/tests/api/saml/test_provider.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Callable +from collections.abc import Callable from unittest.mock import MagicMock, create_autospec, patch import pytest diff --git a/tests/api/sip/test_authentication_provider.py b/tests/api/sip/test_authentication_provider.py index 717308933..6d70e0d8d 100644 --- a/tests/api/sip/test_authentication_provider.py +++ b/tests/api/sip/test_authentication_provider.py @@ -1,18 +1,20 @@ import json +from collections.abc import Callable from datetime import datetime from decimal import Decimal from functools import partial -from typing import Callable, cast +from typing import cast import pytest from api.authentication.base import PatronData from api.authentication.basic import BasicAuthProviderLibrarySettings, Keyboards +from api.problem_details import INVALID_CREDENTIALS from api.sip import SIP2AuthenticationProvider, SIP2LibrarySettings, SIP2Settings from api.sip.client import Constants, Sip2Encoding, SIPClient from api.sip.dialect import Dialect from core.config import CannotLoadConfiguration -from core.util.http import RemoteIntegrationException +from core.util.problem_detail import ProblemDetail from tests.fixtures.database import DatabaseTransactionFixture @@ -400,7 +402,7 @@ def test_encoding( assert patrondata.external_type is None assert PatronData.NO_VALUE == patrondata.block_reason - def test_ioerror_during_connect_becomes_remoteintegrationexception( + def test_ioerror_during_connect_becomes_problemdetail( self, create_provider: Callable[..., SIP2AuthenticationProvider], create_settings: Callable[..., SIP2Settings], @@ -418,20 +420,26 @@ def connect(self): ) provider = create_provider(client=CannotConnect, settings=settings) - with pytest.raises(RemoteIntegrationException) as excinfo: - provider.remote_authenticate( - "username", - "password", - ) - assert "Error accessing unknown server: Doom!" in str(excinfo.value) + response = provider.remote_authenticate( + "username", + "password", + ) - def test_ioerror_during_send_becomes_remoteintegrationexception( + assert isinstance(response, ProblemDetail) + assert response.status_code == INVALID_CREDENTIALS.status_code + assert response.uri == INVALID_CREDENTIALS.uri + assert ( + response.detail + == "Error contacting authentication server (unknown server). Please try again later." + ) + + def test_ioerror_during_send_becomes_problemdetail( self, create_provider: Callable[..., SIP2AuthenticationProvider], create_settings: Callable[..., SIP2Settings], ): """If there's an IOError communicating with the server, - it becomes a RemoteIntegrationException. + it becomes a ProblemDetail to be sent to the client. 
""" class CannotSend(MockSIPClient): @@ -443,12 +451,18 @@ def do_send(self, data): ) provider = create_provider(client=CannotSend, settings=settings) - with pytest.raises(RemoteIntegrationException) as excinfo: - provider.remote_authenticate( - "username", - "password", - ) - assert "Error accessing server.local: Doom!" in str(excinfo.value) + response = provider.remote_authenticate( + "username", + "password", + ) + + assert isinstance(response, ProblemDetail) + assert response.status_code == INVALID_CREDENTIALS.status_code + assert response.uri == INVALID_CREDENTIALS.uri + assert ( + response.detail + == "Error contacting authentication server (server.local). Please try again later." + ) def test_parse_date(self): parse = SIP2AuthenticationProvider.parse_date @@ -492,6 +506,20 @@ def patron_information(self, identifier, password): assert client.patron_information == "1234" assert client.password is None + def test_info_to_patrondata_problemdetail( + self, + create_provider: Callable[..., SIP2AuthenticationProvider], + create_settings: Callable[..., SIP2Settings], + ): + # If we get a ProblemDetail we just return it. + settings = create_settings( + url="server.local", + ) + provider = create_provider(settings=settings) + problem_detail = ProblemDetail("foo") + patron = provider.info_to_patrondata(problem_detail) + assert patron is problem_detail + def test_info_to_patrondata_validate_password( self, create_provider: Callable[..., SIP2AuthenticationProvider], @@ -508,8 +536,7 @@ def test_info_to_patrondata_validate_password( TestSIP2AuthenticationProvider.sierra_valid_login ) patron = provider.info_to_patrondata(info) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "foo@example.com" == patron.email_address assert "LE CARRÉ, JOHN" == patron.personal_name @@ -541,8 +568,7 @@ def test_info_to_patrondata_no_validate_password( TestSIP2AuthenticationProvider.sierra_valid_login ) patron = provider.info_to_patrondata(info, validate_password=False) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "foo@example.com" == patron.email_address assert "LE CARRÉ, JOHN" == patron.personal_name @@ -556,8 +582,7 @@ def test_info_to_patrondata_no_validate_password( TestSIP2AuthenticationProvider.sierra_invalid_login ) patron = provider.info_to_patrondata(info, validate_password=False) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "foo@example.com" == patron.email_address assert "SHELDON, ALICE" == patron.personal_name @@ -590,8 +615,7 @@ def test_patron_block_setting( TestSIP2AuthenticationProvider.evergreen_expired_card ) patron = provider.info_to_patrondata(info) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "863716" == patron.permanent_id assert "Booth Expired Test" == patron.personal_name @@ -625,8 +649,7 @@ def test_patron_block_setting_with_fines( ) info["fee_limit"] = "10.0" patron = provider.info_to_patrondata(info) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "863718" == patron.permanent_id assert "Booth Excessive Fines 
Test" == patron.personal_name diff --git a/tests/api/sip/test_client.py b/tests/api/sip/test_client.py index 309c22d6d..68a3f90a3 100644 --- a/tests/api/sip/test_client.py +++ b/tests/api/sip/test_client.py @@ -2,8 +2,8 @@ import socket import ssl import tempfile +from collections.abc import Callable from functools import partial -from typing import Callable, List, Optional from unittest.mock import MagicMock, Mock, patch import pytest @@ -161,7 +161,7 @@ def test_connect(self): socket.socket = old_socket def test_secure_connect_insecure(self, mock_socket: MockSocketFixture): - self.context: Optional[MagicMock] = None + self.context: MagicMock | None = None def create_context(protocol): self.context = Mock(spec=ssl.SSLContext) @@ -221,7 +221,7 @@ def create_context(protocol): # Record the temporary files created. self.old_mkstemp = tempfile.mkstemp - self.temporary_files: List[str] = [] + self.temporary_files: list[str] = [] def create_temporary_file(): (fd, name) = self.old_mkstemp() diff --git a/tests/api/test_adobe_vendor_id.py b/tests/api/test_adobe_vendor_id.py index 18de4070d..47a6a50bb 100644 --- a/tests/api/test_adobe_vendor_id.py +++ b/tests/api/test_adobe_vendor_id.py @@ -2,7 +2,6 @@ import base64 import datetime -from typing import Type from unittest.mock import MagicMock import pytest @@ -42,7 +41,7 @@ class TestAuthdataUtility: def test_eligible_authdata_vendor_id_integrations( self, registration_status: RegistrationStatus, - authdata_utility_type: Type[AuthdataUtility] | Type[None], + authdata_utility_type: type[AuthdataUtility] | type[None], authdata: AuthdataUtility, vendor_id_fixture: VendorIDFixture, ): diff --git a/tests/api/test_annotations.py b/tests/api/test_annotations.py index eae2d7547..92b56923b 100644 --- a/tests/api/test_annotations.py +++ b/tests/api/test_annotations.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Any, Dict +from typing import Any import pytest from pyld import jsonld @@ -390,7 +390,7 @@ class TestAnnotationParser: def _sample_jsonld( annotation_parser_fixture: AnnotationParserFixture, motivation=Annotation.IDLING ): - data: Dict[Any, Any] = dict() + data: dict[Any, Any] = dict() data["@context"] = [ AnnotationWriter.JSONLD_CONTEXT, {"ls": Annotation.LS_NAMESPACE}, @@ -456,7 +456,7 @@ def test_parse_expanded_jsonld( ): annotation_parser_fixture.pool.loan_to(annotation_parser_fixture.patron_value) - data: Dict[Any, Any] = dict() + data: dict[Any, Any] = dict() data["@type"] = ["http://www.w3.org/ns/oa#Annotation"] data["http://www.w3.org/ns/oa#motivatedBy"] = [{"@id": Annotation.IDLING}] data["http://www.w3.org/ns/oa#hasBody"] = [ @@ -514,7 +514,7 @@ def test_parse_compacted_jsonld( ): annotation_parser_fixture.pool.loan_to(annotation_parser_fixture.patron_value) - data: Dict[Any, Any] = dict() + data: dict[Any, Any] = dict() data["@type"] = "http://www.w3.org/ns/oa#Annotation" data["http://www.w3.org/ns/oa#motivatedBy"] = {"@id": Annotation.IDLING} data["http://www.w3.org/ns/oa#hasBody"] = { @@ -749,4 +749,6 @@ def test_parse_jsonld_with_patron_opt_out( data_json, annotation_parser_fixture.patron_value, ) - assert PATRON_NOT_OPTED_IN_TO_ANNOTATION_SYNC == annotation + + # We no longer respect the patron settings for sync + assert isinstance(annotation, Annotation) diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 505741c21..ecf4b35af 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -7,9 +7,10 @@ import json import os import re +from collections.abc import 
Callable from decimal import Decimal from functools import partial -from typing import TYPE_CHECKING, Callable, Literal, Tuple, cast +from typing import TYPE_CHECKING, Literal, cast from unittest.mock import MagicMock, PropertyMock, patch import flask @@ -166,7 +167,7 @@ def patron_data() -> PatronData: ) -InactivePatronFixture = Tuple[Patron, PatronData] +InactivePatronFixture = tuple[Patron, PatronData] @pytest.fixture diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index 55ee36a46..c8effd80c 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -4,15 +4,7 @@ import random from datetime import datetime, timedelta from io import BytesIO, StringIO -from typing import ( - TYPE_CHECKING, - ClassVar, - Optional, - Protocol, - Type, - cast, - runtime_checkable, -) +from typing import TYPE_CHECKING, ClassVar, Protocol, cast, runtime_checkable from unittest import mock from unittest.mock import MagicMock, create_autospec @@ -106,12 +98,6 @@ def bibliotheca_fixture( class TestBibliothecaAPI: - def test_external_integration(self, bibliotheca_fixture: BibliothecaAPITestFixture): - assert ( - bibliotheca_fixture.collection.external_integration - == bibliotheca_fixture.api.external_integration(object()) - ) - def test__run_self_tests( self, bibliotheca_fixture: BibliothecaAPITestFixture, @@ -983,7 +969,7 @@ def as_problem_detail_document(self, debug=False) -> ProblemDetail: def test_exception( self, incoming_message: str, - error_class: Type[CirculationException], + error_class: type[CirculationException], error_code: int, problem_detail_title: str, problem_detail_code: int, @@ -1039,8 +1025,8 @@ def test_exception( ) def test_remote_initiated_server_error( self, - incoming_message: Optional[str], - incoming_message_from_file: Optional[str], + incoming_message: str | None, + incoming_message_from_file: str | None, error_string: str, api_bibliotheca_files_fixture: BibliothecaFilesFixture, ): diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index eaef89741..d354a1081 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -932,7 +932,9 @@ def test_borrow_hold_limit_reached( def test_fulfill_errors(self, circulation_api: CirculationAPIFixture): # Here's an open-access title. 
collection = circulation_api.db.collection( - protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS" + protocol=ExternalIntegration.OPDS_IMPORT, + data_source_name="OPDS", + external_account_id="http://url/", ) circulation_api.pool.open_access = True circulation_api.pool.collection = collection diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index a43de03ac..7be453e76 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -1,8 +1,8 @@ from unittest.mock import MagicMock from api.authenticator import LibraryAuthenticator +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.custom_index import CustomIndexView from api.problem_details import * from core.feed.annotator.circulation import ( diff --git a/tests/api/test_controller_marc.py b/tests/api/test_controller_marc.py deleted file mode 100644 index 59652cd13..000000000 --- a/tests/api/test_controller_marc.py +++ /dev/null @@ -1,171 +0,0 @@ -import datetime - -from core.model import CachedMARCFile, ExternalIntegration, Representation, create -from core.util.datetime_helpers import utc_now -from tests.fixtures.api_controller import CirculationControllerFixture - - -class TestMARCRecordController: - def test_download_page_with_exporter_and_files( - self, circulation_fixture: CirculationControllerFixture - ): - db = circulation_fixture.db - - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - - library = db.default_library() - lane = db.lane(display_name="Test Lane") - - exporter = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], - ) - - rep1, ignore = create( - db.session, - Representation, - url="http://mirror1", - mirror_url="http://mirror1", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=now, - ) - cache1, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=None, - representation=rep1, - end_time=now, - ) - - rep2, ignore = create( - db.session, - Representation, - url="http://mirror2", - mirror_url="http://mirror2", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=yesterday, - ) - cache2, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=lane, - representation=rep2, - end_time=yesterday, - ) - - rep3, ignore = create( - db.session, - Representation, - url="http://mirror3", - mirror_url="http://mirror3", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=now, - ) - cache3, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=None, - representation=rep3, - end_time=now, - start_time=yesterday, - ) - - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - - assert "
<h3>All Books</h3>" in html - assert ( - '<a href="http://mirror1">Full file - last updated %s</a>' - % now.strftime("%B %-d, %Y") - in html - ) - assert "
<h3>Update-only files</h3>" in html - assert ( - '<a href="http://mirror3">Updates from %s to %s</a>' - % (yesterday.strftime("%B %-d, %Y"), now.strftime("%B %-d, %Y")) - in html - ) - - assert "
<h3>Test Lane</h3>" in html - assert ( - '<a href="http://mirror2">Full file - last updated %s</a>' - % yesterday.strftime("%B %-d, %Y") - in html - ) - - def test_download_page_with_exporter_but_no_files( - self, circulation_fixture: CirculationControllerFixture - ): - db = circulation_fixture.db - - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - - library = db.default_library() - - exporter = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], - ) - - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - assert "MARC files aren't ready" in html - - def test_download_page_no_exporter( - self, circulation_fixture: CirculationControllerFixture - ): - db = circulation_fixture.db - library = db.default_library() - - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - assert ("No MARC exporter is currently configured") in html - - # If the exporter was deleted after some MARC files were cached, - # they will still be available to download. - now = utc_now() - rep, ignore = create( - db.session, - Representation, - url="http://mirror1", - mirror_url="http://mirror1", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=now, - ) - cache, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=None, - representation=rep, - end_time=now, - ) - - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - assert "No MARC exporter is currently configured" in html - assert "
<h3>All Books</h3>
" in html - assert ( - 'Full file - last updated %s' - % now.strftime("%B %-d, %Y") - in html - ) diff --git a/tests/api/test_device_tokens.py b/tests/api/test_device_tokens.py index 1a9775b80..f807a1f8a 100644 --- a/tests/api/test_device_tokens.py +++ b/tests/api/test_device_tokens.py @@ -5,7 +5,7 @@ from tests.fixtures.api_controller import ControllerFixture -@patch("api.controller.flask") +@patch("api.controller.device_tokens.flask") class TestDeviceTokens: def test_create_invalid_type(self, flask, controller_fixture: ControllerFixture): db = controller_fixture.db diff --git a/tests/api/test_enki.py b/tests/api/test_enki.py index 9cdbb3cbb..811ec3e82 100644 --- a/tests/api/test_enki.py +++ b/tests/api/test_enki.py @@ -82,8 +82,12 @@ def test_enki_library_id(self, enki_test_fixture: EnkiTestFixure): assert other_library.id is not None config = enki_test_fixture.api.integration_configuration() assert config is not None + + config.libraries.append(other_library) + lib_config = config.for_library(other_library) + assert lib_config is not None DatabaseTransactionFixture.set_settings( - config.for_library(other_library.id, create=True), + lib_config, **{enki_test_fixture.api.ENKI_LIBRARY_ID_KEY: "other library id"}, ) db.session.commit() diff --git a/tests/api/test_firstbook2.py b/tests/api/test_firstbook2.py index 2515ce19e..432843de1 100644 --- a/tests/api/test_firstbook2.py +++ b/tests/api/test_firstbook2.py @@ -1,8 +1,8 @@ import os import time import urllib.parse +from collections.abc import Callable from functools import partial -from typing import Callable import jwt import pytest diff --git a/tests/api/test_kansas_patron.py b/tests/api/test_kansas_patron.py index 7a4a50b80..bc33bb98d 100644 --- a/tests/api/test_kansas_patron.py +++ b/tests/api/test_kansas_patron.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from functools import partial -from typing import Callable, List import pytest from lxml import etree @@ -18,7 +18,7 @@ def __init__(self, content): class MockAPI(KansasAuthenticationAPI): - queue: List[bytes] + queue: list[bytes] def __init__( self, diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index b02782c42..f4379e50a 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -31,7 +31,14 @@ from core.external_search import Filter from core.lane import DefaultSortOrderFacets, Facets, FeaturedFacets, Lane, WorkList from core.metadata_layer import ContributorData, Metadata -from core.model import Contributor, DataSource, Edition, ExternalIntegration, create +from core.model import ( + Contributor, + DataSource, + Edition, + ExternalIntegration, + Library, + create, +) from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture from tests.fixtures.search import ExternalSearchFixtureFake @@ -908,14 +915,67 @@ def test_overview_facets(self, lane_fixture: LaneFixture): class TestCrawlableFacets: def test_default(self, db: DatabaseTransactionFixture): facets = CrawlableFacets.default(db.default_library()) - assert CrawlableFacets.COLLECTION_FULL == facets.collection - assert CrawlableFacets.AVAILABLE_ALL == facets.availability - assert CrawlableFacets.ORDER_LAST_UPDATE == facets.order - assert False == facets.order_ascending + assert facets.collection == CrawlableFacets.COLLECTION_FULL + assert facets.availability == CrawlableFacets.AVAILABLE_ALL + assert facets.order == CrawlableFacets.ORDER_LAST_UPDATE + assert facets.order_ascending is False - # There's only one enabled value for each facet 
group. - for group in facets.enabled_facets: - assert 1 == len(group) + [ + order, + availability, + collection, + distributor, + collectionName, + ] = facets.enabled_facets + + # The default facets are the only ones enabled. + for facet in [order, availability, collection]: + assert len(facet) == 1 + + # Except for distributor and collectionName, which have the default + # and data for each collection in the library. + for facet in [distributor, collectionName]: + assert len(facet) == 1 + len(db.default_library().collections) + + @pytest.mark.parametrize( + "group_name, expected", + [ + (Facets.ORDER_FACET_GROUP_NAME, Facets.ORDER_LAST_UPDATE), + (Facets.AVAILABILITY_FACET_GROUP_NAME, Facets.AVAILABLE_ALL), + (Facets.COLLECTION_FACET_GROUP_NAME, Facets.COLLECTION_FULL), + (Facets.DISTRIBUTOR_FACETS_GROUP_NAME, Facets.DISTRIBUTOR_ALL), + (Facets.COLLECTION_NAME_FACETS_GROUP_NAME, Facets.COLLECTION_NAME_ALL), + ], + ) + def test_available_none(self, group_name: str, expected: list[str]) -> None: + assert CrawlableFacets.available_facets(None, group_name) == [expected] + + @pytest.mark.parametrize( + "group_name, expected", + [ + (Facets.ORDER_FACET_GROUP_NAME, [Facets.ORDER_LAST_UPDATE]), + (Facets.AVAILABILITY_FACET_GROUP_NAME, [Facets.AVAILABLE_ALL]), + (Facets.COLLECTION_FACET_GROUP_NAME, [Facets.COLLECTION_FULL]), + (Facets.DISTRIBUTOR_FACETS_GROUP_NAME, [Facets.DISTRIBUTOR_ALL, "foo"]), + ( + Facets.COLLECTION_NAME_FACETS_GROUP_NAME, + [Facets.COLLECTION_NAME_ALL, "foo"], + ), + ], + ) + def test_available(self, group_name: str, expected: list[str]): + mock = MagicMock(spec=Library) + mock.enabled_facets = MagicMock(return_value=["foo"]) + + assert CrawlableFacets.available_facets(mock, group_name) == expected + + if group_name in [ + Facets.DISTRIBUTOR_FACETS_GROUP_NAME, + Facets.COLLECTION_NAME_FACETS_GROUP_NAME, + ]: + assert mock.enabled_facets.call_count == 1 + else: + assert mock.enabled_facets.call_count == 0 class TestCrawlableCollectionBasedLane: @@ -927,7 +987,7 @@ def test_init(self, db: DatabaseTransactionFixture): library = db.default_library() default_collection = db.default_collection() other_library_collection = db.collection() - library.collections.append(other_library_collection) + other_library_collection.libraries.append(library) # This collection is not associated with any library. unused_collection = db.collection() @@ -1111,12 +1171,13 @@ def test_constructor(self, db: DatabaseTransactionFixture): # The default library comes with a collection whose data # source is unspecified. Make another one whose data source _is_ # specified. + library = db.default_library() overdrive_collection = db.collection( "Test Overdrive Collection", protocol=ExternalIntegration.OVERDRIVE, data_source_name=DataSource.OVERDRIVE, ) - db.default_library().collections.append(overdrive_collection) + overdrive_collection.libraries.append(library) # Create another collection that is _not_ associated with this # library. It will not be used at all. @@ -1127,11 +1188,11 @@ def test_constructor(self, db: DatabaseTransactionFixture): ) # Pass in a JackpotFacets object - facets = JackpotFacets.default(db.default_library()) + facets = JackpotFacets.default(library) # The JackpotWorkList has no works of its own -- only its children # have works. - wl = JackpotWorkList(db.default_library(), facets) + wl = JackpotWorkList(library, facets) assert [] == wl.works(db.session) # Let's take a look at the children. 
@@ -1156,11 +1217,11 @@ def test_constructor(self, db: DatabaseTransactionFixture): # These worklists show ebooks and audiobooks from the two # collections associated with the default library. [ - default_ebooks, default_audio, - overdrive_ebooks, + default_ebooks, overdrive_audio, - ] = available_now + overdrive_ebooks, + ] = sorted(available_now, key=lambda x: x.display_name) assert ( "License source {OPDS} - Medium {Book} - Collection name {%s}" diff --git a/tests/api/test_marc.py b/tests/api/test_marc.py deleted file mode 100644 index b11dbacf0..000000000 --- a/tests/api/test_marc.py +++ /dev/null @@ -1,240 +0,0 @@ -import urllib.error -import urllib.parse -import urllib.request - -from pymarc import Record - -from api.marc import LibraryAnnotator -from core.config import Configuration -from core.marc import MARCExporter -from core.model import ConfigurationSetting, ExternalIntegration, create -from core.model.discovery_service_registration import DiscoveryServiceRegistration -from tests.fixtures.database import ( - DatabaseTransactionFixture, - IntegrationConfigurationFixture, -) - - -class TestLibraryAnnotator: - def test_annotate_work_record(self, db: DatabaseTransactionFixture): - # Mock class to verify that the correct methods - # are called by annotate_work_record. - class MockAnnotator(LibraryAnnotator): - called_with = dict() - - def add_marc_organization_code(self, record, marc_org): - self.called_with["add_marc_organization_code"] = [record, marc_org] - - def add_summary(self, record, work): - self.called_with["add_summary"] = [record, work] - - def add_simplified_genres(self, record, work): - self.called_with["add_simplified_genres"] = [record, work] - - def add_web_client_urls(self, record, library, identifier, integration): - self.called_with["add_web_client_urls"] = [ - record, - library, - identifier, - integration, - ] - - # Also check that the parent class annotate_work_record is called. - def add_distributor(self, record, pool): - self.called_with["add_distributor"] = [record, pool] - - def add_formats(self, record, pool): - self.called_with["add_formats"] = [record, pool] - - annotator = MockAnnotator(db.default_library()) - record = Record() - work = db.work(with_license_pool=True) - pool = work.license_pools[0] - edition = pool.presentation_edition - identifier = pool.identifier - - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], - ) - - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration - ) - - # If there are no settings, the only methods called will be add_web_client_urls - # and the parent class methods. - assert "add_marc_organization_code" not in annotator.called_with - assert "add_summary" not in annotator.called_with - assert "add_simplified_genres" not in annotator.called_with - assert [ - record, - db.default_library(), - identifier, - integration, - ] == annotator.called_with.get("add_web_client_urls") - assert [record, pool] == annotator.called_with.get("add_distributor") - assert [record, pool] == annotator.called_with.get("add_formats") - - # If settings are false, the methods still won't be called. 
- ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.INCLUDE_SUMMARY, db.default_library(), integration - ).value = "false" - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.INCLUDE_SIMPLIFIED_GENRES, - db.default_library(), - integration, - ).value = "false" - - annotator = MockAnnotator(db.default_library()) - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration - ) - - assert "add_marc_organization_code" not in annotator.called_with - assert "add_summary" not in annotator.called_with - assert "add_simplified_genres" not in annotator.called_with - assert [ - record, - db.default_library(), - identifier, - integration, - ] == annotator.called_with.get("add_web_client_urls") - assert [record, pool] == annotator.called_with.get("add_distributor") - assert [record, pool] == annotator.called_with.get("add_formats") - - # Once the include settings are true and the marc organization code is set, - # all methods are called. - ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.INCLUDE_SUMMARY, db.default_library(), integration - ).value = "true" - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.INCLUDE_SIMPLIFIED_GENRES, - db.default_library(), - integration, - ).value = "true" - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.MARC_ORGANIZATION_CODE, - db.default_library(), - integration, - ).value = "marc org" - - annotator = MockAnnotator(db.default_library()) - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration - ) - - assert [record, "marc org"] == annotator.called_with.get( - "add_marc_organization_code" - ) - assert [record, work] == annotator.called_with.get("add_summary") - assert [record, work] == annotator.called_with.get("add_simplified_genres") - assert [ - record, - db.default_library(), - identifier, - integration, - ] == annotator.called_with.get("add_web_client_urls") - assert [record, pool] == annotator.called_with.get("add_distributor") - assert [record, pool] == annotator.called_with.get("add_formats") - - def test_add_web_client_urls( - self, - db: DatabaseTransactionFixture, - create_integration_configuration: IntegrationConfigurationFixture, - ): - # Web client URLs can come from either the MARC export integration or - # a library registry integration. 
- - identifier = db.identifier(foreign_id="identifier") - lib_short_name = db.default_library().short_name - - # The URL for a work is constructed as: - # - //works/ - work_link_template = "{cm_base}/{lib}/works/{qid}" - # It is then encoded and the web client URL is constructed in this form: - # - /book/ - client_url_template = "{client_base}/book/{work_link}" - - qualified_identifier = urllib.parse.quote( - identifier.type + "/" + identifier.identifier, safe="" - ) - cm_base_url = "http://test-circulation-manager" - - expected_work_link = work_link_template.format( - cm_base=cm_base_url, lib=lib_short_name, qid=qualified_identifier - ) - encoded_work_link = urllib.parse.quote(expected_work_link, safe="") - - client_base_1 = "http://web_catalog" - client_base_2 = "http://another_web_catalog" - expected_client_url_1 = client_url_template.format( - client_base=client_base_1, work_link=encoded_work_link - ) - expected_client_url_2 = client_url_template.format( - client_base=client_base_2, work_link=encoded_work_link - ) - - # A few checks to ensure that our setup is useful. - assert lib_short_name is not None - assert len(lib_short_name) > 0 - assert client_base_1 != client_base_2 - assert expected_client_url_1 != expected_client_url_2 - assert expected_client_url_1.startswith(client_base_1) - assert expected_client_url_2.startswith(client_base_2) - - ConfigurationSetting.sitewide( - db.session, Configuration.BASE_URL_KEY - ).value = cm_base_url - - annotator = LibraryAnnotator(db.default_library()) - - # If no web catalog URLs are set for the library, nothing will be changed. - record = Record() - annotator.add_web_client_urls(record, db.default_library(), identifier) - assert [] == record.get_fields("856") - - # Add a URL from a library registry. - registry = create_integration_configuration.discovery_service() - create( - db.session, - DiscoveryServiceRegistration, - library=db.default_library(), - integration=registry, - web_client=client_base_1, - ) - - record = Record() - annotator.add_web_client_urls(record, db.default_library(), identifier) - [field] = record.get_fields("856") - assert ["4", "0"] == field.indicators - assert expected_client_url_1 == field.get_subfields("u")[0] - - # Add a manually configured URL on a MARC export integration. 
- integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], - ) - - ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.WEB_CLIENT_URL, db.default_library(), integration - ).value = client_base_2 - - record = Record() - annotator.add_web_client_urls( - record, db.default_library(), identifier, integration - ) - [field1, field2] = record.get_fields("856") - assert ["4", "0"] == field1.indicators - assert expected_client_url_2 == field1.get_subfields("u")[0] - - assert ["4", "0"] == field2.indicators - assert expected_client_url_1 == field2.get_subfields("u")[0] diff --git a/tests/api/test_millenium_patron.py b/tests/api/test_millenium_patron.py index a6664ab94..8289b9d6b 100644 --- a/tests/api/test_millenium_patron.py +++ b/tests/api/test_millenium_patron.py @@ -1,7 +1,8 @@ +from collections.abc import Callable from datetime import date, timedelta from decimal import Decimal from functools import partial -from typing import Any, Callable, List +from typing import Any from urllib import parse import pytest @@ -28,8 +29,8 @@ def __init__(self, content): class MockAPI(MilleniumPatronAPI): - queue: List[Any] - requests_made: List[Any] + queue: list[Any] + requests_made: list[Any] def __init__( self, diff --git a/tests/api/test_odl.py b/tests/api/test_odl.py index 62635d671..040a285c8 100644 --- a/tests/api/test_odl.py +++ b/tests/api/test_odl.py @@ -3,7 +3,7 @@ import datetime import json import urllib.parse -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock import dateutil @@ -619,7 +619,7 @@ def test_fulfill_success( delivery_mechanism: str, correct_type: str, correct_link: str, - links: Dict[str, Any], + links: dict[str, Any], ) -> None: # Fulfill a loan in a way that gives access to a license file. odl_api_test_fixture.license.setup(concurrency=1, available=1) # type: ignore[attr-defined] diff --git a/tests/api/test_odl2.py b/tests/api/test_odl2.py index b5a384bb7..cbca67109 100644 --- a/tests/api/test_odl2.py +++ b/tests/api/test_odl2.py @@ -1,5 +1,4 @@ import datetime -from typing import List, Optional import pytest from freezegun import freeze_time @@ -40,10 +39,10 @@ class TestODL2Importer: @staticmethod def _get_delivery_mechanism_by_drm_scheme_and_content_type( - delivery_mechanisms: List[LicensePoolDeliveryMechanism], + delivery_mechanisms: list[LicensePoolDeliveryMechanism], content_type: str, drm_scheme: str, - ) -> Optional[DeliveryMechanism]: + ) -> DeliveryMechanism | None: """Find a license pool in the list by its identifier. 
:param delivery_mechanisms: List of delivery mechanisms diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index 8d440b4b3..9a4d18dc5 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Callable, Union +from collections.abc import Callable from unittest.mock import MagicMock, patch import pytest @@ -58,7 +58,7 @@ def _auth_doc(without_links=False) -> str: if not without_links else {} ) - doc: dict[str, list[dict[str, Union[str, list]]]] = { + doc: dict[str, list[dict[str, str | list]]] = { "authentication": [ { **{"type": "http://opds-spec.org/auth/oauth/client_credentials"}, @@ -92,16 +92,6 @@ def opds_dist_api_fixture( class TestOPDSForDistributorsAPI: - def test_external_integration( - self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture - ): - assert ( - opds_dist_api_fixture.collection.external_integration - == opds_dist_api_fixture.api.external_integration( - opds_dist_api_fixture.db.session - ) - ) - def test__run_self_tests( self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture ): @@ -584,7 +574,9 @@ def test_import(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): # Both works were created, since we can use their acquisition links # to give copies to patrons. - [camelot, southern] = sorted(imported_works, key=lambda x: x.title) + [camelot, camelot_audio, southern] = sorted( + imported_works, key=lambda x: x.title + ) # Each work has a license pool. [camelot_pool] = camelot.license_pools @@ -608,6 +600,22 @@ def test_import(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): assert LicensePool.UNLIMITED_ACCESS == pool.licenses_owned assert LicensePool.UNLIMITED_ACCESS == pool.licenses_available assert (pool.work.last_update_time - now).total_seconds() <= 2 + assert pool.should_track_playtime == False + + # Audiobooks always track playtime + camelot_audio_pool = camelot_audio.license_pools[0] + assert camelot_audio_pool.should_track_playtime == True + [camelot_audio_acquisition_link] = [ + l + for l in camelot_audio_pool.identifier.links + if l.rel == Hyperlink.GENERIC_OPDS_ACQUISITION + and l.resource.representation.media_type + == Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE + ] + assert ( + "https://library.biblioboard.com/ext/api/media/04377e87-ab69-41c8-a2a4-812d55dc0953/assets/content.json" + == camelot_audio_acquisition_link.resource.representation.url + ) [camelot_acquisition_link] = [ l diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index 2d69ff793..42e71029e 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -7,8 +7,8 @@ import os import random from datetime import timedelta -from typing import TYPE_CHECKING, Any, Dict -from unittest.mock import MagicMock, PropertyMock, create_autospec, patch +from typing import TYPE_CHECKING, Any +from unittest.mock import MagicMock, create_autospec, patch import pytest from requests import Response @@ -59,9 +59,9 @@ from core.util.http import BadResponseException from tests.api.mockapi.overdrive import MockOverdriveAPI from tests.core.mock import DummyHTTPClient, MockRequestsResponse -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse if TYPE_CHECKING: from 
tests.fixtures.api_overdrive_files import OverdriveAPIFilesFixture @@ -568,14 +568,6 @@ def availability_link_list(self, content): # this page) and a link to the next page. assert result == (["an availability queue"], "http://next-page/") - def test_external_integration(self, overdrive_api_fixture: OverdriveAPIFixture): - assert ( - overdrive_api_fixture.collection.external_integration - == overdrive_api_fixture.api.external_integration( - overdrive_api_fixture.db.session - ) - ) - def test_lock_in_format(self, overdrive_api_fixture: OverdriveAPIFixture): # Verify which formats do or don't need to be locked in before # fulfillment. @@ -2127,25 +2119,12 @@ def test_refresh_patron_access_token_is_fulfillment( patron = db.patron() patron.authorization_identifier = "barcode" credential = db.credential(patron=patron) - db.default_collection().integration_configuration.protocol = "Overdrive" - db.default_collection().external_account_id = 1 - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{ - OverdriveConstants.OVERDRIVE_CLIENT_KEY: "user", - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: "password", - OverdriveConstants.OVERDRIVE_WEBSITE_ID: "100", - }, - ) - db.default_collection().integration_configuration.for_library( - patron.library.id, create=True - ) # Mocked testing credentials encoded_auth = base64.b64encode(b"TestingKey:TestingSecret") # use a real Overdrive API - od_api = OverdriveAPI(db.session, db.default_collection()) + od_api = OverdriveAPI(db.session, overdrive_api_fixture.collection) od_api._server_nickname = OverdriveConstants.TESTING_SERVERS # but mock the request methods od_api._do_post = MagicMock() @@ -2172,18 +2151,8 @@ def test_cannot_fulfill_error_audiobook( db = overdrive_api_fixture.db patron = db.patron() patron.authorization_identifier = "barcode" - db.default_collection().integration_configuration.protocol = "Overdrive" - db.default_collection().external_account_id = 1 - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{ - OverdriveConstants.OVERDRIVE_CLIENT_KEY: "user", - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: "password", - OverdriveConstants.OVERDRIVE_WEBSITE_ID: "100", - }, - ) # use a real Overdrive API - od_api = OverdriveAPI(db.session, db.default_collection()) + od_api = OverdriveAPI(db.session, overdrive_api_fixture.collection) od_api._server_nickname = OverdriveConstants.TESTING_SERVERS od_api.get_loan = MagicMock(return_value={"isFormatLockedIn": True}) od_api.get_download_link = MagicMock(return_value=None) @@ -2207,21 +2176,8 @@ def test_no_drm_fulfillment(self, overdrive_api_fixture: OverdriveAPIFixture): patron = db.patron() work = db.work(with_license_pool=True) patron.authorization_identifier = "barcode" - db.default_collection().integration_configuration.protocol = "Overdrive" - db.default_collection().external_account_id = 1 - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{ - OverdriveConstants.OVERDRIVE_CLIENT_KEY: "user", - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: "password", - OverdriveConstants.OVERDRIVE_WEBSITE_ID: "100", - }, - ) - db.default_collection().integration_configuration.for_library( - patron.library.id, create=True - ) - od_api = OverdriveAPI(db.session, db.default_collection()) + od_api = OverdriveAPI(db.session, overdrive_api_fixture.collection) od_api._server_nickname = OverdriveConstants.TESTING_SERVERS # Load the mock API data @@ -2297,7 +2253,8 @@ def token_post( pin = 
"patron_pin" # clear out any collections added before we add ours - library.collections = [] + for collection in library.collections: + collection.libraries = [] # Distinct credentials for the two OverDrive collections in which our # library has membership. @@ -2346,7 +2303,7 @@ def token_post( Goals.LICENSE_GOAL, {ExternalIntegration.OVERDRIVE: MockAPI} ), ) - od_apis: Dict[str, OverdriveAPI] = { + od_apis: dict[str, OverdriveAPI] = { api.collection.name: api # type: ignore[union-attr,misc] for api in list(circulation.api_for_collection.values()) } @@ -2495,7 +2452,7 @@ def make_direct_download_link(cls, download_link): error_url = "http://error/" # Here we don't even know the name of the format. - empty: Dict[str, Any] = dict() + empty: dict[str, Any] = dict() with pytest.raises(IOError) as excinfo: m(empty, error_url) assert "No linkTemplates for format (unknown)" in str(excinfo.value) @@ -3044,7 +3001,7 @@ def test_catch_up_from_with_failures_retried( db = overdrive_api_fixture.db class MockAPI: - tries: Dict[str, int] = {} + tries: dict[str, int] = {} def __init__(self, *ignore, **kwignore): self.licensepools = [] @@ -3109,7 +3066,7 @@ def test_catch_up_from_with_failures_all( db = overdrive_api_fixture.db class MockAPI: - tries: Dict[str, int] = {} + tries: dict[str, int] = {} def __init__(self, *ignore, **kwignore): self.licensepools = [] @@ -3807,8 +3764,10 @@ def test_to_collection(self, overdrive_api_fixture: OverdriveAPIFixture): p, collection = account.to_collection(session) assert p == parent assert parent == collection.parent - assert collection.external_account_id == account.library_id - assert ExternalIntegration.LICENSE_GOAL == collection.external_integration.goal + assert ( + collection.integration_configuration.settings_dict["external_account_id"] + == account.library_id + ) assert ExternalIntegration.OVERDRIVE == collection.protocol assert Goals.LICENSE_GOAL == collection.integration_configuration.goal assert ExternalIntegration.OVERDRIVE == collection.protocol @@ -3950,19 +3909,25 @@ def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture client_secret = "cs" library_token = "lt" - parent: Collection = db.collection( + library = db.library() + parent: Collection = MockOverdriveAPI.mock_collection( + db.session, + library, name=parent_library_name, - protocol=ExternalIntegration.OVERDRIVE, - external_account_id=parent_od_library_id, + library_id=parent_od_library_id, + client_key=client_key, + client_secret=client_secret, ) - child1: Collection = db.collection( + child1: Collection = MockOverdriveAPI.mock_collection( + db.session, + library, name=child1_library_name, - protocol=ExternalIntegration.OVERDRIVE, - external_account_id=child1_advantage_library_id, + library_id=child1_advantage_library_id, ) child1.parent = parent - overdrive_api = MagicMock() - overdrive_api.get_advantage_accounts.return_value = [ + overdrive_api = MockOverdriveAPI(db.session, parent) + mock_get_advantage_accounts = MagicMock() + mock_get_advantage_accounts.return_value = [ OverdriveAdvantageAccount( parent_od_library_id, child1_advantage_library_id, @@ -3976,10 +3941,8 @@ def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture child2_token, ), ] - - overdrive_api.client_key.return_value = bytes(client_key, "utf-8") - overdrive_api.client_secret.return_value = bytes(client_secret, "utf-8") - type(overdrive_api).collection_token = PropertyMock(return_value=library_token) + overdrive_api.get_advantage_accounts = mock_get_advantage_accounts + 
overdrive_api._collection_token = library_token with patch( "api.overdrive.GenerateOverdriveAdvantageAccountList._create_overdrive_api" @@ -4034,6 +3997,4 @@ def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture os.remove(output_file_path) assert last_index == 2 - overdrive_api.client_key.assert_called_once() - overdrive_api.client_secret.assert_called_once() overdrive_api.get_advantage_accounts.assert_called_once() diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index f66b0ecc1..d172241f6 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -4,27 +4,29 @@ import logging from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional -from unittest.mock import MagicMock, patch +from typing import TYPE_CHECKING +from unittest.mock import MagicMock, call, create_autospec, patch import pytest from _pytest.logging import LogCaptureFixture +from sqlalchemy.exc import NoResultFound from alembic.util import CommandError from api.adobe_vendor_id import AuthdataUtility from api.config import Configuration -from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI from core.external_search import ExternalSearchIndex -from core.lane import WorkList -from core.marc import MARCExporter +from core.integration.goals import Goals +from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings from core.model import ( LOCK_ID_DB_INIT, - CachedMARCFile, ConfigurationSetting, Credential, DataSource, - ExternalIntegration, + DiscoveryServiceRegistration, + IntegrationConfiguration, + Library, + MarcFile, SessionManager, create, ) @@ -43,7 +45,10 @@ if TYPE_CHECKING: from tests.fixtures.authenticator import SimpleAuthIntegrationFixture - from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.database import ( + DatabaseTransactionFixture, + IntegrationConfigurationFixture, + ) class TestAdobeAccountIDResetScript: @@ -111,215 +116,434 @@ def lane_script_fixture( return LaneScriptFixture(db, library_fixture) -class TestCacheMARCFilesFixture: +class CacheMARCFilesFixture: def __init__(self, db: DatabaseTransactionFixture): self.db = db - self.lane = db.lane(genres=["Science Fiction"]) - self.integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, ExternalIntegration.CATALOG_GOAL - ) - - self.exporter = MARCExporter(None, None, self.integration) - self.mock_records = MagicMock() self.mock_services = MagicMock() - self.exporter.records = self.mock_records + self.exporter = MagicMock(spec=MARCExporter) + self.library = self.db.default_library() + self.collection = self.db.collection() + self.collection.export_marc_records = True + self.collection.libraries += [self.library] + self.cm_base_url = "http://test-circulation-manager/" + + ConfigurationSetting.sitewide( + db.session, Configuration.BASE_URL_KEY + ).value = self.cm_base_url + + def integration(self, library: Library | None = None) -> IntegrationConfiguration: + if library is None: + library = self.library + + return self.db.integration_configuration( + protocol=MARCExporter.__name__, + goal=Goals.CATALOG_GOAL, + libraries=[library], + ) - def script(self, cmd_args: Optional[list[str]] = None) -> CacheMARCFiles: + def script(self, cmd_args: list[str] | None = None) -> CacheMARCFiles: cmd_args = cmd_args or [] return CacheMARCFiles( - self.db.session, services=self.mock_services, cmd_args=cmd_args - ) - - def assert_call(self, call: Any) -> None: - 
assert call.args[0] == self.lane - assert isinstance(call.args[1], MARCLibraryAnnotator) - assert call.args[2] == self.mock_services.storage.public.return_value - - def create_cached_file(self, end_time: datetime.datetime) -> CachedMARCFile: - representation, _ = self.db.representation() - cached, _ = create( self.db.session, - CachedMARCFile, - library=self.db.default_library(), - lane=self.lane, - representation=representation, - end_time=end_time, + exporter=self.exporter, + services=self.mock_services, + cmd_args=cmd_args, ) - return cached @pytest.fixture -def cache_marc_files(db: DatabaseTransactionFixture) -> TestCacheMARCFilesFixture: - return TestCacheMARCFilesFixture(db) +def cache_marc_files(db: DatabaseTransactionFixture) -> CacheMARCFilesFixture: + return CacheMARCFilesFixture(db) class TestCacheMARCFiles: - def test_should_process_library(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - script = CacheMARCFiles(db.session, cmd_args=[]) - assert False == script.should_process_library(db.default_library()) - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], + def test_settings(self, cache_marc_files: CacheMARCFilesFixture): + # Test that the script gets the correct settings. + test_library = cache_marc_files.library + other_library = cache_marc_files.db.library() + + expected_settings = MarcExporterSettings(update_frequency=3) + expected_library_settings = MarcExporterLibrarySettings( + organization_code="test", + include_summary=True, + include_genres=True, + ) + + other_library_settings = MarcExporterLibrarySettings( + organization_code="other", + ) + + integration = cache_marc_files.integration(test_library) + integration.libraries += [other_library] + + test_library_integration = integration.for_library(test_library) + assert test_library_integration is not None + other_library_integration = integration.for_library(other_library) + assert other_library_integration is not None + MARCExporter.settings_update(integration, expected_settings) + MARCExporter.library_settings_update( + test_library_integration, expected_library_settings + ) + MARCExporter.library_settings_update( + other_library_integration, other_library_settings ) - assert True == script.should_process_library(db.default_library()) - - def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - parent = db.lane() - parent.size = 100 - child = db.lane(parent=parent) - child.size = 10 - grandchild = db.lane(parent=child) - grandchild.size = 1 - wl = WorkList() - empty = db.lane(fiction=False) - empty.size = 0 - - script = CacheMARCFiles(db.session, cmd_args=[]) - script.max_depth = 1 - assert True == script.should_process_lane(parent) - assert True == script.should_process_lane(child) - assert False == script.should_process_lane(grandchild) - assert True == script.should_process_lane(wl) - assert False == script.should_process_lane(empty) - - script.max_depth = 0 - assert True == script.should_process_lane(parent) - assert False == script.should_process_lane(child) - assert False == script.should_process_lane(grandchild) - assert True == script.should_process_lane(wl) - assert False == script.should_process_lane(empty) - - def test_process_lane_never_run(self, cache_marc_files: TestCacheMARCFilesFixture): + script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + actual_settings, 
actual_library_settings = script.settings(test_library) - # If the script has never been run before, it runs the exporter once - # to create a file with all records. - assert cache_marc_files.mock_records.call_count == 1 - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args) + assert actual_settings == expected_settings + assert actual_library_settings == expected_library_settings - def test_process_lane_cached_update( - self, cache_marc_files: TestCacheMARCFilesFixture + def test_settings_none(self, cache_marc_files: CacheMARCFilesFixture): + # If there are no settings, the setting function raises an exception. + test_library = cache_marc_files.library + script = cache_marc_files.script() + with pytest.raises(NoResultFound): + script.settings(test_library) + + def test_process_libraries_no_storage( + self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture ): - # If we have a cached file already, and it's old enough, the script will - # run the exporter twice, first to update that file and second to create - # a file with changes since that first file was originally created. - db = cache_marc_files.db - now = utc_now() - last_week = now - datetime.timedelta(days=7) - cache_marc_files.create_cached_file(last_week) - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 3 + # If there is no storage integration, the script logs an error and returns. + script = cache_marc_files.script() + script.storage_service = None + caplog.set_level(logging.INFO) + script.process_libraries([MagicMock(), MagicMock()]) + assert "No storage service was found" in caplog.text + + def test_get_collections(self, cache_marc_files: CacheMARCFilesFixture): + # Test that the script gets the correct collections. + test_library = cache_marc_files.library + collection1 = cache_marc_files.collection + + # Second collection is configured to export MARC records. + collection2 = cache_marc_files.db.collection() + collection2.export_marc_records = True + collection2.libraries += [test_library] + + # Third collection is not configured to export MARC records. + collection3 = cache_marc_files.db.collection() + collection3.export_marc_records = False + collection3.libraries += [test_library] + + # Fourth collection is configured to export MARC records, but is + # configured to export only to a different library. + other_library = cache_marc_files.db.library() + other_collection = cache_marc_files.db.collection() + other_collection.export_marc_records = True + other_collection.libraries += [other_library] script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert cache_marc_files.mock_records.call_count == 2 - # First call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) + # We should get back the two collections that are configured to export + # MARC records to this library. + collections = script.get_collections(test_library) + assert set(collections) == {collection1, collection2} - # Second call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) - assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - < last_week - ) + # Set collection3 to export MARC records to this library. + collection3.export_marc_records = True + + # We should get back all three collections that are configured to export + # MARC records to this library. 
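+        # (get_collections keys off each collection's export_marc_records flag,
+        # which is why toggling collection3 above changes the result.)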
+ collections = script.get_collections(test_library) + assert set(collections) == {collection1, collection2, collection3} - def test_process_lane_cached_recent( - self, cache_marc_files: TestCacheMARCFilesFixture + def test_get_web_client_urls( + self, + db: DatabaseTransactionFixture, + cache_marc_files: CacheMARCFilesFixture, + create_integration_configuration: IntegrationConfigurationFixture, ): - # If we already have a recent cached file, the script won't do anything. - db = cache_marc_files.db - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - cache_marc_files.create_cached_file(yesterday) - ConfigurationSetting.for_library_and_externalintegration( + # No web client URLs are returned if there are no discovery service registrations. + script = cache_marc_files.script() + assert script.get_web_client_urls(cache_marc_files.library) == [] + + # If we pass in a configured web client URL, that URL is returned. + assert script.get_web_client_urls( + cache_marc_files.library, "http://web-client" + ) == ["http://web-client"] + + # Add a URL from a library registry. + registry = create_integration_configuration.discovery_service() + create( db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 3 + DiscoveryServiceRegistration, + library=cache_marc_files.library, + integration=registry, + web_client="http://web-client-url/", + ) + assert script.get_web_client_urls(cache_marc_files.library) == [ + "http://web-client-url/" + ] - script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert cache_marc_files.mock_records.call_count == 0 + # URL from library registry and configured URL are both returned. + assert script.get_web_client_urls( + cache_marc_files.library, "http://web-client" + ) == [ + "http://web-client-url/", + "http://web-client", + ] - def test_process_lane_cached_recent_force( - self, cache_marc_files: TestCacheMARCFilesFixture + def test_process_library_not_configured( + self, + cache_marc_files: CacheMARCFilesFixture, ): - # But we can force it to run anyway. - db = cache_marc_files.db - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - last_week = now - datetime.timedelta(days=7) - cache_marc_files.create_cached_file(yesterday) - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 3 + script = cache_marc_files.script() + mock_process_collection = create_autospec(script.process_collection) + script.process_collection = mock_process_collection + mock_settings = create_autospec(script.settings) + script.settings = mock_settings + mock_settings.side_effect = NoResultFound + + # If there is no integration configuration for the library, the script + # does nothing. 
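+        # (settings() raising NoResultFound is how a missing configuration
+        # surfaces here, so process_collection should never be reached.)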
+ script.process_library(cache_marc_files.library) + mock_process_collection.assert_not_called() + + def test_process_library(self, cache_marc_files: CacheMARCFilesFixture): + script = cache_marc_files.script() + mock_annotator_cls = MagicMock() + mock_process_collection = create_autospec(script.process_collection) + script.process_collection = mock_process_collection + mock_settings = create_autospec(script.settings) + script.settings = mock_settings + settings = MarcExporterSettings(update_frequency=3) + library_settings = MarcExporterLibrarySettings( + organization_code="test", + web_client_url="http://web-client-url/", + include_summary=True, + include_genres=False, + ) + mock_settings.return_value = ( + settings, + library_settings, + ) - script = cache_marc_files.script(cmd_args=["--force"]) - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert cache_marc_files.mock_records.call_count == 2 + before_call_time = utc_now() + + # If there is an integration configuration for the library, the script + # processes all the collections for that library. + script.process_library( + cache_marc_files.library, annotator_cls=mock_annotator_cls + ) + + after_call_time = utc_now() + + mock_annotator_cls.assert_called_once_with( + cache_marc_files.cm_base_url, + cache_marc_files.library.short_name, + [library_settings.web_client_url], + library_settings.organization_code, + library_settings.include_summary, + library_settings.include_genres, + ) + + assert mock_process_collection.call_count == 1 + ( + library, + collection, + annotator, + update_frequency, + creation_time, + ) = mock_process_collection.call_args.args + assert library == cache_marc_files.library + assert collection == cache_marc_files.collection + assert annotator == mock_annotator_cls.return_value + assert update_frequency == settings.update_frequency + assert creation_time > before_call_time + assert creation_time < after_call_time + + def test_last_updated( + self, db: DatabaseTransactionFixture, cache_marc_files: CacheMARCFilesFixture + ): + script = cache_marc_files.script() - # First call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) + # If there is no cached file, we return None. + assert ( + script.last_updated(cache_marc_files.library, cache_marc_files.collection) + is None + ) - # Second call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + # If there is a cached file, we return the time it was created. + file1 = MarcFile( + library=cache_marc_files.library, + collection=cache_marc_files.collection, + created=datetime_utc(1984, 5, 8), + key="file1", + ) + db.session.add(file1) assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - < yesterday + script.last_updated(cache_marc_files.library, cache_marc_files.collection) + == file1.created ) + + # If there are multiple cached files, we return the time of the most recent one. 
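+        # (file2 below is created with utc_now(), so it is newer than file1's
+        # 1984 timestamp and should win.)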
+        file2 = MarcFile(
+            library=cache_marc_files.library,
+            collection=cache_marc_files.collection,
+            created=utc_now(),
+            key="file2",
+        )
+        db.session.add(file2)
         assert (
-            cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"]
-            > last_week
+            script.last_updated(cache_marc_files.library, cache_marc_files.collection)
+            == file2.created
         )

-    def test_process_lane_cached_frequency_zero(
-        self, cache_marc_files: TestCacheMARCFilesFixture
+    def test_force(self, cache_marc_files: CacheMARCFilesFixture):
+        script = cache_marc_files.script()
+        assert script.force is False
+
+        script = cache_marc_files.script(cmd_args=["--force"])
+        assert script.force is True
+
+    @pytest.mark.parametrize(
+        "last_updated, force, update_frequency, run_exporter",
+        [
+            pytest.param(None, False, 10, True, id="never_run_before"),
+            pytest.param(None, True, 10, True, id="never_run_before_w_force"),
+            pytest.param(
+                utc_now() - datetime.timedelta(days=5),
+                False,
+                10,
+                False,
+                id="recently_run",
+            ),
+            pytest.param(
+                utc_now() - datetime.timedelta(days=5),
+                True,
+                10,
+                True,
+                id="recently_run_w_force",
+            ),
+            pytest.param(
+                utc_now() - datetime.timedelta(days=5),
+                False,
+                0,
+                True,
+                id="recently_run_w_frequency_0",
+            ),
+            pytest.param(
+                utc_now() - datetime.timedelta(days=15),
+                False,
+                10,
+                True,
+                id="not_recently_run",
+            ),
+            pytest.param(
+                utc_now() - datetime.timedelta(days=15),
+                True,
+                10,
+                True,
+                id="not_recently_run_w_force",
+            ),
+            pytest.param(
+                utc_now() - datetime.timedelta(days=15),
+                False,
+                0,
+                True,
+                id="not_recently_run_w_frequency_0",
+            ),
+        ],
+    )
+    def test_process_collection_skip(
+        self,
+        cache_marc_files: CacheMARCFilesFixture,
+        caplog: LogCaptureFixture,
+        last_updated: datetime.datetime | None,
+        force: bool,
+        update_frequency: int,
+        run_exporter: bool,
     ):
-        # The update frequency can also be 0, in which case it will always run.
-        # If we already have a recent cached file, the script won't do anything.
-        db = cache_marc_files.db
-        now = utc_now()
-        yesterday = now - datetime.timedelta(days=1)
-        last_week = now - datetime.timedelta(days=7)
-        cache_marc_files.create_cached_file(yesterday)
-        ConfigurationSetting.for_library_and_externalintegration(
-            db.session,
-            MARCExporter.UPDATE_FREQUENCY,
-            db.default_library(),
-            cache_marc_files.integration,
-        ).value = 0
         script = cache_marc_files.script()
-        script.process_lane(cache_marc_files.lane, cache_marc_files.exporter)
+        script.exporter = MagicMock()
+        now = utc_now()
+        caplog.set_level(logging.INFO)
+
+        script.force = force
+        script.last_updated = MagicMock(return_value=last_updated)
+        script.process_collection(
+            cache_marc_files.library,
+            cache_marc_files.collection,
+            MagicMock(),
+            update_frequency,
+            now,
+        )

-        assert cache_marc_files.mock_records.call_count == 2
+        if run_exporter:
+            assert script.exporter.records.call_count > 0
+            assert "Processed collection" in caplog.text
+        else:
+            assert script.exporter.records.call_count == 0
+            assert "Skipping collection" in caplog.text

-        # First call
-        cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0])
+    def test_process_collection_never_called(
+        self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture
+    ):
+        # If the collection has not been processed before, the script processes
+        # the collection and creates a full export.
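+        # (last_updated is mocked to return None below, which is the
+        # "never processed" signal that triggers the single full-export call.)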
+        caplog.set_level(logging.INFO)
+        script = cache_marc_files.script()
+        mock_exporter = MagicMock(spec=MARCExporter)
+        script.exporter = mock_exporter
+        script.last_updated = MagicMock(return_value=None)
+        mock_annotator = MagicMock()
+        creation_time = utc_now()
+        script.process_collection(
+            cache_marc_files.library,
+            cache_marc_files.collection,
+            mock_annotator,
+            10,
+            creation_time,
+        )
+        mock_exporter.records.assert_called_once_with(
+            cache_marc_files.library,
+            cache_marc_files.collection,
+            mock_annotator,
+            creation_time=creation_time,
+        )
+        assert "Processed collection" in caplog.text

-        # Second call
-        cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1])
+    def test_process_collection_with_last_updated(
+        self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture
+    ):
+        # If the collection has been processed before, the script processes
+        # the collection, creates a full export and a delta export.
+        caplog.set_level(logging.INFO)
+        script = cache_marc_files.script()
+        mock_exporter = MagicMock(spec=MARCExporter)
+        script.exporter = mock_exporter
+        last_updated = utc_now() - datetime.timedelta(days=20)
+        script.last_updated = MagicMock(return_value=last_updated)
+        mock_annotator = MagicMock()
+        creation_time = utc_now()
+        script.process_collection(
+            cache_marc_files.library,
+            cache_marc_files.collection,
+            mock_annotator,
+            10,
+            creation_time,
         )
-        assert (
-            cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"]
-            < yesterday
+        assert "Processed collection" in caplog.text
+        assert mock_exporter.records.call_count == 2
+
+        full_call = call(
+            cache_marc_files.library,
+            cache_marc_files.collection,
+            mock_annotator,
+            creation_time=creation_time,
+        )
+
+        delta_call = call(
+            cache_marc_files.library,
+            cache_marc_files.collection,
+            mock_annotator,
+            creation_time=creation_time,
+            since_time=last_updated,
         )
-        assert (
-            cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"]
-            > last_week
-        )
+        mock_exporter.records.assert_has_calls([full_call, delta_call])
+

 class TestInstanceInitializationScript:
     # These are some basic tests for the instance initialization script.
It is tested diff --git a/tests/api/test_selftest.py b/tests/api/test_selftest.py index 18381ac08..b7eb79f42 100644 --- a/tests/api/test_selftest.py +++ b/tests/api/test_selftest.py @@ -296,6 +296,10 @@ def _no_delivery_mechanisms_test(self): self._no_delivery_mechanisms_called = True return "1" + @property + def collection(self) -> None: + return None + mock = Mock() results = [x for x in mock._run_self_tests(MagicMock())] assert ["1"] == [x.result for x in results] diff --git a/tests/api/test_simple_auth.py b/tests/api/test_simple_auth.py index 6e048aa9d..be13ebdb7 100644 --- a/tests/api/test_simple_auth.py +++ b/tests/api/test_simple_auth.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from functools import partial -from typing import Callable import pytest diff --git a/tests/api/test_sirsidynix_auth_provider.py b/tests/api/test_sirsidynix_auth_provider.py index 7216cbdcd..400c4fcfd 100644 --- a/tests/api/test_sirsidynix_auth_provider.py +++ b/tests/api/test_sirsidynix_auth_provider.py @@ -1,6 +1,7 @@ +from collections.abc import Callable from copy import deepcopy from functools import partial -from typing import Any, Callable, Dict, List, Tuple +from typing import Any from unittest.mock import MagicMock, call, patch import pytest @@ -329,7 +330,7 @@ def test_blocked_patron_status_info( "amountOwed": {"currencyCode": "USD", "amount": "0.00"}, } - statuses: List[Tuple[Dict[str, bool], Any]] = [ + statuses: list[tuple[dict[str, bool], Any]] = [ ({"hasMaxDaysWithFines": True}, PatronData.EXCESSIVE_FINES), ({"hasMaxFines": True}, PatronData.EXCESSIVE_FINES), ({"hasMaxLostItem": True}, PatronData.TOO_MANY_LOST), diff --git a/tests/core/configuration/test_library.py b/tests/core/configuration/test_library.py index ad00aa286..93289c850 100644 --- a/tests/core/configuration/test_library.py +++ b/tests/core/configuration/test_library.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from functools import partial -from typing import Callable, List, Optional import pytest @@ -31,8 +31,8 @@ def library_settings() -> LibrarySettingsFixture: ], ) def test_validate_language_codes( - languages: Optional[List[str]], - expected: Optional[List[str]], + languages: list[str] | None, + expected: list[str] | None, library_settings: LibrarySettingsFixture, ) -> None: settings = library_settings(large_collection_languages=languages) diff --git a/tests/core/conftest.py b/tests/core/conftest.py index fc9177cad..1a48ae45f 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -1,3 +1,5 @@ +from freezegun.config import configure as fg_configure + pytest_plugins = [ "tests.fixtures.announcements", "tests.fixtures.csv_files", @@ -12,4 +14,10 @@ "tests.fixtures.services", "tests.fixtures.time", "tests.fixtures.tls_server", + "tests.fixtures.webserver", ] + +# Make sure if we are using pyinstrument to profile tests, that +# freezegun doesn't interfere with it. 
+# See: https://github.com/spulec/freezegun#ignore-packages +fg_configure(extend_ignore_list=["pyinstrument"]) diff --git a/tests/core/files/opds/content_server_mini.opds b/tests/core/files/opds/content_server_mini.opds index 10bc4ad2e..bf224cec6 100644 --- a/tests/core/files/opds/content_server_mini.opds +++ b/tests/core/files/opds/content_server_mini.opds @@ -1,10 +1,10 @@ - + http://localhost:5000/ Open-Access Content 2015-01-02T16:56:40Z - + urn:librarysimplified.org/terms/id/Gutenberg%20ID/10441 The Green Mouse @@ -31,7 +31,8 @@ en Project Gutenberg - + + true @@ -53,6 +54,7 @@ en Project Gutenberg + true diff --git a/tests/core/files/opds2/feed.json b/tests/core/files/opds2/feed.json index 918afa4fd..5cbc4d5a4 100644 --- a/tests/core/files/opds2/feed.json +++ b/tests/core/files/opds2/feed.json @@ -12,12 +12,13 @@ "publications": [ { "metadata": { - "@type": "http://schema.org/Book", + "@type": "http://schema.org/Audiobook", "title": "Moby-Dick", "author": "Herman Melville", "identifier": "urn:isbn:978-3-16-148410-0", "duration": 100.2, "language": "en", + "http://palaceproject.io/terms/timeTracking": true, "publisher": { "name": "Test Publisher" }, @@ -36,12 +37,12 @@ { "rel": "self", "href": "http://example.org/publication.json", - "type": "application/opds-publication+json" + "type": "application/audiobook+json" }, { "rel": "http://opds-spec.org/acquisition/open-access", "href": "http://example.org/moby-dick.epub", - "type": "application/epub+zip" + "type": "application/audiobook+json" } ], "images": [ @@ -68,6 +69,7 @@ "@type": "http://schema.org/Book", "title": "Adventures of Huckleberry Finn", "description": "Adventures of Huckleberry Finn is a novel by Mark Twain, first published in the United Kingdom in December 1884 and in the United States in February 1885.", + "http://palaceproject.io/terms/timeTracking": true, "author": [ { "name": "Mark Twain" diff --git a/tests/core/integration/test_settings.py b/tests/core/integration/test_settings.py index f7b858918..fb2221738 100644 --- a/tests/core/integration/test_settings.py +++ b/tests/core/integration/test_settings.py @@ -1,6 +1,5 @@ import dataclasses import logging -from typing import Optional from unittest.mock import MagicMock import pytest @@ -33,7 +32,7 @@ def custom_validator(cls, v): raise SettingsValidationError(mock_problem_detail) return v - test: Optional[str] = FormField( + test: str | None = FormField( "test", form=ConfigurationFormItem(label="Test", description="Test description"), ) @@ -160,6 +159,13 @@ def test_settings_extra_args(mock_settings, caplog): assert len(caplog.records) == 1 assert "Unexpected extra argument 'extra' for model MockSettings" in caplog.text + # Exclude extra defaults to False, but we call it explicitly here + # to make sure it can be explicitly set to False. 
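+    # (The unexpected "extra" argument is kept on the model, which is why it
+    # round-trips through dict() unless exclude_extra=True is passed.)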
+ assert settings.dict(exclude_extra=False) == {"number": 1, "extra": "extra"} + + # The extra args will be ignored if we call dict with exclude_extra=True + assert settings.dict(exclude_extra=True) == {"number": 1} + def test_settings_logger(mock_settings): log = mock_settings.logger() diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py index b7bd95e8a..fdc182ec1 100644 --- a/tests/core/jobs/test_playtime_entries.py +++ b/tests/core/jobs/test_playtime_entries.py @@ -2,7 +2,6 @@ import re from datetime import datetime, timedelta -from typing import List from unittest.mock import MagicMock, call, patch import pytest @@ -11,13 +10,14 @@ from api.model.time_tracking import PlaytimeTimeEntry from core.config import Configuration +from core.equivalents_coverage import EquivalentIdentifiersCoverageProvider from core.jobs.playtime_entries import ( PlaytimeEntriesEmailReportsScript, PlaytimeEntriesSummationScript, ) from core.model import create from core.model.collection import Collection -from core.model.identifier import Identifier +from core.model.identifier import Equivalency, Identifier from core.model.library import Library from core.model.time_tracking import PlaytimeEntry, PlaytimeSummary from core.util.datetime_helpers import datetime_utc, previous_months, utc_now @@ -30,7 +30,7 @@ def create_playtime_entries( collection: Collection, library: Library, *entries: PlaytimeTimeEntry, -) -> List[PlaytimeEntry]: +) -> list[PlaytimeEntry]: all_inserted = [] for entry in entries: inserted = PlaytimeEntry( @@ -200,8 +200,8 @@ def test_reap_processed_entries(self, db: DatabaseTransactionFixture): ) == [("4",), ("5",)] -def date3m(days): - return previous_months(number_of_months=3)[0] + timedelta(days=days) +def date1m(days): + return previous_months(number_of_months=1)[0] + timedelta(days=days) def playtime(session, identifier, collection, library, timestamp, total_seconds): @@ -229,60 +229,132 @@ def test_do_run(self, db: DatabaseTransactionFixture): collection2 = db.collection() library2 = db.library() - playtime(db.session, identifier, collection, library, date3m(3), 1) - playtime(db.session, identifier, collection, library, date3m(31), 2) + isbn_ids: dict[str, Identifier] = { + "i1": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="080442957X" + ), + "i2": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="9788175257665" + ), + } + identifier.equivalencies = [ + Equivalency( + input_id=identifier.id, output_id=isbn_ids["i1"].id, strength=0.5 + ), + Equivalency( + input_id=isbn_ids["i1"].id, output_id=isbn_ids["i2"].id, strength=1 + ), + ] + strongest_isbn = isbn_ids["i2"].identifier + no_isbn = "" + + # We're using the RecursiveEquivalencyCache, so must refresh it. 
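+        # (The provider run below rebuilds that cache, letting the report
+        # resolve each identifier to its strongest equivalent ISBN.)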
+ EquivalentIdentifiersCoverageProvider(db.session).run() + + playtime(db.session, identifier, collection, library, date1m(3), 1) + playtime(db.session, identifier, collection, library, date1m(31), 2) playtime( - db.session, identifier, collection, library, date3m(-31), 60 - ) # out of range: more than a month prior to the quarter + db.session, identifier, collection, library, date1m(-31), 60 + ) # out of range: prior to the beginning of the default reporting period playtime( - db.session, identifier, collection, library, date3m(95), 60 + db.session, identifier, collection, library, date1m(95), 60 ) # out of range: future - playtime(db.session, identifier2, collection, library, date3m(3), 5) - playtime(db.session, identifier2, collection, library, date3m(4), 6) + playtime(db.session, identifier2, collection, library, date1m(3), 5) + playtime(db.session, identifier2, collection, library, date1m(4), 6) # Collection2 - playtime(db.session, identifier, collection2, library, date3m(3), 100) + playtime(db.session, identifier, collection2, library, date1m(3), 100) # library2 - playtime(db.session, identifier, collection, library2, date3m(3), 200) + playtime(db.session, identifier, collection, library2, date1m(3), 200) # collection2 library2 - playtime(db.session, identifier, collection2, library2, date3m(3), 300) + playtime(db.session, identifier, collection2, library2, date1m(3), 300) reporting_name = "test cm" - # Horrible unbracketted syntax for python 3.8 - with patch("core.jobs.playtime_entries.csv.writer") as writer, patch( - "core.jobs.playtime_entries.EmailManager" - ) as email, patch( - "core.jobs.playtime_entries.os.environ", - new={ - Configuration.REPORTING_EMAIL_ENVIRONMENT_VARIABLE: "reporting@test.email", - Configuration.REPORTING_NAME_ENVIRONMENT_VARIABLE: reporting_name, - }, + with ( + patch("core.jobs.playtime_entries.csv.writer") as writer, + patch("core.jobs.playtime_entries.EmailManager") as email, + patch( + "core.jobs.playtime_entries.os.environ", + new={ + Configuration.REPORTING_EMAIL_ENVIRONMENT_VARIABLE: "reporting@test.email", + Configuration.REPORTING_NAME_ENVIRONMENT_VARIABLE: reporting_name, + }, + ), ): + # Act PlaytimeEntriesEmailReportsScript(db.session).run() + # Assert assert ( writer().writerow.call_count == 6 ) # 1 header, 5 identifier,collection,library entries - cutoff = date3m(0).replace(day=1) + cutoff = date1m(0).replace(day=1) until = utc_now().date().replace(day=1) column1 = f"{cutoff} - {until}" call_args = writer().writerow.call_args_list assert call_args == [ call( - ["date", "urn", "collection", "library", "title", "total seconds"] + [ + "date", + "urn", + "isbn", + "collection", + "library", + "title", + "total seconds", + ] ), # Header - call((column1, identifier.urn, collection2.name, library2.name, None, 300)), - call((column1, identifier.urn, collection2.name, library.name, None, 100)), - call((column1, identifier.urn, collection.name, library2.name, None, 200)), call( - (column1, identifier.urn, collection.name, library.name, None, 3) + ( + column1, + identifier.urn, + strongest_isbn, + collection2.name, + library2.name, + None, + 300, + ) + ), + call( + ( + column1, + identifier.urn, + strongest_isbn, + collection2.name, + library.name, + None, + 100, + ) + ), + call( + ( + column1, + identifier.urn, + strongest_isbn, + collection.name, + library2.name, + None, + 200, + ) + ), + call( + ( + column1, + identifier.urn, + strongest_isbn, + collection.name, + library.name, + None, + 1, + ) ), # Identifier without edition call( ( column1, 
identifier2.urn, + no_isbn, collection.name, library.name, edition.title, @@ -305,7 +377,7 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): identifier = db.identifier() collection = db.default_collection() library = db.default_library() - entry = playtime(db.session, identifier, collection, library, date3m(20), 1) + _ = playtime(db.session, identifier, collection, library, date1m(20), 1) with patch("core.jobs.playtime_entries.os.environ", new={}): script = PlaytimeEntriesEmailReportsScript(db.session) @@ -314,7 +386,86 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): assert script._log.error.call_count == 1 assert script._log.warning.call_count == 1 - assert "date,urn,collection," in script._log.warning.call_args[0][0] + assert "date,urn,isbn,collection," in script._log.warning.call_args[0][0] + + @pytest.mark.parametrize( + "id_key, equivalents, default_value, expected_isbn", + [ + # If the identifier is an ISBN, we will not use an equivalency. + [ + "i1", + (("g1", "g2", 1), ("g2", "i1", 1), ("g1", "i2", 0.5)), + "", + "080442957X", + ], + [ + "i2", + (("g1", "g2", 1), ("g2", "i1", 0.5), ("g1", "i2", 1)), + "", + "9788175257665", + ], + ["i1", (("i1", "i2", 200),), "", "080442957X"], + ["i2", (("i2", "i1", 200),), "", "9788175257665"], + # If identifier is not an ISBN, but has an equivalency that is, use the strongest match. + [ + "g2", + (("g1", "g2", 1), ("g2", "i1", 1), ("g1", "i2", 0.5)), + "", + "080442957X", + ], + [ + "g2", + (("g1", "g2", 1), ("g2", "i1", 0.5), ("g1", "i2", 1)), + "", + "9788175257665", + ], + # If we don't find an equivalent ISBN identifier, then we'll use the default. + ["g2", (), "default value", "default value"], + ["g1", (("g1", "g2", 1),), "default value", "default value"], + # If identifier is None, expect default value. + [None, (), "default value", "default value"], + ], + ) + def test__isbn_for_identifier( + self, + db: DatabaseTransactionFixture, + id_key: str | None, + equivalents: tuple[tuple[str, str, int | float]], + default_value: str, + expected_isbn: str, + ): + ids: dict[str, Identifier] = { + "i1": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="080442957X" + ), + "i2": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="9788175257665" + ), + "g1": db.identifier(identifier_type=Identifier.GUTENBERG_ID), + "g2": db.identifier(identifier_type=Identifier.GUTENBERG_ID), + } + equivalencies = [ + Equivalency( + input_id=ids[equivalent[0]].id, + output_id=ids[equivalent[1]].id, + strength=equivalent[2], + ) + for equivalent in equivalents + ] + test_identifier: Identifier | None = ids[id_key] if id_key is not None else None + if test_identifier is not None: + test_identifier.equivalencies = equivalencies + + # We're using the RecursiveEquivalencyCache, so must refresh it. 
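+        # (Without the refresh below, the Equivalency rows created above would
+        # not be visible to _isbn_for_identifier.)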
+ EquivalentIdentifiersCoverageProvider(db.session).run() + + # Act + result = PlaytimeEntriesEmailReportsScript._isbn_for_identifier( + test_identifier, + default_value=default_value, + ) + # Assert + assert result == expected_isbn @pytest.mark.parametrize( "current_utc_time, start_arg, expected_start, until_arg, expected_until", @@ -323,14 +474,14 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): [ datetime(2020, 1, 1, 0, 0, 0), None, - datetime_utc(2019, 10, 1, 0, 0, 0), + datetime_utc(2019, 12, 1, 0, 0, 0), None, datetime_utc(2020, 1, 1, 0, 0, 0), ], [ datetime(2020, 1, 31, 0, 0, 0), None, - datetime_utc(2019, 10, 1, 0, 0, 0), + datetime_utc(2019, 12, 1, 0, 0, 0), None, datetime_utc(2020, 1, 1, 0, 0, 0), ], @@ -346,9 +497,9 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): [ datetime(2020, 1, 31, 0, 0, 0), None, - datetime_utc(2019, 10, 1, 0, 0, 0), - "2019-11-20", - datetime_utc(2019, 11, 20, 0, 0, 0), + datetime_utc(2019, 12, 1, 0, 0, 0), + "2019-12-20", + datetime_utc(2019, 12, 20, 0, 0, 0), ], # When both dates are specified, the current datetime doesn't matter. # Both dates specified, but we test at a specific time here anyway. @@ -413,7 +564,7 @@ def test_parse_command_line( [ datetime(2020, 1, 31, 0, 0, 0), None, - datetime_utc(2019, 10, 1, 0, 0, 0), + datetime_utc(2019, 12, 1, 0, 0, 0), "2019-06-11", datetime_utc(2019, 6, 11, 0, 0, 0), ], diff --git a/tests/core/mock.py b/tests/core/mock.py index 050beb470..ced72b17c 100644 --- a/tests/core/mock.py +++ b/tests/core/mock.py @@ -1,6 +1,5 @@ import json import logging -from typing import Optional, Union from core.coverage import ( BibliographicCoverageProvider, @@ -71,7 +70,7 @@ def __getattr__(self, item): class MockCoverageProvider: """Mixin class for mock CoverageProviders that defines common constants.""" - SERVICE_NAME: Optional[str] = "Generic mock CoverageProvider" + SERVICE_NAME: str | None = "Generic mock CoverageProvider" # Whenever a CoverageRecord is created, the data_source of that # record will be Project Gutenberg. @@ -79,11 +78,11 @@ class MockCoverageProvider: # For testing purposes, this CoverageProvider will try to cover # every identifier in the database. - INPUT_IDENTIFIER_TYPES: Union[None, str, object] = None + INPUT_IDENTIFIER_TYPES: None | str | object = None # This CoverageProvider can work with any Collection that supports # the OPDS import protocol (e.g. DatabaseTest._default_collection). 
- PROTOCOL: Optional[str] = ExternalIntegration.OPDS_IMPORT + PROTOCOL: str | None = ExternalIntegration.OPDS_IMPORT class InstrumentedCoverageProvider(MockCoverageProvider, IdentifierCoverageProvider): diff --git a/tests/core/models/test_before_flush_decorator.py b/tests/core/models/test_before_flush_decorator.py index 19c276dbc..a358931e5 100644 --- a/tests/core/models/test_before_flush_decorator.py +++ b/tests/core/models/test_before_flush_decorator.py @@ -1,4 +1,4 @@ -from typing import Callable, List, Optional +from collections.abc import Callable from unittest.mock import MagicMock, PropertyMock, call import pytest @@ -17,9 +17,9 @@ def before_flush_decorator() -> BeforeFlushListener: @pytest.fixture def create_session() -> Callable[..., Session]: def create( - new: Optional[List[Base]] = None, - deleted: Optional[List[Base]] = None, - dirty: Optional[List[Base]] = None, + new: list[Base] | None = None, + deleted: list[Base] | None = None, + dirty: list[Base] | None = None, ) -> Session: new = new or [] deleted = deleted or [] diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index fb8810b9b..a33602ab9 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -1,9 +1,13 @@ import json +from unittest.mock import MagicMock import pytest +from sqlalchemy import select from core.config import Configuration -from core.model import create, get_one_or_create +from core.external_search import ExternalSearchIndex +from core.integration.goals import Goals +from core.model import get_one_or_create from core.model.circulationevent import CirculationEvent from core.model.collection import Collection from core.model.configuration import ConfigurationSetting, ExternalIntegration @@ -11,10 +15,7 @@ from core.model.customlist import CustomList from core.model.datasource import DataSource from core.model.edition import Edition -from core.model.integration import ( - IntegrationConfiguration, - IntegrationLibraryConfiguration, -) +from core.model.integration import IntegrationLibraryConfiguration from core.model.licensing import Hold, License, LicensePool, Loan from core.model.work import Work from tests.fixtures.database import DatabaseTransactionFixture @@ -48,10 +49,7 @@ def example_collection_fixture( class TestCollection: - def test_by_name_and_protocol( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture + def test_by_name_and_protocol(self, db: DatabaseTransactionFixture): name = "A name" protocol = ExternalIntegration.OVERDRIVE key = (name, protocol) @@ -91,7 +89,22 @@ def test_by_name_and_protocol( Collection.by_name_and_protocol( db.session, name, ExternalIntegration.BIBLIOTHECA ) - assert 'Collection "A name" does not use protocol "Bibliotheca".' in str( + assert 'Integration "A name" does not use protocol "Bibliotheca".' in str( + excinfo.value + ) + + # You'll get an exception if you look up an existing integration + # but the goal doesn't match. + db.integration_configuration( + protocol=protocol, goal=Goals.DISCOVERY_GOAL, name="another name" + ) + + with pytest.raises(ValueError) as excinfo: + Collection.by_name_and_protocol( + db.session, "another name", ExternalIntegration.OVERDRIVE + ) + + assert 'Integration "another name" does not have goal "LICENSE_GOAL".' 
in str( excinfo.value ) @@ -119,25 +132,6 @@ def test_by_protocol(self, example_collection_fixture: ExampleCollectionFixture) c1.marked_for_deletion = True assert [test_collection] == Collection.by_protocol(db.session, overdrive).all() - def test_by_datasource(self, example_collection_fixture: ExampleCollectionFixture): - """Collections can be found by their associated DataSource""" - db = example_collection_fixture.database_fixture - c1 = db.collection(data_source_name=DataSource.GUTENBERG) - c2 = db.collection(data_source_name=DataSource.OVERDRIVE) - - # Using the DataSource name - assert {c1} == set( - Collection.by_datasource(db.session, DataSource.GUTENBERG).all() - ) - - # Using the DataSource itself - overdrive = DataSource.lookup(db.session, DataSource.OVERDRIVE) - assert {c2} == set(Collection.by_datasource(db.session, overdrive).all()) - - # A collection marked for deletion is filtered out. - c2.marked_for_deletion = True - assert 0 == Collection.by_datasource(db.session, overdrive).count() - def test_parents(self, example_collection_fixture: ExampleCollectionFixture): db = example_collection_fixture.database_fixture @@ -153,39 +147,6 @@ def test_parents(self, example_collection_fixture: ExampleCollectionFixture): c3.parent_id = c2.id assert [c2, c1] == list(c3.parents) - def test_create_external_integration( - self, example_collection_fixture: ExampleCollectionFixture - ): - # A newly created Collection has no associated ExternalIntegration. - db = example_collection_fixture.database_fixture - collection, ignore = get_one_or_create( - db.session, Collection, name=db.fresh_str() - ) - assert None == collection.external_integration_id - with pytest.raises(ValueError) as excinfo: - getattr(collection, "external_integration") - assert "No known external integration for collection" in str(excinfo.value) - - # We can create one with create_external_integration(). - overdrive = ExternalIntegration.OVERDRIVE - integration = collection.create_external_integration(protocol=overdrive) - assert integration.id == collection.external_integration_id - assert overdrive == integration.protocol - - # If we call create_external_integration() again we get the same - # ExternalIntegration as before. - integration2 = collection.create_external_integration(protocol=overdrive) - assert integration == integration2 - - # If we try to initialize an ExternalIntegration with a different - # protocol, we get an error. - with pytest.raises(ValueError) as excinfo: - collection.create_external_integration(protocol="blah") - assert ( - "Located ExternalIntegration, but its protocol (Overdrive) does not match desired protocol (blah)." - in str(excinfo.value) - ) - def test_get_protocol(self, db: DatabaseTransactionFixture): test_collection = db.collection() integration = test_collection.integration_configuration @@ -284,9 +245,7 @@ def test_default_loan_period( test_collection = example_collection_fixture.collection library = db.default_library() - library.collections.append(test_collection) - assert isinstance(library.id, int) - test_collection.integration_configuration.for_library(library.id, create=True) + test_collection.libraries.append(library) ebook = Edition.BOOK_MEDIUM audio = Edition.AUDIO_MEDIUM @@ -372,19 +331,20 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): about a Collection. 
""" db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection library = db.default_library() library.name = "The only library" library.short_name = "only one" - library.collections.append(test_collection) - test_collection.external_account_id = "id" + test_collection = example_collection_fixture.collection + test_collection.libraries.append(library) + test_collection.integration_configuration.settings_dict = { "url": "url", "username": "username", "password": "password", "setting": "value", + "external_account_id": "id", } data = test_collection.explain() @@ -392,7 +352,7 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): 'Name: "test collection"', 'Protocol: "Overdrive"', 'Used by library: "only one"', - 'External account ID: "id"', + 'Setting "external_account_id": "id"', 'Setting "setting": "value"', 'Setting "url": "url"', 'Setting "username": "username"', @@ -403,20 +363,21 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): # If the collection is the child of another collection, # its parent is mentioned. - child = Collection(name="Child", external_account_id="id2") + child = db.collection( + name="Child", + external_account_id="id2", + protocol=ExternalIntegration.OVERDRIVE, + ) child.parent = test_collection - - child.create_external_integration(protocol=ExternalIntegration.OVERDRIVE) - child.create_integration_configuration(protocol=ExternalIntegration.OVERDRIVE) data = child.explain() assert [ 'Name: "Child"', "Parent: test collection", 'Protocol: "Overdrive"', - 'External account ID: "id2"', + 'Setting "external_account_id": "id2"', ] == data - def test_disassociate_library( + def test_disassociate_libraries( self, example_collection_fixture: ExampleCollectionFixture ): db = example_collection_fixture.database_fixture @@ -428,60 +389,51 @@ def test_disassociate_library( other_library = db.library() collection.libraries.append(other_library) - # It has an ExternalIntegration, which has some settings. + # It has an integration, which has some settings. integration = collection.integration_configuration - DatabaseTransactionFixture.set_settings( - integration, **{"integration setting": "value2"} - ) - setting2 = integration.for_library(db.default_library().id) - DatabaseTransactionFixture.set_settings( - setting2, **{"default_library+integration setting": "value2"} - ) - setting3 = integration.for_library(other_library.id, create=True) - DatabaseTransactionFixture.set_settings( - setting3, **{"other_library+integration setting": "value3"} - ) + integration.settings_dict = {"key": "value"} + + # And it has some library-specific settings. + default_library_settings = integration.for_library(db.default_library()) + assert default_library_settings is not None + default_library_settings.settings_dict = {"a": "b"} + other_library_settings = integration.for_library(other_library) + assert other_library_settings is not None + other_library_settings.settings_dict = {"c": "d"} # Now, disassociate one of the libraries from the collection. - collection.disassociate_library(db.default_library()) + collection.libraries.remove(db.default_library()) # It's gone. assert db.default_library() not in collection.libraries assert collection not in db.default_library().collections - # Furthermore, ConfigurationSettings that configure that - # Library's relationship to this Collection's - # ExternalIntegration have been deleted. 
- all_settings = db.session.query(IntegrationConfiguration).all() - all_library_settings = db.session.query(IntegrationLibraryConfiguration).all() - assert setting2 not in all_library_settings + # The library-specific settings for that library have been deleted. + library_config_ids = [ + l.library_id + for l in db.session.execute( + select(IntegrationLibraryConfiguration.library_id) + ) + ] + assert db.default_library().id not in library_config_ids - # The other library is unaffected. + # But the library-specific settings for the other library are still there. assert other_library in collection.libraries - assert collection in other_library.collections - assert setting3 in all_library_settings + assert other_library.id in library_config_ids + assert collection.integration_configuration.library_configurations[ + 0 + ].settings_dict == {"c": "d"} - # As is the library-independent configuration of this Collection's - # ExternalIntegration. - assert integration in all_settings + # We now disassociate all libraries from the collection. + collection.libraries.clear() - # Calling disassociate_library again is a no-op. - collection.disassociate_library(db.default_library()) - assert db.default_library() not in collection.libraries + # All the library-specific settings have been deleted. + assert collection.integration_configuration.library_configurations == [] + assert collection.integration_configuration.libraries == [] + assert collection.libraries == [] - # If you somehow manage to call disassociate_library on a Collection - # that has no associated ExternalIntegration, an exception is raised. - collection.integration_configuration_id = None - with pytest.raises(ValueError) as excinfo: - collection.disassociate_library(other_library) - assert "No known integration library configuration for collection" in str( - excinfo.value - ) - - collection.external_integration_id = None - with pytest.raises(ValueError) as excinfo: - collection.disassociate_library(other_library) - assert "No known external integration for collection" in str(excinfo.value) + # The integration settings are still there. + assert collection.integration_configuration.settings_dict == {"key": "value"} def test_custom_lists(self, example_collection_fixture: ExampleCollectionFixture): db = example_collection_fixture.database_fixture @@ -611,17 +563,6 @@ def test_delete(self, example_collection_fixture: ExampleCollectionFixture): # It's associated with a library. assert db.default_library() in collection.libraries - # It has an ExternalIntegration, which has some settings. - integration = collection.external_integration - setting1 = integration.set_setting("integration setting", "value2") - setting2 = ConfigurationSetting.for_library_and_externalintegration( - db.session, - "library+integration setting", - db.default_library(), - integration, - ) - setting2.value = "value2" - # It's got a Work that has a LicensePool, which has a License, # which has a loan. work = db.work(with_license_pool=True) @@ -659,13 +600,7 @@ def test_delete(self, example_collection_fixture: ExampleCollectionFixture): # Finally, here's a mock ExternalSearchIndex so we can track when # Works are removed from the search index. - class MockExternalSearchIndex: - removed = [] - - def remove_work(self, work): - self.removed.append(work) - - index = MockExternalSearchIndex() + index = MagicMock(spec=ExternalSearchIndex) # delete() will not work on a collection that's not marked for # deletion. 
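Editorial note: the rewritten `test_disassociate_libraries` above leans on ORM relationship semantics instead of a bespoke `disassociate_library()` helper — removing a library from `collection.libraries` is expected to delete that library's `IntegrationLibraryConfiguration` row while leaving the shared `settings_dict` alone. A minimal, self-contained sketch of how such cascading can be wired up in SQLAlchemy; the model names and columns here are hypothetical stand-ins, not the project's actual `core.model` definitions:

```python
# Hypothetical models illustrating the delete-orphan cascade the test relies on.
from sqlalchemy import JSON, Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Library(Base):
    __tablename__ = "libraries"
    id = Column(Integer, primary_key=True)


class IntegrationLibraryConfiguration(Base):
    __tablename__ = "integration_library_configurations"
    parent_id = Column(
        Integer, ForeignKey("integration_configurations.id"), primary_key=True
    )
    library_id = Column(Integer, ForeignKey("libraries.id"), primary_key=True)
    settings_dict = Column(JSON, default=dict)
    parent = relationship(
        "IntegrationConfiguration", back_populates="library_configurations"
    )
    library = relationship("Library")


class IntegrationConfiguration(Base):
    __tablename__ = "integration_configurations"
    id = Column(Integer, primary_key=True)
    settings_dict = Column(JSON, default=dict)
    # delete-orphan is the key: dropping a row from this list deletes it.
    library_configurations = relationship(
        "IntegrationLibraryConfiguration",
        back_populates="parent",
        cascade="all, delete-orphan",
    )
    # Proxy so callers can work with Library objects directly.
    libraries = association_proxy(
        "library_configurations",
        "library",
        creator=lambda library: IntegrationLibraryConfiguration(library=library),
    )


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    library = Library()
    config = IntegrationConfiguration(settings_dict={"key": "value"})
    config.libraries.append(library)
    config.library_configurations[0].settings_dict = {"a": "b"}
    session.add_all([library, config])
    session.flush()

    # Removing the library orphans its per-library settings row, which the
    # cascade then deletes; the shared settings_dict is untouched.
    config.libraries.remove(library)
    session.flush()
    assert config.library_configurations == []
    assert config.settings_dict == {"key": "value"}
```

With this wiring, the plain `remove()`/`clear()` calls in the test are sufficient on their own; no explicit cleanup query or helper method is needed.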
@@ -711,16 +646,7 @@ def remove_work(self, work):
 
         # Our search index was told to remove the first work (which no longer
         # has any LicensePools), but not the second.
-        assert [work] == index.removed
-
-        # The collection ExternalIntegration and its settings have been deleted.
-        # The storage ExternalIntegration remains.
-        external_integrations = db.session.query(ExternalIntegration).all()
-        assert integration not in external_integrations
-
-        settings = db.session.query(ConfigurationSetting).all()
-        for setting in (setting1, setting2):
-            assert setting not in settings
+        index.remove_work.assert_called_once_with(work)
 
         # If no search_index is passed into delete() (the default behavior),
         # we try to instantiate the normal ExternalSearchIndex object. Since
@@ -733,22 +659,3 @@ def remove_work(self, work):
         # We've now deleted every LicensePool created for this test.
         assert 0 == db.session.query(LicensePool).count()
         assert [] == work2.license_pools
-
-
-class TestCollectionForMetadataWrangler:
-    """Tests that requirements to the metadata wrangler's use of Collection
-    are being met by continued development on the Collection class.
-
-    If any of these tests are failing, development will be required on the
-    metadata wrangler to meet the needs of the new Collection class.
-    """
-
-    def test_only_name_is_required(
-        self, example_collection_fixture: ExampleCollectionFixture
-    ):
-        """Test that only name is a required field on
-        the Collection class.
-        """
-        db = example_collection_fixture.database_fixture
-        collection = create(db.session, Collection, name="banana")[0]
-        assert True == isinstance(collection, Collection)
diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py
index 351ede607..da04cfe46 100644
--- a/tests/core/models/test_configuration.py
+++ b/tests/core/models/test_configuration.py
@@ -604,16 +604,3 @@ def test_explain(
 
         # If we pass in True for include_secrets, we see the passwords.
         with_secrets = integration.explain(include_secrets=True)
         assert "password='somepass'" in with_secrets
-
-    def test_custom_accept_header(
-        self, example_externalintegration_fixture: ExampleExternalIntegrationFixture
-    ):
-        db = example_externalintegration_fixture.database_fixture
-
-        integration = db.external_integration("protocol", "goal")
-        # Must be empty if not set
-        assert integration.custom_accept_header == None
-
-        # Must be the same value if set
-        integration.custom_accept_header = "custom header"
-        assert integration.custom_accept_header == "custom header"
diff --git a/tests/core/models/test_coverage.py b/tests/core/models/test_coverage.py
index 2c1803c5f..c395594f0 100644
--- a/tests/core/models/test_coverage.py
+++ b/tests/core/models/test_coverage.py
@@ -1,5 +1,4 @@
 import datetime
-from typing import List
 
 import pytest
 
@@ -587,8 +586,8 @@ def relevant_records(work):
 
 
 class ExampleEquivalencyCoverageRecordFixture:
-    identifiers: List[Identifier]
-    equivalencies: List[Equivalency]
+    identifiers: list[Identifier]
+    equivalencies: list[Equivalency]
     transaction: DatabaseTransactionFixture
 
     def __init__(self, transaction: DatabaseTransactionFixture):
diff --git a/tests/core/models/test_discovery_service_registration.py b/tests/core/models/test_discovery_service_registration.py
index f953d01ac..6b60d8eb3 100644
--- a/tests/core/models/test_discovery_service_registration.py
+++ b/tests/core/models/test_discovery_service_registration.py
@@ -1,5 +1,3 @@
-from typing import Optional
-
 import pytest
 from sqlalchemy import select
 
@@ -20,8 +18,8 @@ class RegistrationFixture:
     def __call__(
         self,
-        library: Optional[Library] = None,
-        integration: Optional[IntegrationConfiguration] = None,
+        library: Library | None = None,
+        integration: IntegrationConfiguration | None = None,
     ) -> DiscoveryServiceRegistration:
         library = library or self.library_fixture.library()
         integration = integration or self.integration_fixture(
diff --git a/tests/core/models/test_integration_configuration.py b/tests/core/models/test_integration_configuration.py
index 251487423..4618159a3 100644
--- a/tests/core/models/test_integration_configuration.py
+++ b/tests/core/models/test_integration_configuration.py
@@ -1,5 +1,7 @@
+from unittest.mock import MagicMock
+
 from core.integration.goals import Goals
-from core.model import create
+from core.model import Library, create
 from core.model.integration import IntegrationConfiguration
 from tests.fixtures.database import DatabaseTransactionFixture
 
@@ -19,16 +21,18 @@ def test_for_library(seslf, db: DatabaseTransactionFixture):
         # No library ID provided
         assert config.for_library(None) is None
 
+        # Library has no ID
+        mock_library = MagicMock(spec=Library)
+        mock_library.id = None
+        assert config.for_library(mock_library) is None
+
         # No library config exists
         assert config.for_library(library.id) is None
 
-        # This should create a new config
-        libconfig = config.for_library(library.id, create=True)
-        assert libconfig is not None
-        assert libconfig.library == library
-        assert libconfig.parent == config
-        assert libconfig.settings_dict == {}
+        config.libraries.append(library)
+
+        # Library config exists
+        libconfig = config.for_library(library.id)
 
-        # The same config is returned henceforth
-        assert config.for_library(library.id) == libconfig
-        assert config.for_library(library.id, create=True) == libconfig
+        # The same config is returned for the same library
+        assert config.for_library(library) is libconfig
diff --git a/tests/core/models/test_library.py b/tests/core/models/test_library.py
index 49b98e6cc..73b16d261 100644
--- a/tests/core/models/test_library.py
+++ b/tests/core/models/test_library.py
@@ -134,7 +134,7 @@ def test_estimated_holdings_by_language(self, db: DatabaseTransactionFixture):
 
         # If we remove the default collection from the default library,
         # it loses all its works.
-        db.default_library().collections = []
+        db.default_collection().libraries = []
         estimate = library.estimated_holdings_by_language(include_open_access=False)
         assert dict() == estimate
 
diff --git a/tests/core/models/test_licensing.py b/tests/core/models/test_licensing.py
index be80c6d4d..0dbb99b00 100644
--- a/tests/core/models/test_licensing.py
+++ b/tests/core/models/test_licensing.py
@@ -1,6 +1,6 @@
 import datetime
 import json
-from typing import Callable, Optional
+from collections.abc import Callable
 from unittest.mock import MagicMock, PropertyMock
 
 import pytest
@@ -481,32 +481,32 @@ def test_best_available_license(self, licenses: TestLicenseFixture):
 
         # First, we use the time-limited license that's expiring first.
         assert time_limited_2 == licenses.pool.best_available_license()
-        time_limited_2.loan_to(licenses.db.patron())
+        time_limited_2.checkout()
 
         # When that's not available, we use the next time-limited license.
         assert licenses.time_limited == licenses.pool.best_available_license()
-        licenses.time_limited.loan_to(licenses.db.patron())
+        licenses.time_limited.checkout()
 
         # The time-and-loan-limited license also counts as time-limited for this.
         assert licenses.time_and_loan_limited == licenses.pool.best_available_license()
-        licenses.time_and_loan_limited.loan_to(licenses.db.patron())
+        licenses.time_and_loan_limited.checkout()
 
         # Next is the perpetual license.
         assert licenses.perpetual == licenses.pool.best_available_license()
-        licenses.perpetual.loan_to(licenses.db.patron())
+        licenses.perpetual.checkout()
 
         # Then the loan-limited license with the most remaining checkouts.
         assert licenses.loan_limited == licenses.pool.best_available_license()
-        licenses.loan_limited.loan_to(licenses.db.patron())
+        licenses.loan_limited.checkout()
 
         # That license allows 2 concurrent checkouts, so it's still the
         # best license until it's checked out again.
         assert licenses.loan_limited == licenses.pool.best_available_license()
-        licenses.loan_limited.loan_to(licenses.db.patron())
+        licenses.loan_limited.checkout()
 
         # There's one more loan-limited license.
         assert loan_limited_2 == licenses.pool.best_available_license()
-        loan_limited_2.loan_to(licenses.db.patron())
+        loan_limited_2.checkout()
 
         # Now all licenses are either loaned out or expired.
         assert None == licenses.pool.best_available_license()
@@ -1537,10 +1537,10 @@ class TestFormatPriorities:
     @pytest.fixture
     def mock_delivery(
         self,
-    ) -> Callable[[Optional[str], Optional[str]], DeliveryMechanism]:
+    ) -> Callable[[str | None, str | None], DeliveryMechanism]:
         def delivery_mechanism(
-            drm_scheme: Optional[str] = None,
-            content_type: Optional[str] = "application/epub+zip",
+            drm_scheme: str | None = None,
+            content_type: str | None = "application/epub+zip",
         ) -> DeliveryMechanism:
             def _delivery_eq(self, other):
                 return (
@@ -1564,10 +1564,10 @@ def _delivery_repr(self):
     @pytest.fixture
     def mock_mechanism(
         self, mock_delivery
-    ) -> Callable[[Optional[str], Optional[str]], LicensePoolDeliveryMechanism]:
+    ) -> Callable[[str | None, str | None], LicensePoolDeliveryMechanism]:
         def mechanism(
-            drm_scheme: Optional[str] = None,
-            content_type: Optional[str] = "application/epub+zip",
+            drm_scheme: str | None = None,
+            content_type: str | None = "application/epub+zip",
         ) -> LicensePoolDeliveryMechanism:
             def _mechanism_eq(self, other):
                 return self.delivery_mechanism == other.delivery_mechanism
diff --git a/tests/core/models/test_listeners.py b/tests/core/models/test_listeners.py
index 508aa8704..975162177 100644
--- a/tests/core/models/test_listeners.py
+++ b/tests/core/models/test_listeners.py
@@ -1,5 +1,6 @@
 import functools
-from typing import Any, Iterable
+from collections.abc import Iterable
+from typing import Any
 
 import pytest
 
@@ -189,35 +190,6 @@ def test_lane_change_updates_configuration(
         lane.add_genre("Science Fiction")
         data.mock.assert_was_called()
 
-    def test_configuration_relevant_collection_change_updates_configuration(
-        self,
-        example_site_configuration_changed_fixture: ExampleSiteConfigurationHasChangedFixture,
-    ):
-        """When you add a relevant item to a SQLAlchemy collection, such as
-        adding a Collection to library.collections,
-        site_configuration_has_changed is called.
-        """
-
-        data = example_site_configuration_changed_fixture
-        session = data.transaction.session
-        # Creating a collection calls the method via an 'after_insert'
-        # event on Collection.
-        library = data.transaction.default_library()
-        collection = data.transaction.collection()
-        session.commit()
-        data.mock.assert_was_called()
-
-        # Adding the collection to the library calls the method via
-        # an 'append' event on Collection.libraries.
-        library.collections.append(collection)
-        session.commit()
-        data.mock.assert_was_called()
-
-        # NOTE: test_work.py:TestWork.test_reindex_on_availability_change
-        # tests the circumstances under which a database change
-        # requires that a Work's entry in the search index be
-        # recreated.
-
 
 def _set_property(object, value, property_name):
     setattr(object, property_name, value)
 
diff --git a/tests/core/models/test_marcfile.py b/tests/core/models/test_marcfile.py
new file mode 100644
index 000000000..7ee27cb2a
--- /dev/null
+++ b/tests/core/models/test_marcfile.py
@@ -0,0 +1,42 @@
+from datetime import datetime
+
+import pytest
+from sqlalchemy import select
+
+from core.model import MarcFile
+from tests.fixtures.database import DatabaseTransactionFixture
+
+
+@pytest.mark.parametrize(
+    "delete_library, delete_collection",
+    [
+        (False, True),
+        (True, False),
+        (True, True),
+    ],
+)
+def test_delete_library_collection(
+    db: DatabaseTransactionFixture, delete_library: bool, delete_collection: bool
+) -> None:
+    library = db.default_library()
+    collection = db.default_collection()
+    session = db.session
+
+    file = MarcFile(
+        library=library, collection=collection, key="key", created=datetime.now()
+    )
+    session.add(file)
+    session.commit()
+
+    if delete_library:
+        session.delete(library)
+    if delete_collection:
+        session.delete(collection)
+    session.commit()
+
+    marc_files = session.scalars(select(MarcFile)).all()
+    assert len(marc_files) == 1
+    [marc_file] = marc_files
+
+    assert marc_file.library == (None if delete_library else library)
+    assert marc_file.collection == (None if delete_collection else collection)
diff --git a/tests/core/models/test_patron.py b/tests/core/models/test_patron.py
index d6653725c..d2dcb69f1 100644
--- a/tests/core/models/test_patron.py
+++ b/tests/core/models/test_patron.py
@@ -428,19 +428,6 @@ def test_set_synchronize_annotations(self, db: DatabaseTransactionFixture):
         db.session.commit()
         assert 0 == len(p1.annotations)
 
-        # Patron #1 can no longer use Annotation.get_one_or_create.
-        pytest.raises(
-            ValueError,
-            Annotation.get_one_or_create,
-            db.session,
-            patron=p1,
-            identifier=identifier,
-            motivation=Annotation.IDLING,
-        )
-
-        # Patron #2's annotation is unaffected.
-        assert 1 == len(p2.annotations)
-
         # But patron #2 can use Annotation.get_one_or_create.
         i2, is_new = Annotation.get_one_or_create(
             db.session,
diff --git a/tests/core/models/test_work.py b/tests/core/models/test_work.py
index 58c26d758..f2a12c271 100644
--- a/tests/core/models/test_work.py
+++ b/tests/core/models/test_work.py
@@ -264,7 +264,7 @@ def test_calculate_presentation(
 
         # The Work now has a complete set of WorkCoverageRecords
         # associated with it, reflecting all the operations that
-        # occured as part of calculate_presentation().
+        # occurred as part of calculate_presentation().
         #
         # All the work has actually been done, except for the work of
         # updating the search index, which has been registered and
@@ -278,16 +278,15 @@ def test_calculate_presentation(
             (wcr.CLASSIFY_OPERATION, success),
             (wcr.SUMMARY_OPERATION, success),
             (wcr.QUALITY_OPERATION, success),
-            (wcr.GENERATE_MARC_OPERATION, success),
             (wcr.UPDATE_SEARCH_INDEX_OPERATION, wcr.REGISTERED),
         }
         assert expect == {(x.operation, x.status) for x in records}
 
         # Now mark the pool with the presentation edition as suppressed.
         # work.calculate_presentation() will call work.mark_licensepools_as_superceded(),
-        # which will mark the suppressed pool as superceded and take its edition out of the running.
+        # which will mark the suppressed pool as superseded and take its edition out of the running.
         # Make sure that work's presentation edition and work's author, etc.
-        # fields are updated accordingly, and that the superceded pool's edition
+        # fields are updated accordingly, and that the superseded pool's edition
         # knows it's no longer the champ.
         pool2.suppressed = True
 
@@ -314,7 +313,7 @@ def test_calculate_presentation(
         # Updating availability also modified work.last_update_time.
         assert (utc_now() - work.last_update_time) < datetime.timedelta(seconds=2)
 
-        # make a staff (admin interface) edition. its fields should supercede all others below it
+        # make a staff (admin interface) edition. its fields should supersede all others below it
         # except when it has no contributors, and they do.
         pool2.suppressed = False
 
@@ -333,7 +332,7 @@ def test_calculate_presentation(
 
         work.calculate_presentation(search_index_client=index)
 
-        # The title of the Work got superceded.
+        # The title of the Work got superseded.
         assert "The Staff Title" == work.title
 
         # The author of the Work is still the author of edition2 and was not clobbered.
@@ -1002,7 +1001,7 @@ def test_to_search_document(self, db: DatabaseTransactionFixture):
         # for the same Work.
         collection1 = db.default_collection()
         collection2 = db.collection()
-        db.default_library().collections.append(collection2)
+        collection2.libraries.append(db.default_library())
         pool2 = db.licensepool(edition=edition, collection=collection2)
         pool2.work_id = work.id
         pool2.licenses_available = 0
@@ -1634,13 +1633,6 @@ def test_for_unchecked_subjects(self, db: DatabaseTransactionFixture):
         classification2.subject.checked = True
         assert [] == qu.all()
 
-    def test_calculate_marc_record(self, db: DatabaseTransactionFixture):
-        work = db.work(with_license_pool=True)
-
-        work.calculate_marc_record()
-        assert work.title in work.marc_record
-        assert "online resource" in work.marc_record
-
     def test_active_licensepool_ignores_superceded_licensepools(
         self, db: DatabaseTransactionFixture
     ):
@@ -1714,8 +1706,8 @@ def test_active_license_pool_accounts_for_library(
         l2 = db.library()
         c1 = db.collection()
         c2 = db.collection()
-        l1.collections = [c1]
-        l2.collections = [c2]
+        c1.libraries = [l1]
+        c2.libraries = [l2]
         work: Work = db.work(presentation_edition=db.edition())
         lp1: LicensePool = db.licensepool(
             work.presentation_edition,
diff --git a/tests/core/search/test_service.py b/tests/core/search/test_service.py
index 53ece7748..4192e5199 100644
--- a/tests/core/search/test_service.py
+++ b/tests/core/search/test_service.py
@@ -1,4 +1,4 @@
-from typing import Iterable
+from collections.abc import Iterable
 
 from core.search.document import LONG, SearchMappingDocument
 from core.search.revision import SearchSchemaRevision
diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py
index 8b62e3c69..1328aa707 100644
--- a/tests/core/service/storage/test_s3.py
+++ b/tests/core/service/storage/test_s3.py
@@ -1,8 +1,9 @@
 from __future__ import annotations
 
 import functools
+from collections.abc import Generator
 from io import BytesIO
-from typing import TYPE_CHECKING, Generator, Optional
+from typing import TYPE_CHECKING
 from unittest.mock import MagicMock
 
 import pytest
@@ -99,6 +100,15 @@ def test_generate_url(
         url = service.generate_url(key)
         assert url == expected
 
+    def test_delete(self, s3_service_fixture: S3ServiceFixture):
+        """The S3Service.delete method deletes the object from the bucket."""
+        service = s3_service_fixture.service()
+        service.client.delete_object = MagicMock()
+        service.delete("key")
+        service.client.delete_object.assert_called_once_with(
+            Bucket=s3_service_fixture.bucket, Key="key"
+        )
+
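Editorial note: the new `test_delete` above only asserts that `S3Service.delete` forwards to the client. For orientation, here is the shape of `delete` the test implies — a sketch under the assumption that `S3Service` simply wraps a boto3 S3 client. The class body is illustrative, not the project's actual `core/service/storage/s3.py`; `delete_object(Bucket=..., Key=...)` is, however, the standard boto3 call:

```python
from unittest.mock import MagicMock


class S3Service:
    """Illustrative stand-in for the service under test."""

    def __init__(self, client, bucket: str) -> None:
        self.client = client
        self.bucket = bucket

    def delete(self, key: str) -> None:
        # S3 reports success even when the key does not exist, so no
        # existence check is needed before deleting.
        self.client.delete_object(Bucket=self.bucket, Key=key)


# Exercised the same way the unit test above exercises the real class:
client = MagicMock()
S3Service(client, "public").delete("key")
client.delete_object.assert_called_once_with(Bucket="public", Key="key")
```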
     @pytest.mark.parametrize(
         "content",
         ["foo bar baz", b"byte string"],
@@ -200,9 +210,7 @@ def test_multipart_upload(self, s3_service_fixture: S3ServiceFixture):
         assert upload.exception is None
         s3_service_fixture.mock_s3_client.complete_multipart_upload.assert_called_once()
 
-    def test_multipart_upload_boto_exception(
-        self, s3_service_fixture: S3ServiceFixture
-    ):
+    def test_multipart_upload_exception(self, s3_service_fixture: S3ServiceFixture):
         service = s3_service_fixture.service()
         exception = BotoCoreError()
         s3_service_fixture.mock_s3_client.upload_part.side_effect = exception
@@ -219,28 +227,6 @@ def test_multipart_upload_boto_exception(
         assert upload.exception is exception
         s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once()
 
-    def test_multipart_upload_other_exception(
-        self, s3_service_fixture: S3ServiceFixture
-    ):
-        service = s3_service_fixture.service()
-        exception = ValueError("foo")
-        s3_service_fixture.mock_s3_client.upload_part.side_effect = exception
-
-        # A non-boto exception is raised during upload, the upload is aborted
-        # and the exception is raised.
-        with pytest.raises(ValueError) as excinfo:
-            with service.multipart(key="key") as upload:
-                assert upload.complete is False
-                assert upload.url == "https://region.test.com/bucket/key"
-                assert upload.exception is None
-                upload.upload_part(b"test")
-
-        assert upload.complete is False
-        assert upload.exception is exception
-        s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once()
-        assert excinfo.value is exception
-
         # Calling upload_part after the upload is complete raises an error.
         with pytest.raises(RuntimeError):
             upload.upload_part(b"foo")
@@ -312,6 +298,24 @@ def s3_service_integration_fixture() -> (
 
 @pytest.mark.minio
 class TestS3ServiceIntegration:
+    def test_delete(self, s3_service_integration_fixture: S3ServiceIntegrationFixture):
+        """The S3Service.delete method deletes the object from the bucket."""
+        service = s3_service_integration_fixture.public
+        bucket = service.bucket
+
+        raw_client = s3_service_integration_fixture.s3_client
+        content = BytesIO()
+        content.write(b"foo bar baz")
+        raw_client.upload_fileobj(content, bucket, "key")
+
+        bucket_contents = raw_client.list_objects(Bucket=bucket).get("Contents", [])
+        assert len(bucket_contents) == 1
+        assert bucket_contents[0]["Key"] == "key"
+
+        service.delete("key")
+        bucket_contents = raw_client.list_objects(Bucket=bucket).get("Contents", [])
+        assert len(bucket_contents) == 0
+
     @pytest.mark.parametrize(
         "key, bucket, content, content_type",
         [
@@ -328,7 +332,7 @@ def test_store(
         key: str,
         bucket: str,
         content: bytes | str,
-        content_type: Optional[str],
+        content_type: str | None,
         s3_service_integration_fixture: S3ServiceIntegrationFixture,
     ):
         """The S3Service.store method stores content in the bucket."""
@@ -368,7 +372,7 @@ def test_multipart(
         key: str,
         bucket: str,
         content: bytes,
-        content_type: Optional[str],
+        content_type: str | None,
         s3_service_integration_fixture: S3ServiceIntegrationFixture,
     ):
         service = getattr(s3_service_integration_fixture, bucket)
diff --git a/tests/core/test_app_server.py b/tests/core/test_app_server.py
index 9c336084e..1e1c79a83 100644
--- a/tests/core/test_app_server.py
+++ b/tests/core/test_app_server.py
@@ -1,8 +1,8 @@
 import gzip
 import json
+from collections.abc import Callable, Iterable
 from functools import partial
 from io import BytesIO
-from typing import Callable, Iterable
 from unittest.mock import MagicMock, PropertyMock
 
 import flask
diff --git a/tests/core/test_coverage.py b/tests/core/test_coverage.py
index 9fb3d48e0..e96e69bcb 100644
--- a/tests/core/test_coverage.py
+++ b/tests/core/test_coverage.py
@@ -7,7 +7,6 @@
     CoverageFailure,
     CoverageProviderProgress,
     IdentifierCoverageProvider,
-    MARCRecordWorkCoverageProvider,
     PresentationReadyWorkCoverageProvider,
     WorkClassificationCoverageProvider,
     WorkPresentationEditionCoverageProvider,
@@ -2193,22 +2192,3 @@ def test_process_item(self, db: DatabaseTransactionFixture):
                 policy.calculate_quality,
             ]
         )
-
-
-class TestMARCRecordWorkCoverageProvider:
-    def test_run(self, db: DatabaseTransactionFixture):
-        provider = MARCRecordWorkCoverageProvider(db.session)
-        work = db.work(with_license_pool=True)
-        work.marc_record = "old junk"
-        work.presentation_ready = False
-
-        # The work is not presentation-ready, so nothing happens.
-        provider.run()
-        assert "old junk" == work.marc_record
-
-        # The work is presentation-ready, so its MARC record is
-        # regenerated.
-        work.presentation_ready = True
-        provider.run()
-        assert work.title in work.marc_record
-        assert "online resource" in work.marc_record
diff --git a/tests/core/test_equivalent_coverage.py b/tests/core/test_equivalent_coverage.py
index 60452f346..119e05b74 100644
--- a/tests/core/test_equivalent_coverage.py
+++ b/tests/core/test_equivalent_coverage.py
@@ -1,5 +1,3 @@
-from typing import List
-
 import pytest
 import sqlalchemy
 from sqlalchemy import or_
@@ -23,8 +21,8 @@ class EquivalentCoverageFixture:
     coverage_records: ExampleEquivalencyCoverageRecordFixture
     provider: EquivalentIdentifiersCoverageProvider
     transaction: DatabaseTransactionFixture
-    identifiers: List[Identifier]
-    equivalencies: List[Equivalency]
+    identifiers: list[Identifier]
+    equivalencies: list[Equivalency]
 
 
 @pytest.fixture()
diff --git a/tests/core/test_external_search.py b/tests/core/test_external_search.py
index 4dcc6e7b4..b92d1b17a 100644
--- a/tests/core/test_external_search.py
+++ b/tests/core/test_external_search.py
@@ -2,8 +2,8 @@
 import re
 import time
 import uuid
+from collections.abc import Callable, Collection
 from datetime import datetime
-from typing import Callable, Collection, List
 from unittest.mock import MagicMock
 
 import pytest
@@ -1326,7 +1326,7 @@ def _populate_works(
         # Each work has one LicensePool associated with the default
         # collection.
         result.collection1 = transaction.default_collection()
-        result.collection1.name = "Collection 1 - ACB"
+        result.collection1.integration_configuration.name = "Collection 1 - ACB"
         [result.a1] = result.a.license_pools
         [result.b1] = result.b.license_pools
         [result.c1] = result.c.license_pools
@@ -1632,7 +1632,7 @@ class TestAuthorFilterData:
     sort_name: Contributor
     viaf: Contributor
     lc: Contributor
-    works: List[Work]
+    works: list[Work]
     literary_wonderlands: Work
    ubik: Work
     justice: Work
@@ -3428,7 +3428,8 @@ def test_constructor(self, filter_fixture: FilterFixture):
 
         # If the library has no collections, the collection filter
         # will filter everything out.
-        transaction.default_library().collections = []
+        transaction.default_collection().libraries = []
+        assert transaction.default_library().collections == []
         library_filter = Filter(collections=transaction.default_library())
         assert [] == library_filter.collection_ids
@@ -3678,7 +3679,7 @@ def scoring_functions(self, filter):
         # library.
         library2 = transaction.library()
         collection2 = transaction.collection()
-        library2.collections.append(collection2)
+        collection2.libraries.append(library2)
         for_other_library = WorkList()
         for_other_library.initialize(library2)
         for_default_library.append_child(for_other_library)
@@ -5380,7 +5381,7 @@ class TestExternalSearchJSONQueryData:
     book_work: Work
     facets: SearchFacets
     filter: Filter
-    random_works: List[Work]
+    random_works: list[Work]
 
 
 class TestExternalSearchJSONQuery:
diff --git a/tests/core/test_http.py b/tests/core/test_http.py
index 88b0c785f..4a32f125c 100644
--- a/tests/core/test_http.py
+++ b/tests/core/test_http.py
@@ -1,57 +1,66 @@
-import logging
+import functools
+from collections.abc import Callable
+from dataclasses import dataclass
 
 import pytest
+import requests
 
 from core.util.http import HTTP, RequestNetworkException
 from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse
 
 
+@dataclass
+class TestHttpFixture:
+    server: MockAPIServer
+    request_with_timeout: Callable[..., requests.Response]
+
+
 @pytest.fixture
-def mock_web_server():
-    """A test fixture that yields a usable mock web server for the lifetime of the test."""
-    _server = MockAPIServer("127.0.0.1", 10256)
-    _server.start()
-    logging.info(f"starting mock web server on {_server.address()}:{_server.port()}")
-    yield _server
-    logging.info(
-        f"shutting down mock web server on {_server.address()}:{_server.port()}"
+def test_http_fixture(mock_web_server: MockAPIServer):
+    # Make sure we don't wait for retries, as that will slow down the tests.
+    request_with_timeout = functools.partial(
+        HTTP.request_with_timeout, timeout=1, backoff_factor=0
+    )
+    return TestHttpFixture(
+        server=mock_web_server, request_with_timeout=request_with_timeout
     )
-    _server.stop()
 
 
 class TestHTTP:
-    def test_retries_unspecified(self, mock_web_server: MockAPIServer):
+    def test_retries_unspecified(self, test_http_fixture: TestHttpFixture):
         for i in range(1, 7):
             response = MockAPIServerResponse()
             response.content = b"Ouch."
             response.status_code = 502
-            mock_web_server.enqueue_response("GET", "/test", response)
+            test_http_fixture.server.enqueue_response("GET", "/test", response)
 
         with pytest.raises(RequestNetworkException):
-            HTTP.request_with_timeout("GET", mock_web_server.url("/test"))
+            test_http_fixture.request_with_timeout(
+                "GET", test_http_fixture.server.url("/test")
+            )
 
-        assert len(mock_web_server.requests()) == 6
-        request = mock_web_server.requests().pop()
+        assert len(test_http_fixture.server.requests()) == 6
+        request = test_http_fixture.server.requests().pop()
         assert request.path == "/test"
         assert request.method == "GET"
 
-    def test_retries_none(self, mock_web_server: MockAPIServer):
+    def test_retries_none(self, test_http_fixture: TestHttpFixture):
         response = MockAPIServerResponse()
         response.content = b"Ouch."
         response.status_code = 502
 
-        mock_web_server.enqueue_response("GET", "/test", response)
+        test_http_fixture.server.enqueue_response("GET", "/test", response)
         with pytest.raises(RequestNetworkException):
-            HTTP.request_with_timeout(
-                "GET", mock_web_server.url("/test"), max_retry_count=0
+            test_http_fixture.request_with_timeout(
+                "GET", test_http_fixture.server.url("/test"), max_retry_count=0
             )
 
-        assert len(mock_web_server.requests()) == 1
-        request = mock_web_server.requests().pop()
+        assert len(test_http_fixture.server.requests()) == 1
+        request = test_http_fixture.server.requests().pop()
         assert request.path == "/test"
         assert request.method == "GET"
 
-    def test_retries_3(self, mock_web_server: MockAPIServer):
+    def test_retries_3(self, test_http_fixture: TestHttpFixture):
         response0 = MockAPIServerResponse()
         response0.content = b"Ouch."
         response0.status_code = 502
@@ -64,24 +73,24 @@ def test_retries_3(self, mock_web_server: MockAPIServer):
         response2.content = b"OK!"
         response2.status_code = 200
 
-        mock_web_server.enqueue_response("GET", "/test", response0)
-        mock_web_server.enqueue_response("GET", "/test", response1)
-        mock_web_server.enqueue_response("GET", "/test", response2)
+        test_http_fixture.server.enqueue_response("GET", "/test", response0)
+        test_http_fixture.server.enqueue_response("GET", "/test", response1)
+        test_http_fixture.server.enqueue_response("GET", "/test", response2)
 
-        response = HTTP.request_with_timeout(
-            "GET", mock_web_server.url("/test"), max_retry_count=3
+        response = test_http_fixture.request_with_timeout(
+            "GET", test_http_fixture.server.url("/test"), max_retry_count=3
         )
         assert response.status_code == 200
 
-        assert len(mock_web_server.requests()) == 3
-        request = mock_web_server.requests().pop()
+        assert len(test_http_fixture.server.requests()) == 3
+        request = test_http_fixture.server.requests().pop()
         assert request.path == "/test"
         assert request.method == "GET"
 
-        request = mock_web_server.requests().pop()
+        request = test_http_fixture.server.requests().pop()
         assert request.path == "/test"
         assert request.method == "GET"
 
-        request = mock_web_server.requests().pop()
+        request = test_http_fixture.server.requests().pop()
         assert request.path == "/test"
         assert request.method == "GET"
diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py
index 8b1b4fc1e..2676883d2 100644
--- a/tests/core/test_lane.py
+++ b/tests/core/test_lane.py
@@ -1,7 +1,6 @@
 import datetime
 import logging
 import random
-from typing import List, Tuple
 from unittest.mock import MagicMock, call
 
 import pytest
@@ -44,7 +43,7 @@
     tuple_to_numericrange,
 )
 from core.model.collection import Collection
-from core.model.configuration import ConfigurationSetting, ExternalIntegration
+from core.model.configuration import ConfigurationAttributeValue, ExternalIntegration
 from core.problem_details import INVALID_INPUT
 from core.util.datetime_helpers import utc_now
 from core.util.opds_writer import OPDSFeed
@@ -60,7 +59,7 @@ class MockFacetConfig:
     but you don't care which EntryPoints are configured.
     """
 
-    entrypoints: List = []
+    entrypoints: list = []
 
     def test_items(self):
         ep = AudiobooksEntryPoint
@@ -304,7 +303,7 @@ def _configure_facets(library, enabled, default):
         library._settings = None
 
     def test_facet_groups(self, db: DatabaseTransactionFixture):
-        db.default_collection().data_source = DataSource.AMAZON
+        db.default_collection().data_source = DataSource.AMAZON  # type: ignore[assignment]
         facets = Facets(
             db.default_library(),
             Facets.COLLECTION_FULL,
@@ -714,8 +713,8 @@ def test_from_request_gets_available_facets_through_hook_methods(
         # available_facets() and default_facets() methods. This gives
         # subclasses a chance to add extra facets or change defaults.
         class Mock(Facets):
-            available_facets_calls: List[Tuple] = []
-            default_facet_calls: List[Tuple] = []
+            available_facets_calls: list[tuple] = []
+            default_facet_calls: list[tuple] = []
 
             # For whatever reason, this faceting object allows only a
             # single setting for each facet group.
@@ -2571,14 +2570,10 @@ def test_worklist_for_resultset_no_holds_allowed(
         w1.license_pools[0].licenses_available = 0
         collection1: Collection = w1.license_pools[0].collection
-        cs1 = ConfigurationSetting(
-            library_id=db.default_library().id,
-            external_integration_id=collection1.external_integration_id,
-            key=ExternalIntegration.DISPLAY_RESERVES,
-            _value="no",
-        )
-        db.session.add(cs1)
-        db.session.commit()
+        integration1 = collection1.integration_configuration
+        integration1.settings_dict = {
+            ExternalIntegration.DISPLAY_RESERVES: ConfigurationAttributeValue.NOVALUE.value
+        }
 
         class MockHit:
             def __init__(self, work_id, has_last_update=False):
@@ -2612,7 +2607,7 @@ def __contains__(self, k):
 
         # Work1 now has 2 licensepools, one of which has availability
         alternate_collection = db.collection()
-        db.default_library().collections.append(alternate_collection)
+        alternate_collection.libraries.append(db.default_library())
         alternate_w1_lp: LicensePool = db.licensepool(
             w1.presentation_edition, collection=alternate_collection
         )
@@ -2625,13 +2620,9 @@ def __contains__(self, k):
         assert [[w2], [w1]] == m(db.session, [[hit2], [hit1]])
 
         # Now both collections are restricted and have no availability
-        cs2 = ConfigurationSetting(
-            library_id=db.default_library().id,
-            external_integration_id=alternate_collection.external_integration_id,
-            key=ExternalIntegration.DISPLAY_RESERVES,
-            _value="no",
-        )
-        db.session.add(cs2)
+        alternate_collection.integration_configuration.settings_dict = {
+            ExternalIntegration.DISPLAY_RESERVES: ConfigurationAttributeValue.NOVALUE.value
+        }
         assert [[w2], []] == m(db.session, [[hit2], [hit1]])
 
         # Both restricted but one has availability
@@ -2863,14 +2854,17 @@ def test_works_from_database_end_to_end(self, db: DatabaseTransactionFixture):
 
         # A DatabaseBackedWorkList will only find books licensed
         # through one of its collections.
+        db.default_collection().libraries = []
         collection = db.collection()
-        db.default_library().collections = [collection]
+        collection.libraries.append(db.default_library())
+        assert db.default_library().collections == [collection]
         wl.initialize(db.default_library())
         assert 0 == wl.works_from_database(db.session).count()
 
         # If a DatabaseBackedWorkList has no collections, it has no
         # books.
-        db.default_library().collections = []
+        collection.libraries = []
+        assert db.default_library().collections == []
         wl.initialize(db.default_library())
         assert 0 == wl.works_from_database(db.session).count()
 
diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py
index cdb5fe9bc..fad3fee05 100644
--- a/tests/core/test_marc.py
+++ b/tests/core/test_marc.py
@@ -1,87 +1,214 @@
 from __future__ import annotations
 
 import datetime
+import functools
+import logging
+import urllib
 from typing import TYPE_CHECKING
-from unittest.mock import MagicMock
-from urllib.parse import quote
+from unittest.mock import MagicMock, create_autospec, patch
 
 import pytest
-from freezegun import freeze_time
+from _pytest.logging import LogCaptureFixture
 from pymarc import MARCReader, Record
 
-from core.config import CannotLoadConfiguration
-from core.external_search import Filter
-from core.lane import WorkList
-from core.marc import Annotator, MARCExporter, MARCExporterFacets
+from core.marc import Annotator, MARCExporter
 from core.model import (
-    CachedMARCFile,
     Contributor,
     DataSource,
     DeliveryMechanism,
     Edition,
-    ExternalIntegration,
     Genre,
     Identifier,
     LicensePoolDeliveryMechanism,
+    MarcFile,
     Representation,
     RightsStatus,
-    Work,
-    get_one,
 )
 from core.util.datetime_helpers import datetime_utc, utc_now
-from tests.mocks.search import ExternalSearchIndexFake
+from core.util.uuid import uuid_encode
 
 if TYPE_CHECKING:
     from tests.fixtures.database import DatabaseTransactionFixture
-    from tests.fixtures.s3 import S3ServiceFixture
+    from tests.fixtures.s3 import MockS3Service, S3ServiceFixture
-    from tests.fixtures.search import ExternalSearchFixtureFake
+
+
+class AnnotateWorkRecordFixture:
+    def __init__(self):
+        self.cm_url = "http://cm.url"
+        self.short_name = "short_name"
+        self.web_client_urls = ["http://webclient.url"]
+        self.organization_name = "org"
+        self.include_summary = True
+        self.include_genres = True
+
+        self.annotator = Annotator(
+            self.cm_url,
+            self.short_name,
+            self.web_client_urls,
+            self.organization_name,
+            self.include_summary,
+            self.include_genres,
+        )
+
+        self.revised = MagicMock()
+        self.work = MagicMock()
+        self.pool = MagicMock()
+        self.edition = MagicMock()
+        self.identifier = MagicMock()
+
+        self.mock_leader = create_autospec(self.annotator.leader, return_value=" " * 24)
+        self.mock_add_control_fields = create_autospec(
+            self.annotator.add_control_fields
+        )
+        self.mock_add_marc_organization_code = create_autospec(
+            self.annotator.add_marc_organization_code
+        )
+        self.mock_add_isbn = create_autospec(self.annotator.add_isbn)
+        self.mock_add_title = create_autospec(self.annotator.add_title)
+        self.mock_add_contributors = create_autospec(self.annotator.add_contributors)
+        self.mock_add_publisher = create_autospec(self.annotator.add_publisher)
+        self.mock_add_distributor = create_autospec(self.annotator.add_distributor)
+        self.mock_add_physical_description = create_autospec(
+            self.annotator.add_physical_description
+        )
+        self.mock_add_audience = create_autospec(self.annotator.add_audience)
+        self.mock_add_series = create_autospec(self.annotator.add_series)
+        self.mock_add_system_details = create_autospec(
+            self.annotator.add_system_details
+        )
+        self.mock_add_formats = create_autospec(self.annotator.add_formats)
+        self.mock_add_summary = create_autospec(self.annotator.add_summary)
+        self.mock_add_genres = create_autospec(self.annotator.add_genres)
+        self.mock_add_ebooks_subject = create_autospec(
+            self.annotator.add_ebooks_subject
+        )
+        self.mock_add_web_client_urls = create_autospec(
+            self.annotator.add_web_client_urls
+        )
+
+        self.annotator.leader = self.mock_leader
+        self.annotator.add_control_fields = self.mock_add_control_fields
+        self.annotator.add_marc_organization_code = self.mock_add_marc_organization_code
+        self.annotator.add_isbn = self.mock_add_isbn
+        self.annotator.add_title = self.mock_add_title
+        self.annotator.add_contributors = self.mock_add_contributors
+        self.annotator.add_publisher = self.mock_add_publisher
+        self.annotator.add_distributor = self.mock_add_distributor
+        self.annotator.add_physical_description = self.mock_add_physical_description
+        self.annotator.add_audience = self.mock_add_audience
+        self.annotator.add_series = self.mock_add_series
+        self.annotator.add_system_details = self.mock_add_system_details
+        self.annotator.add_formats = self.mock_add_formats
+        self.annotator.add_summary = self.mock_add_summary
+        self.annotator.add_genres = self.mock_add_genres
+        self.annotator.add_ebooks_subject = self.mock_add_ebooks_subject
+        self.annotator.add_web_client_urls = self.mock_add_web_client_urls
+
+        self.annotate_work_record = functools.partial(
+            self.annotator.annotate_work_record,
+            self.revised,
+            self.work,
+            self.pool,
+            self.edition,
+            self.identifier,
+        )
+
+
+@pytest.fixture
+def annotate_work_record_fixture() -> AnnotateWorkRecordFixture:
+    return AnnotateWorkRecordFixture()
 
 
 class TestAnnotator:
-    def test_annotate_work_record(self, db: DatabaseTransactionFixture):
-        session = db.session
+    def test_annotate_work_record(
+        self, annotate_work_record_fixture: AnnotateWorkRecordFixture
+    ) -> None:
+        fixture = annotate_work_record_fixture
+        with patch("core.marc.Record") as mock_record:
+            fixture.annotate_work_record()
+
+        mock_record.assert_called_once_with(
+            force_utf8=True, leader=fixture.mock_leader.return_value
+        )
+        fixture.mock_leader.assert_called_once_with(fixture.revised)
+        record = mock_record()
+        fixture.mock_add_control_fields.assert_called_once_with(
+            record, fixture.identifier, fixture.pool, fixture.edition
+        )
+        fixture.mock_add_marc_organization_code.assert_called_once_with(
+            record, fixture.organization_name
+        )
+        fixture.mock_add_isbn.assert_called_once_with(record, fixture.identifier)
+        fixture.mock_add_title.assert_called_once_with(record, fixture.edition)
+        fixture.mock_add_contributors.assert_called_once_with(record, fixture.edition)
+        fixture.mock_add_publisher.assert_called_once_with(record, fixture.edition)
+        fixture.mock_add_distributor.assert_called_once_with(record, fixture.pool)
+        fixture.mock_add_physical_description.assert_called_once_with(
+            record, fixture.edition
+        )
+        fixture.mock_add_audience.assert_called_once_with(record, fixture.work)
+        fixture.mock_add_series.assert_called_once_with(record, fixture.edition)
+        fixture.mock_add_system_details.assert_called_once_with(record)
+        fixture.mock_add_formats.assert_called_once_with(record, fixture.pool)
+        fixture.mock_add_summary.assert_called_once_with(record, fixture.work)
+        fixture.mock_add_genres.assert_called_once_with(record, fixture.work)
+        fixture.mock_add_ebooks_subject.assert_called_once_with(record)
+        fixture.mock_add_web_client_urls.assert_called_once_with(
+            record,
+            fixture.identifier,
+            fixture.short_name,
+            fixture.cm_url,
+            fixture.web_client_urls,
+        )
 
-        # Verify that annotate_work_record adds the distributor and formats.
-        class MockAnnotator(Annotator):
-            add_distributor_called_with = None
-            add_formats_called_with = None
+    def test_annotate_work_record_no_summary(
+        self, annotate_work_record_fixture: AnnotateWorkRecordFixture
+    ) -> None:
+        fixture = annotate_work_record_fixture
+        fixture.annotator.include_summary = False
+        fixture.annotate_work_record()
 
-            def add_distributor(self, record, pool):
-                self.add_distributor_called_with = [record, pool]
+        assert fixture.mock_add_summary.call_count == 0
 
-            def add_formats(self, record, pool):
-                self.add_formats_called_with = [record, pool]
+    def test_annotate_work_record_no_genres(
+        self, annotate_work_record_fixture: AnnotateWorkRecordFixture
+    ) -> None:
+        fixture = annotate_work_record_fixture
+        fixture.annotator.include_genres = False
+        fixture.annotate_work_record()
 
-        annotator = MockAnnotator()
-        record = Record()
-        work = db.work(with_license_pool=True)
-        pool = work.license_pools[0]
+        assert fixture.mock_add_genres.call_count == 0
 
-        annotator.annotate_work_record(work, pool, None, None, record)
-        assert [record, pool] == annotator.add_distributor_called_with
-        assert [record, pool] == annotator.add_formats_called_with
+    def test_annotate_work_record_no_organization_code(
+        self, annotate_work_record_fixture: AnnotateWorkRecordFixture
+    ) -> None:
+        fixture = annotate_work_record_fixture
+        fixture.annotator.organization_code = None
+        fixture.annotate_work_record()
 
-    def test_leader(self, db: DatabaseTransactionFixture):
-        work = db.work(with_license_pool=True)
-        leader = Annotator.leader(work)
-        assert "00000nam  2200000   4500" == leader
+        assert fixture.mock_add_marc_organization_code.call_count == 0
+
+    def test_leader(self):
+        leader = Annotator.leader(False)
+        assert leader == "00000nam  2200000   4500"
 
-        # If there's already a marc record cached, the record status changes.
-        work.marc_record = "cached"
-        leader = Annotator.leader(work)
-        assert "00000cam  2200000   4500" == leader
+        # If the record is revised, the leader is different.
+        leader = Annotator.leader(True)
+        assert leader == "00000cam  2200000   4500"
 
-    def _check_control_field(self, record, tag, expected):
+    @staticmethod
+    def _check_control_field(record, tag, expected):
         [field] = record.get_fields(tag)
-        assert expected == field.value()
+        assert field.value() == expected
 
-    def _check_field(self, record, tag, expected_subfields, expected_indicators=None):
+    @staticmethod
+    def _check_field(record, tag, expected_subfields, expected_indicators=None):
         if not expected_indicators:
             expected_indicators = [" ", " "]
         [field] = record.get_fields(tag)
-        assert expected_indicators == field.indicators
+        assert field.indicators == expected_indicators
         for subfield, value in expected_subfields.items():
-            assert value == field.get_subfields(subfield)[0]
+            assert field.get_subfields(subfield)[0] == value
 
     def test_add_control_fields(self, db: DatabaseTransactionFixture):
         # This edition has one format and was published before 1900.
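Editorial note: the `_check_control_field`/`_check_field` helpers above are thin wrappers over pymarc's `Record`/`Field` API. A minimal illustration of that API (pymarc 4.x style, where `subfields` is a flat code/value list, matching its usage elsewhere in these tests):

```python
from pymarc import Field, Record

# Build a record with a single 655 field, the same shape test_add_ebooks_subject checks.
record = Record(force_utf8=True)
record.add_field(
    Field(tag="655", indicators=[" ", "0"], subfields=["a", "Electronic books."])
)

# The same access pattern the helpers use:
[field] = record.get_fields("655")
assert field.indicators == [" ", "0"]
assert field.get_subfields("a") == ["Electronic books."]
```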
@@ -455,7 +582,7 @@ def test_add_simplified_genres(self, db: DatabaseTransactionFixture): work.genres = [fantasy, romance] record = Record() - Annotator.add_simplified_genres(record, work) + Annotator.add_genres(record, work) fields = record.get_fields("650") [fantasy_field, romance_field] = sorted( fields, key=lambda x: x.get_subfields("a")[0] @@ -472,303 +599,274 @@ def test_add_ebooks_subject(self): Annotator.add_ebooks_subject(record) self._check_field(record, "655", {"a": "Electronic books."}, [" ", "0"]) + def test_add_web_client_urls_empty(self): + record = MagicMock(spec=Record) + identifier = MagicMock() + Annotator.add_web_client_urls(record, identifier, "", "", []) + assert record.add_field.call_count == 0 + + def test_add_web_client_urls(self, db: DatabaseTransactionFixture): + record = Record() + identifier = db.identifier() + short_name = "short_name" + cm_url = "http://cm.url" + web_client_urls = ["http://webclient1.url", "http://webclient2.url"] + Annotator.add_web_client_urls( + record, identifier, short_name, cm_url, web_client_urls + ) + fields = record.get_fields("856") + assert len(fields) == 2 + [field1, field2] = fields + assert field1.indicators == ["4", "0"] + assert field2.indicators == ["4", "0"] + + # The URL for a work is constructed as: + # - //works/ + work_link_template = "{cm_base}/{lib}/works/{qid}" + # It is then encoded and the web client URL is constructed in this form: + # - /book/ + client_url_template = "{client_base}/book/{work_link}" + + qualified_identifier = urllib.parse.quote( + identifier.type + "/" + identifier.identifier, safe="" + ) + + expected_work_link = work_link_template.format( + cm_base=cm_url, lib=short_name, qid=qualified_identifier + ) + encoded_work_link = urllib.parse.quote(expected_work_link, safe="") + + expected_client_url_1 = client_url_template.format( + client_base=web_client_urls[0], work_link=encoded_work_link + ) + expected_client_url_2 = client_url_template.format( + client_base=web_client_urls[1], work_link=encoded_work_link + ) + + # A few checks to ensure that our setup is useful. 
+ assert web_client_urls[0] != web_client_urls[1] + assert expected_client_url_1 != expected_client_url_2 + assert expected_client_url_1.startswith(web_client_urls[0]) + assert expected_client_url_2.startswith(web_client_urls[1]) + + assert field1.get_subfields("u")[0] == expected_client_url_1 + assert field2.get_subfields("u")[0] == expected_client_url_2 + class MarcExporterFixture: - def __init__(self, db: DatabaseTransactionFixture): + def __init__(self, db: DatabaseTransactionFixture, s3: MockS3Service): self.db = db - self.integration = self._integration(db) self.now = utc_now() - self.exporter = MARCExporter.from_config(db.default_library()) - self.annotator = Annotator() - self.w1 = db.work(genre="Mystery", with_open_access_download=True) - self.w2 = db.work(genre="Mystery", with_open_access_download=True) + self.library = db.default_library() + self.s3_service = s3 + self.exporter = MARCExporter(self.db.session, s3) + self.mock_annotator = MagicMock(spec=Annotator) + assert self.library.short_name is not None + self.annotator = Annotator( + "http://cm.url", + self.library.short_name, + ["http://webclient.url"], + "org", + True, + True, + ) + + self.library = db.library() + self.collection = db.collection() + self.collection.libraries.append(self.library) - self.search_engine = ExternalSearchIndexFake(db.session) - self.search_engine.mock_query_works([self.w1, self.w2]) + self.now = utc_now() + self.yesterday = self.now - datetime.timedelta(days=1) + self.last_week = self.now - datetime.timedelta(days=7) - @staticmethod - def _integration(db: DatabaseTransactionFixture): - return db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], + self.w1 = db.work( + genre="Mystery", with_open_access_download=True, collection=self.collection + ) + self.w1.last_update_time = self.yesterday + self.w2 = db.work( + genre="Mystery", with_open_access_download=True, collection=self.collection + ) + self.w2.last_update_time = self.last_week + + self.records = functools.partial( + self.exporter.records, + self.library, + self.collection, + annotator=self.annotator, + creation_time=self.now, ) @pytest.fixture def marc_exporter_fixture( db: DatabaseTransactionFixture, - external_search_fake_fixture: ExternalSearchFixtureFake, + s3_service_fixture: S3ServiceFixture, ) -> MarcExporterFixture: - # external_search_fake_fixture is used only for the integration it creates - return MarcExporterFixture(db) + return MarcExporterFixture(db, s3_service_fixture.mock_service()) class TestMARCExporter: - def test_from_config(self, db: DatabaseTransactionFixture): - pytest.raises( - CannotLoadConfiguration, MARCExporter.from_config, db.default_library() - ) - - integration = MarcExporterFixture._integration(db) - exporter = MARCExporter.from_config(db.default_library()) - assert integration == exporter.integration - assert db.default_library() == exporter.library - - other_library = db.library() - pytest.raises(CannotLoadConfiguration, MARCExporter.from_config, other_library) - - def test_create_record(self, db: DatabaseTransactionFixture): + def test_create_record( + self, db: DatabaseTransactionFixture, marc_exporter_fixture: MarcExporterFixture + ): work = db.work( with_license_pool=True, title="old title", authors=["old author"], data_source_name=DataSource.OVERDRIVE, ) - annotator = Annotator() - # The record isn't cached yet, so a new record is created and cached. 
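
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): the 856 $u values asserted in
# test_add_web_client_urls above are produced by quoting twice -- once for
# the identifier inside the work link, once for the whole work link inside
# the web client URL. Standalone version of that scheme (the identifier
# values are made up for illustration):
import urllib.parse


def client_urls(cm_base, lib, id_type, identifier, client_bases):
    qid = urllib.parse.quote(f"{id_type}/{identifier}", safe="")
    work_link = f"{cm_base}/{lib}/works/{qid}"
    encoded = urllib.parse.quote(work_link, safe="")
    return [f"{base}/book/{encoded}" for base in client_bases]


urls = client_urls(
    "http://cm.url", "short_name", "ISBN", "9780000000000",
    ["http://webclient1.url", "http://webclient2.url"],
)
assert urls[0].startswith("http://webclient1.url/book/")
assert urls[1].startswith("http://webclient2.url/book/")
# ---------------------------------------------------------------------------
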
- assert None == work.marc_record - record = MARCExporter.create_record(work, annotator) - [title_field] = record.get_fields("245") - assert "old title" == title_field.get_subfields("a")[0] - [author_field] = record.get_fields("100") - assert "author, old" == author_field.get_subfields("a")[0] - [distributor_field] = record.get_fields("264") - assert DataSource.OVERDRIVE == distributor_field.get_subfields("b")[0] - cached = work.marc_record - assert "old title" in cached - assert "author, old" in cached - # The distributor isn't part of the cached record. - assert DataSource.OVERDRIVE not in cached - - work.presentation_edition.title = "new title" - work.presentation_edition.sort_author = "author, new" - new_data_source = DataSource.lookup(db.session, DataSource.BIBLIOTHECA) - work.license_pools[0].data_source = new_data_source - - # Now that the record is cached, creating a record will - # use the cache. Distributor will be updated since it's - # not part of the cached record. - record = MARCExporter.create_record(work, annotator) - [title_field] = record.get_fields("245") - assert "old title" == title_field.get_subfields("a")[0] - [author_field] = record.get_fields("100") - assert "author, old" == author_field.get_subfields("a")[0] - [distributor_field] = record.get_fields("264") - assert DataSource.BIBLIOTHECA == distributor_field.get_subfields("b")[0] - - # But we can force an update to the cached record. - record = MARCExporter.create_record(work, annotator, force_create=True) - [title_field] = record.get_fields("245") - assert "new title" == title_field.get_subfields("a")[0] - [author_field] = record.get_fields("100") - assert "author, new" == author_field.get_subfields("a")[0] - [distributor_field] = record.get_fields("264") - assert DataSource.BIBLIOTHECA == distributor_field.get_subfields("b")[0] - cached = work.marc_record - assert "old title" not in cached - assert "author, old" not in cached - assert "new title" in cached - assert "author, new" in cached - - # If we pass in an integration, it's passed along to the annotator. - integration = MarcExporterFixture._integration(db) - - class MockAnnotator(Annotator): - integration = None - - def annotate_work_record( - self, work, pool, edition, identifier, record, integration - ): - self.integration = integration - - annotator = MockAnnotator() - record = MARCExporter.create_record(work, annotator, integration=integration) - assert integration == annotator.integration - - @freeze_time("2020-01-01 00:00:00") - def test_create_record_roundtrip(self, db: DatabaseTransactionFixture): - # Create a marc record from a work with special characters - # in both the title and author name and round-trip it to - # the DB and back again to make sure we are creating records - # we can understand. - # - # We freeze the current time here, because a MARC record has - # a timestamp when it was created and we need the created - # records to match. 
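
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): why the removed round-trip test ran
# under @freeze_time -- as its comment says, a MARC record embeds a creation
# timestamp, so two create_record() calls only serialize identically when
# "now" is pinned. Minimal freezegun demonstration:
import datetime

from freezegun import freeze_time

with freeze_time("2020-01-01 00:00:00"):
    first = datetime.datetime.utcnow()
    second = datetime.datetime.utcnow()

assert first == second == datetime.datetime(2020, 1, 1)
# ---------------------------------------------------------------------------
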
- - annotator = Annotator() - - # Creates a new record and saves it to the database - work = db.work( - title="Little Mimi\u2019s First Counting Lesson", - authors=["Lagerlo\xf6f, Selma Ottiliana Lovisa,"], - with_license_pool=True, + mock_revised = MagicMock() + + create_record = functools.partial( + MARCExporter.create_record, + revised=mock_revised, + work=work, + annotator=marc_exporter_fixture.mock_annotator, ) - record = MARCExporter.create_record(work, annotator) - loaded_record = MARCExporter.create_record(work, annotator) - assert record.as_marc() == loaded_record.as_marc() - # Loads a existing record from the DB - new_work = get_one(db.session, Work, id=work.id) - new_record = MARCExporter.create_record(new_work, annotator) - assert record.as_marc() == new_record.as_marc() + record = create_record() + assert record is not None + + # Make sure we pass the expected arguments to Annotator.annotate_work_record + marc_exporter_fixture.mock_annotator.annotate_work_record.assert_called_once_with( + mock_revised, + work, + work.license_pools[0], + work.license_pools[0].presentation_edition, + work.license_pools[0].identifier, + ) - @pytest.mark.parametrize("object_type", ["lane", "worklist"]) - def test_records_lane( + def test_records( self, - object_type: str, db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, ): - if object_type == "lane": - lane_or_wl = db.lane("Test Lane", genres=["Mystery"]) - elif object_type == "worklist": - lane_or_wl = WorkList() - lane_or_wl.initialize(db.default_library(), display_name="All Books") - else: - raise RuntimeError() - exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - search_engine = marc_exporter_fixture.search_engine + storage_service = marc_exporter_fixture.s3_service + creation_time = marc_exporter_fixture.now - # If there's a storage protocol but not corresponding storage integration, - # it raises an exception. - pytest.raises(Exception, exporter.records, lane_or_wl, annotator) + marc_exporter_fixture.records() - storage_service = s3_service_fixture.mock_service() - exporter.records( - lane_or_wl, - annotator, - storage_service, - query_batch_size=1, - search_engine=search_engine, - ) - - # The file was mirrored and a CachedMARCFile was created to track the mirrored file. + # The file was mirrored and a MarcFile was created to track the mirrored file. 
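
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): the record-level assertions just below
# parse the uploaded bytes back with pymarc and pull each title out of field
# 245 subfield "a". Condensed into a helper:
from pymarc import MARCReader


def titles(marc_bytes: bytes) -> set[str]:
    return {
        record.get_fields("245")[0].get_subfields("a")[0]
        for record in MARCReader(marc_bytes)
    }
# ---------------------------------------------------------------------------
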
assert len(storage_service.uploads) == 1 - [cache] = db.session.query(CachedMARCFile).all() - assert cache.library == db.default_library() - if object_type == "lane": - assert cache.lane == lane_or_wl - else: - assert cache.lane is None - assert cache.representation.content is None - assert storage_service.uploads[0].key == "{}/{}/{}.mrc".format( - db.default_library().short_name, - str(cache.representation.fetched_at), - lane_or_wl.display_name, - ) - assert quote(storage_service.uploads[0].key) in cache.representation.mirror_url - assert cache.start_time is None - assert marc_exporter_fixture.now < cache.end_time + [cache] = db.session.query(MarcFile).all() + assert cache.library == marc_exporter_fixture.library + assert cache.collection == marc_exporter_fixture.collection + + short_name = marc_exporter_fixture.library.short_name + collection_name = marc_exporter_fixture.collection.name + date_str = creation_time.strftime("%Y-%m-%d") + uuid_str = uuid_encode(cache.id) + + assert ( + cache.key + == f"marc/{short_name}/{collection_name}.full.{date_str}.{uuid_str}.mrc" + ) + assert cache.created == creation_time + assert cache.since is None records = list(MARCReader(storage_service.uploads[0].content)) assert len(records) == 2 title_fields = [record.get_fields("245") for record in records] - titles = [fields[0].get_subfields("a")[0] for fields in title_fields] - assert set(titles) == { + titles = {fields[0].get_subfields("a")[0] for fields in title_fields} + assert titles == { marc_exporter_fixture.w1.title, marc_exporter_fixture.w2.title, } - assert marc_exporter_fixture.w1.title in marc_exporter_fixture.w1.marc_record - assert marc_exporter_fixture.w2.title in marc_exporter_fixture.w2.marc_record + def test_records_since_time( + self, + db: DatabaseTransactionFixture, + marc_exporter_fixture: MarcExporterFixture, + ): + # If the `since` parameter is set, only works updated since that time + # are included in the export and the filename reflects that we created + # a partial export. + since = marc_exporter_fixture.now - datetime.timedelta(days=3) + storage_service = marc_exporter_fixture.s3_service + creation_time = marc_exporter_fixture.now + + marc_exporter_fixture.records( + since_time=since, + ) + [cache] = db.session.query(MarcFile).all() + assert cache.library == marc_exporter_fixture.library + assert cache.collection == marc_exporter_fixture.collection + + short_name = marc_exporter_fixture.library.short_name + collection_name = marc_exporter_fixture.collection.name + from_date = since.strftime("%Y-%m-%d") + to_date = creation_time.strftime("%Y-%m-%d") + uuid_str = uuid_encode(cache.id) + + assert ( + cache.key + == f"marc/{short_name}/{collection_name}.delta.{from_date}.{to_date}.{uuid_str}.mrc" + ) + assert cache.created == creation_time + assert cache.since == since + + # Only the work updated since the `since` time is included in the export. + [record] = list(MARCReader(storage_service.uploads[0].content)) + [title_field] = record.get_fields("245") + assert title_field.get_subfields("a")[0] == marc_exporter_fixture.w1.title - def test_records_start_time( + def test_records_none( self, db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, + caplog: LogCaptureFixture, ): - # If a start time is set, it's used in the mirror url. 
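
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): the key-naming convention asserted in
# this test and in test_records_since_time, factored out. uuid_str stands in
# for the project's uuid_encode(cache.id); dates use %Y-%m-%d:
import datetime


def marc_file_key(short_name, collection, created, uuid_str, since=None):
    to_date = created.strftime("%Y-%m-%d")
    if since is None:
        return f"marc/{short_name}/{collection}.full.{to_date}.{uuid_str}.mrc"
    from_date = since.strftime("%Y-%m-%d")
    return f"marc/{short_name}/{collection}.delta.{from_date}.{to_date}.{uuid_str}.mrc"


key = marc_file_key("lib", "coll", datetime.datetime(2024, 1, 2), "abc123")
assert key == "marc/lib/coll.full.2024-01-02.abc123.mrc"
# ---------------------------------------------------------------------------
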
- # - # (Our mock search engine returns everthing in its 'index', - # so this doesn't test that the start time is actually used to - # find works -- that's in the search index tests and the - # tests of MARCExporterFacets.) - start_time = marc_exporter_fixture.now - datetime.timedelta(days=3) - exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - search_engine = marc_exporter_fixture.search_engine - lane = db.lane("Test Lane", genres=["Mystery"]) - storage_service = s3_service_fixture.mock_service() - - exporter.records( - lane, - annotator, - storage_service, - start_time=start_time, - query_batch_size=2, - search_engine=search_engine, - ) - [cache] = db.session.query(CachedMARCFile).all() - - assert cache.library == db.default_library() - assert cache.lane == lane - assert cache.representation.content is None - assert storage_service.uploads[0].key == "{}/{}-{}/{}.mrc".format( - db.default_library().short_name, - str(start_time), - str(cache.representation.fetched_at), - lane.display_name, - ) - assert cache.start_time == start_time - assert marc_exporter_fixture.now < cache.end_time - - def test_records_empty_search( + # If there are no works to export, no file is created and a log message is generated. + caplog.set_level(logging.INFO) + + storage_service = marc_exporter_fixture.s3_service + + # Remove the works from the database. + db.session.delete(marc_exporter_fixture.w1) + db.session.delete(marc_exporter_fixture.w2) + + marc_exporter_fixture.records() + + assert [] == storage_service.uploads + assert db.session.query(MarcFile).count() == 0 + assert len(caplog.records) == 1 + assert "No MARC records to upload" in caplog.text + + def test_records_exception( self, db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, + caplog: LogCaptureFixture, ): - # If the search engine returns no contents for the lane, - # nothing will be mirrored, but a CachedMARCFile is still - # created to track that we checked for updates. + # If an exception occurs while exporting, no file is created and a log message is generated. + caplog.set_level(logging.ERROR) + exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - empty_search_engine = ExternalSearchIndexFake(db.session) - lane = db.lane("Test Lane", genres=["Mystery"]) - storage_service = s3_service_fixture.mock_service() + storage_service = marc_exporter_fixture.s3_service - exporter.records( - lane, - annotator, - storage_service, - search_engine=empty_search_engine, - ) + # Mock our query function to raise an exception. + exporter.query_works = MagicMock(side_effect=Exception("Boom!")) + + marc_exporter_fixture.records() assert [] == storage_service.uploads - [cache] = db.session.query(CachedMARCFile).all() - assert cache.library == db.default_library() - assert cache.lane == lane - assert cache.representation.content is None - assert cache.start_time is None - assert marc_exporter_fixture.now < cache.end_time + assert db.session.query(MarcFile).count() == 0 + assert len(caplog.records) == 1 + assert "Failed to upload MARC file" in caplog.text + assert "Boom!" 
in caplog.text def test_records_minimum_size( self, - db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, ): - lane = db.lane(genres=["Mystery"]) - storage_service = s3_service_fixture.mock_service() exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - search_engine = marc_exporter_fixture.search_engine - - # Make sure we page exactly how many times we need to - works = [ - db.work(genre="Mystery", with_open_access_download=True) for _ in range(4) - ] - search_engine.mock_query_works(works) + storage_service = marc_exporter_fixture.s3_service exporter.MINIMUM_UPLOAD_BATCH_SIZE_BYTES = 100 + # Mock the "records" generated, and force the response to be of certain sizes created_record_mock = MagicMock() created_record_mock.as_marc = MagicMock( @@ -776,14 +874,13 @@ def test_records_minimum_size( ) exporter.create_record = lambda *args: created_record_mock - exporter.records( - lane, - annotator, - storage_service, - search_engine=search_engine, - query_batch_size=1, + # Mock the query_works to return 4 works + exporter.query_works = MagicMock( + return_value=[MagicMock(), MagicMock(), MagicMock(), MagicMock()] ) + marc_exporter_fixture.records() + assert storage_service.mocked_multipart_upload is not None # Even though there are 4 parts, we upload in 3 batches due to minimum size limitations # The "4"th part gets uploaded due it being the tail piece @@ -793,26 +890,3 @@ def test_records_minimum_size( b"2" * 20 + b"3" * 500, b"4" * 10, ] - - -class TestMARCExporterFacets: - def test_modify_search_filter(self): - # A facet object. - facets = MARCExporterFacets("some start time") - - # A filter about to be modified by the facet object. - filter = Filter() - filter.order_ascending = False - - facets.modify_search_filter(filter) - - # updated_after has been set and results are to be returned in - # order of increasing last_update_time. - assert "last_update_time" == filter.order - assert True == filter.order_ascending - assert "some start time" == filter.updated_after - - def test_scoring_functions(self): - # A no-op. 
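
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): the batching rule the multipart
# assertions above imply -- buffer serialized records until the minimum
# upload size is reached, and always flush the undersized tail. The
# 600/20/500/10 record sizes are inferred from the asserted parts:
def batch(parts: list[bytes], minimum: int) -> list[bytes]:
    batches, buffer = [], b""
    for part in parts:
        buffer += part
        if len(buffer) >= minimum:
            batches.append(buffer)
            buffer = b""
    if buffer:
        batches.append(buffer)  # the tail piece goes out regardless of size
    return batches


assert batch([b"1" * 600, b"2" * 20, b"3" * 500, b"4" * 10], 100) == [
    b"1" * 600,
    b"2" * 20 + b"3" * 500,
    b"4" * 10,
]
# ---------------------------------------------------------------------------
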
- facets = MARCExporterFacets("some start time") - assert [] == facets.scoring_functions(object()) diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index dc50b228d..203242d4b 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -1,16 +1,14 @@ import datetime -from typing import Generator, List, Union +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from _pytest.logging import LogCaptureFixture from requests import Response -from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory from api.circulation import CirculationAPI, FulfillmentInfo from api.circulation_exceptions import CannotFulfill from core.model import ( - ConfigurationSetting, Contribution, Contributor, DataSource, @@ -27,7 +25,12 @@ ) from core.model.collection import Collection from core.model.constants import IdentifierType -from core.opds2_import import OPDS2API, OPDS2Importer, RWPMManifestParser +from core.opds2_import import ( + OPDS2API, + OPDS2Importer, + PalaceOPDS2FeedParserFactory, + RWPMManifestParser, +) from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.opds2_files import OPDS2FilesFixture @@ -96,15 +99,19 @@ def opds2_importer_fixture( ) -> TestOPDS2ImporterFixture: data = TestOPDS2ImporterFixture() data.transaction = db - data.collection = db.collection(protocol=OPDS2API.label()) + data.collection = db.collection( + protocol=OPDS2API.label(), + data_source_name="OPDS 2.0 Data Source", + external_account_id="http://opds2.example.org/feed", + ) data.library = db.default_library() - data.library.collections.append(data.collection) + data.collection.libraries.append(data.library) data.data_source = DataSource.lookup( db.session, "OPDS 2.0 Data Source", autocreate=True ) data.collection.data_source = data.data_source data.importer = OPDS2Importer( - db.session, data.collection, RWPMManifestParser(OPDS2FeedParserFactory()) + db.session, data.collection, RWPMManifestParser(PalaceOPDS2FeedParserFactory()) ) return data @@ -140,7 +147,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( opds2_importer_fixture.transaction.session, ) content_server_feed_text = opds2_files_fixture.sample_text("feed.json") - content_server_feed: Union[str, bytes] + content_server_feed: str | bytes if manifest_type == "bytes": content_server_feed = content_server_feed_text.encode() @@ -167,7 +174,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( assert "Moby-Dick" == moby_dick_edition.title assert "eng" == moby_dick_edition.language assert "eng" == moby_dick_edition.language - assert EditionConstants.BOOK_MEDIUM == moby_dick_edition.medium + assert EditionConstants.AUDIO_MEDIUM == moby_dick_edition.medium assert "Herman Melville" == moby_dick_edition.author assert moby_dick_edition.duration == 100.2 @@ -260,6 +267,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( assert moby_dick_license_pool.open_access assert LicensePool.UNLIMITED_ACCESS == moby_dick_license_pool.licenses_owned assert LicensePool.UNLIMITED_ACCESS == moby_dick_license_pool.licenses_available + assert True == moby_dick_license_pool.should_track_playtime assert 1 == len(moby_dick_license_pool.delivery_mechanisms) [moby_dick_delivery_mechanism] = moby_dick_license_pool.delivery_mechanisms @@ -268,7 +276,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( == moby_dick_delivery_mechanism.delivery_mechanism.drm_scheme ) assert ( - MediaTypes.EPUB_MEDIA_TYPE + 
MediaTypes.AUDIOBOOK_MANIFEST_MEDIA_TYPE == moby_dick_delivery_mechanism.delivery_mechanism.content_type ) @@ -285,6 +293,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( LicensePool.UNLIMITED_ACCESS == huckleberry_finn_license_pool.licenses_available ) + assert False == huckleberry_finn_license_pool.should_track_playtime assert 2 == len(huckleberry_finn_license_pool.delivery_mechanisms) huckleberry_finn_delivery_mechanisms = ( @@ -396,7 +405,7 @@ def test_opds2_importer_skips_publications_with_unsupported_identifier_types( opds2_importer_fixture: TestOPDS2ImporterFixture, opds2_files_fixture: OPDS2FilesFixture, this_identifier_type, - ignore_identifier_type: List[IdentifierType], + ignore_identifier_type: list[IdentifierType], identifier: str, ) -> None: """Ensure that OPDS2Importer imports only publications having supported identifier types. @@ -453,27 +462,25 @@ def test_auth_token_feed( imported_editions, pools, works, failures = data.importer.import_from_feed( content ) - setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, data.collection.external_integration + token_endpoint = data.collection.integration_configuration.context.get( + ExternalIntegration.TOKEN_AUTH ) # Did the token endpoint get stored correctly? - assert setting.value == "http://example.org/auth?userName={patron_id}" + assert token_endpoint == "http://example.org/auth?userName={patron_id}" class Opds2ApiFixture: def __init__(self, db: DatabaseTransactionFixture, mock_http: MagicMock): self.patron = db.patron() self.collection: Collection = db.collection( - protocol=ExternalIntegration.OPDS2_IMPORT, data_source_name="test" - ) - self.integration = self.collection.create_external_integration( - ExternalIntegration.OPDS2_IMPORT - ) - self.setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, self.integration + protocol=ExternalIntegration.OPDS2_IMPORT, + data_source_name="test", + external_account_id="http://opds2.example.org/feed", ) - self.setting.value = "http://example.org/token?userName={patron_id}" + self.collection.integration_configuration.context = { + ExternalIntegration.TOKEN_AUTH: "http://example.org/token?userName={patron_id}" + } self.mock_response = MagicMock(spec=Response) self.mock_response.status_code = 200 @@ -525,7 +532,7 @@ def test_opds2_with_authentication_tokens( work = works[0] - api = CirculationAPI(db.session, db.default_library()) + api = CirculationAPI(db.session, opds2_importer_fixture.library) patron = db.patron() # Borrow the book from the library diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 171b644f0..3e1c2626d 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -1,7 +1,8 @@ +from __future__ import annotations + import random from functools import partial from io import StringIO -from typing import Optional from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -18,10 +19,10 @@ SAMLNameIDFormat, SAMLSubject, ) -from core.config import IntegrationException from core.coverage import CoverageFailure -from core.metadata_layer import CirculationData, LinkData, Metadata +from core.metadata_layer import LinkData from core.model import ( + Collection, Contributor, CoverageRecord, DataSource, @@ -225,6 +226,9 @@ def test_extract_metadata(self, opds_importer_fixture: OPDSImporterFixture): assert data_source_name == c1._data_source assert data_source_name == c2._data_source + assert m1.circulation.should_track_playtime == 
True + assert m2.circulation.should_track_playtime == False + [[failure]] = list(failures.values()) assert isinstance(failure, CoverageFailure) assert ( @@ -241,9 +245,12 @@ def test_use_dcterm_identifier_as_id_with_id_and_dcterms_identifier( opds_importer_fixture.db.session, ) - collection_to_test = db.default_collection() - collection_to_test.primary_identifier_source = ( - ExternalIntegration.DCTERMS_IDENTIFIER + collection_to_test = db.collection( + settings={ + "primary_identifier_source": ExternalIntegration.DCTERMS_IDENTIFIER, + }, + data_source_name="OPDS", + external_account_id="http://root.uri", ) importer = opds_importer_fixture.importer(collection=collection_to_test) @@ -289,7 +296,6 @@ def test_use_id_with_existing_dcterms_identifier( ) collection_to_test = db.default_collection() - collection_to_test.primary_identifier_source = None importer = opds_importer_fixture.importer(collection=collection_to_test) metadata, failures = importer.extract_feed_data( @@ -794,7 +800,7 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): assert crow.license_pools[0].collection == db.default_collection() assert mouse.work is not None - assert mouse.medium == Edition.PERIODICAL_MEDIUM + assert mouse.medium == Edition.AUDIO_MEDIUM # Four links have been added to the identifier of the 'mouse' # edition. @@ -891,7 +897,10 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): # Bonus: make sure that delivery mechanisms are set appropriately. [mech] = mouse_pool.delivery_mechanisms - assert Representation.EPUB_MEDIA_TYPE == mech.delivery_mechanism.content_type + assert ( + Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE + == mech.delivery_mechanism.content_type + ) assert DeliveryMechanism.NO_DRM == mech.delivery_mechanism.drm_scheme assert "http://www.gutenberg.org/ebooks/10441.epub.images" == mech.resource.url @@ -1366,188 +1375,6 @@ def test_update_work_for_edition_having_multiple_license_pools( assert lp.work == work assert lp2.work == work - def test_assert_importable_content(self, db: DatabaseTransactionFixture): - session = db.session - collection = db.collection( - protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS" - ) - - class Mock(OPDSImporter): - """An importer that may or may not be able to find - real open-access content. - """ - - # Set this variable to control whether any open-access links - # are "found" in the OPDS feed. - open_access_links: Optional[list] = None - - extract_feed_data_called_with = None - _is_open_access_link_called_with = [] - - def extract_feed_data(self, feed, feed_url): - # There's no need to return realistic metadata, - # since _open_access_links is also mocked. - self.extract_feed_data_called_with = (feed, feed_url) - return {"some": "metadata"}, {} - - def _open_access_links(self, metadatas): - self._open_access_links_called_with = metadatas - yield from self.open_access_links - - def _is_open_access_link(self, url, type): - self._is_open_access_link_called_with.append((url, type)) - return False - - class NoLinks(Mock): - "Simulate an OPDS feed that contains no open-access links." - open_access_links = [] - - # We won't be making any HTTP requests, even simulated ones. - do_get = MagicMock() - - # Here, there are no links at all. - importer = NoLinks(session, collection, do_get) - with pytest.raises(IntegrationException) as excinfo: - importer.assert_importable_content("feed", "url") - assert "No open-access links were found in the OPDS feed." 
in str(excinfo.value) - - # We extracted 'metadata' from the feed and URL. - assert ("feed", "url") == importer.extract_feed_data_called_with - - # But there were no open-access links in the 'metadata', - # so we had nothing to check. - assert [] == importer._is_open_access_link_called_with - - oa = Hyperlink.OPEN_ACCESS_DOWNLOAD - - class BadLinks(Mock): - """Simulate an OPDS feed that contains open-access links that - don't actually work, because _is_open_access always returns False - """ - - open_access_links = [ - LinkData(href="url1", rel=oa, media_type="text/html"), - LinkData(href="url2", rel=oa, media_type="application/json"), - LinkData( - href="I won't be tested", rel=oa, media_type="application/json" - ), - ] - - bad_links_importer = BadLinks(session, collection, do_get) - with pytest.raises(IntegrationException) as excinfo: - bad_links_importer.assert_importable_content( - "feed", "url", max_get_attempts=2 - ) - assert ( - "Was unable to GET supposedly open-access content such as url2 (tried 2 times)" - in str(excinfo.value) - ) - - # We called _is_open_access_link on the first and second links - # found in the 'metadata', but failed both times. - # - # We didn't bother with the third link because max_get_attempts was - # set to 2. - try1, try2 = bad_links_importer._is_open_access_link_called_with - assert ("url1", "text/html") == try1 - assert ("url2", "application/json") == try2 - - class GoodLink(Mock): - """Simulate an OPDS feed that contains two bad open-access links - and one good one. - """ - - _is_open_access_link_called_with = [] - open_access_links = [ - LinkData(href="bad", rel=oa, media_type="text/html"), - LinkData(href="good", rel=oa, media_type="application/json"), - LinkData(href="also bad", rel=oa, media_type="text/html"), - ] - - def _is_open_access_link(self, url, type): - self._is_open_access_link_called_with.append((url, type)) - if url == "bad": - return False - return "this is a book" - - good_link_importer = GoodLink(session, collection, do_get) - result = good_link_importer.assert_importable_content( - "feed", "url", max_get_attempts=5 - ) - assert True == result - - # The first link didn't work, but the second one did, - # so we didn't try the third one. - try1, try2 = good_link_importer._is_open_access_link_called_with - assert ("bad", "text/html") == try1 - assert ("good", "application/json") == try2 - - def test__open_access_links(self, db: DatabaseTransactionFixture): - session = db.session - - """Test our ability to find open-access links in Metadata objects.""" - m = OPDSImporter._open_access_links - - # No Metadata objects, no links. - assert [] == list(m([])) - - # This Metadata has no associated CirculationData and will be - # ignored. - no_circulation = Metadata(DataSource.GUTENBERG) - - # This CirculationData has no open-access links, so it will be - # ignored. - circulation = CirculationData(DataSource.GUTENBERG, db.identifier()) - no_open_access_links = Metadata(DataSource.GUTENBERG, circulation=circulation) - - # This has three links, but only the open-access links - # will be returned. 
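
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): what the removed open-access-link
# tests exercised, in miniature -- skip Metadata objects without circulation
# data and yield only links whose rel is the open-access rel. The rel value
# and the link/metadata shapes here are simplified assumptions:
OPEN_ACCESS = "http://opds-spec.org/acquisition/open-access"


def open_access_links(metadatas):
    for metadata in metadatas:
        circulation = getattr(metadata, "circulation", None)
        if circulation is None:
            continue
        yield from (link for link in circulation.links if link.rel == OPEN_ACCESS)
# ---------------------------------------------------------------------------
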
- circulation = CirculationData(DataSource.GUTENBERG, db.identifier()) - oa = Hyperlink.OPEN_ACCESS_DOWNLOAD - for rel in [oa, Hyperlink.IMAGE, oa]: - circulation.links.append(LinkData(href=db.fresh_url(), rel=rel)) - two_open_access_links = Metadata(DataSource.GUTENBERG, circulation=circulation) - - oa_only = [x for x in circulation.links if x.rel == oa] - assert oa_only == list( - m([no_circulation, two_open_access_links, no_open_access_links]) - ) - - def test__is_open_access_link( - self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture - ): - session = db.session - http = DummyHTTPClient() - - # We only check that the response entity-body isn't tiny. 11 - # kilobytes of data is enough. - enough_content = "a" * (1024 * 11) - - # Set up an HTTP response that looks enough like a book - # to convince _is_open_access_link. - http.queue_response(200, content=enough_content) - monitor = opds_importer_fixture.importer(http_get=http.do_get) - - url = db.fresh_url() - type = "text/html" - assert "Found a book-like thing at %s" % url == monitor._is_open_access_link( - url, type - ) - - # We made a GET request to the appropriate URL. - assert url == http.requests.pop() - - # This HTTP response looks OK but it's not big enough to be - # any kind of book. - http.queue_response(200, content="not enough content") - monitor = opds_importer_fixture.importer(http_get=http.do_get) - assert False == monitor._is_open_access_link(url, None) - - # This HTTP response is clearly an error page. - http.queue_response(404, content=enough_content) - monitor = opds_importer_fixture.importer(http_get=http.do_get) - assert False == monitor._is_open_access_link(url, None) - def test_import_open_access_audiobook( self, opds_importer_fixture: OPDSImporterFixture ): @@ -1579,7 +1406,7 @@ def test_import_open_access_audiobook( [august_pool] = imported_pools assert True == august_pool.open_access - assert download_manifest_url == august_pool._open_access_download_url + assert download_manifest_url == august_pool.open_access_download_url [lpdm] = august_pool.delivery_mechanisms assert ( @@ -1623,8 +1450,9 @@ def _wayfless_circulation_api( "OPDS collection with a WAYFless acquisition link", ExternalIntegration.OPDS_IMPORT, data_source_name="test", + external_account_id="http://wayfless.example.com/feed", ) - library.collections.append(collection) + collection.libraries.append(library) DatabaseTransactionFixture.set_settings( collection.integration_configuration, @@ -1776,6 +1604,25 @@ def test_combine_present_value_extends_dictionary(self): ) +class OPDSImportMonitorFixture: + def collection(self, feed_url: str | None = None) -> Collection: + feed_url = feed_url or "http://fake.opds/" + settings = {"external_account_id": feed_url, "data_source": "OPDS"} + return self.db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, settings=settings + ) + + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + + +@pytest.fixture() +def opds_import_monitor_fixture( + db: DatabaseTransactionFixture, +) -> OPDSImportMonitorFixture: + return OPDSImportMonitorFixture(db) + + class TestOPDSImportMonitor: def test_constructor(self, db: DatabaseTransactionFixture): session = db.session @@ -1786,49 +1633,45 @@ def test_constructor(self, db: DatabaseTransactionFixture): "OPDSImportMonitor can only be run in the context of a Collection." 
in str(excinfo.value) ) - - db.default_collection().integration_configuration.protocol = ( - ExternalIntegration.OVERDRIVE - ) + c1 = db.collection(protocol=ExternalIntegration.OVERDRIVE) with pytest.raises(ValueError) as excinfo: - OPDSImportMonitor(session, db.default_collection(), OPDSImporter) + OPDSImportMonitor(session, c1, OPDSImporter) assert ( - "Collection Default Collection is configured for protocol Overdrive, not OPDS Import." + f"Collection {c1.name} is configured for protocol Overdrive, not OPDS Import." in str(excinfo.value) ) - db.default_collection().integration_configuration.protocol = ( - ExternalIntegration.OPDS_IMPORT - ) - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, "data_source", None + c2 = db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, settings={"data_source": None} ) with pytest.raises(ValueError) as excinfo: - OPDSImportMonitor(session, db.default_collection(), OPDSImporter) - assert "Collection Default Collection has no associated data source." in str( + OPDSImportMonitor(session, c2, OPDSImporter) + assert f"Collection {c2.name} has no associated data source." in str( excinfo.value ) - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, "data_source", "OPDS" + c3 = db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, + settings={ + "data_source": "OPDS", + "external_account_id": "https://opds.import.com/feed?size=100", + }, ) - db.default_collection().external_account_id = ( - "https://opds.import.com/feed?size=100" - ) - monitor = OPDSImportMonitor(session, db.default_collection(), OPDSImporter) + monitor = OPDSImportMonitor(session, c3, OPDSImporter) assert monitor._feed_base_url == "https://opds.import.com/" - def test_get(self, db: DatabaseTransactionFixture): + def test_get( + self, + db: DatabaseTransactionFixture, + ): session = db.session ## Test whether relative urls work - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, "data_source", "OPDS" - ) - db.default_collection().external_account_id = ( - "https://opds.import.com:9999/feed" + collection = db.collection( + external_account_id="https://opds.import.com:9999/feed", + data_source_name="OPDS", ) - monitor = OPDSImportMonitor(session, db.default_collection(), OPDSImporter) + monitor = OPDSImportMonitor(session, collection, OPDSImporter) with patch("core.opds_import.HTTP.get_with_timeout") as mock_get: monitor._get("/absolute/path", {}) @@ -1842,89 +1685,25 @@ def test_get(self, db: DatabaseTransactionFixture): "https://opds.import.com:9999/relative/path", ) - def test_external_integration(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) - - monitor = OPDSImportMonitor( - session, - db.default_collection(), - import_class=OPDSImporter, - ) - assert ( - db.default_collection().external_integration - == monitor.external_integration(session) - ) - - def test__run_self_tests(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) - """Verify the self-tests of an OPDS collection.""" - - class MockImporter(OPDSImporter): - def assert_importable_content(self, content, url): - self.assert_importable_content_called_with = (content, url) - return "looks good" - - class Mock(OPDSImportMonitor): - follow_one_link_called_with = [] - 
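
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): the resolution behavior test_get
# checks earlier in this hunk matches what urllib.parse.urljoin produces
# against the collection's feed URL, for both absolute and relative paths:
from urllib.parse import urljoin

base = "https://opds.import.com:9999/feed"
assert urljoin(base, "/absolute/path") == "https://opds.import.com:9999/absolute/path"
assert urljoin(base, "relative/path") == "https://opds.import.com:9999/relative/path"
# ---------------------------------------------------------------------------
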
- # First we will get the first page of the OPDS feed. - def follow_one_link(self, url): - self.follow_one_link_called_with.append(url) - return ([], "some content") - - feed_url = db.fresh_url() - db.default_collection().external_account_id = feed_url - monitor = Mock(session, db.default_collection(), import_class=MockImporter) - [first_page, found_content] = monitor._run_self_tests(session) - expect = "Retrieve the first page of the OPDS feed (%s)" % feed_url - assert expect == first_page.name - assert True == first_page.success - assert ([], "some content") == first_page.result - - # follow_one_link was called once. - [link] = monitor.follow_one_link_called_with - assert monitor.feed_url == link - - # Then, assert_importable_content was called on the importer. - assert "Checking for importable content" == found_content.name - assert True == found_content.success - assert ( - "some content", - feed_url, - ) == monitor.importer.assert_importable_content_called_with # type: ignore[attr-defined] - assert "looks good" == found_content.result - - def test_hook_methods(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) + def test_hook_methods(self, db: DatabaseTransactionFixture): """By default, the OPDS URL and data source used by the importer come from the collection configuration. """ + collection = db.collection( + external_account_id="http://url/", data_source_name="OPDS" + ) monitor = OPDSImportMonitor( - session, - db.default_collection(), + db.session, + collection, import_class=OPDSImporter, ) - assert db.default_collection().external_account_id == monitor.opds_url( - db.default_collection() - ) - assert db.default_collection().data_source == monitor.data_source( - db.default_collection() - ) + assert collection.data_source == monitor.data_source(collection) - def test_feed_contains_new_data(self, opds_importer_fixture: OPDSImporterFixture): + def test_feed_contains_new_data( + self, + opds_importer_fixture: OPDSImporterFixture, + ): data, db, session = ( opds_importer_fixture, opds_importer_fixture.db, @@ -1937,21 +1716,25 @@ class MockOPDSImportMonitor(OPDSImportMonitor): def _get(self, url, headers): return 200, {"content-type": AtomFeed.ATOM_TYPE}, feed + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name + ) monitor = OPDSImportMonitor( session, - db.default_collection(), + collection, import_class=OPDSImporter, ) timestamp = monitor.timestamp() # Nothing has been imported yet, so all data is new. - assert True == monitor.feed_contains_new_data(feed) - assert None == timestamp.start + assert monitor.feed_contains_new_data(feed) is True + assert timestamp.start is None # Now import the editions. monitor = MockOPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=OPDSImporter, ) monitor.run() @@ -1961,10 +1744,10 @@ def _get(self, url, headers): # The timestamp has been updated, although unlike most # Monitors the timestamp is purely informational. - assert timestamp.finish != None + assert timestamp.finish is not None editions = session.query(Edition).all() - data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) + data_source = DataSource.lookup(session, data_source_name) # If there are CoverageRecords that record work are after the updated # dates, there's nothing new. 
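
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): the decision table the
# feed_contains_new_data assertions below walk through, condensed.
# "coverage" is a simplified (timestamp, status) stand-in for a
# CoverageRecord:
TRANSIENT = "transient failure"


def entry_is_new(entry_updated, coverage, force_reimport=False):
    if force_reimport or coverage is None:
        return True
    timestamp, status = coverage
    if status == TRANSIENT:
        return True  # transient failures are always retried
    # Success and persistent failure alike: re-import only if the entry
    # was updated after the coverage record's timestamp.
    return entry_updated > timestamp


assert entry_is_new(2016, None)
assert not entry_is_new(2015, (2016, "success"))
assert entry_is_new(2016, (1970, "persistent failure"))
assert entry_is_new(2015, (2016, TRANSIENT))
# ---------------------------------------------------------------------------
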
@@ -1972,7 +1755,7 @@ def _get(self, url, headers): editions[0], data_source, CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) record.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -1980,22 +1763,22 @@ def _get(self, url, headers): editions[1], data_source, CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) record2.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) - assert False == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is False # If the monitor is set up to force reimport, it doesn't # matter that there's nothing new--we act as though there is. monitor.force_reimport = True - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True monitor.force_reimport = False # If an entry was updated after the date given in that entry's # CoverageRecord, there's new data. record2.timestamp = datetime_utc(1970, 1, 1, 1, 1, 1) - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True # If a CoverageRecord is a transient failure, we try again # regardless of whether it's been updated. @@ -2003,16 +1786,16 @@ def _get(self, url, headers): r.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) r.exception = "Failure!" r.status = CoverageRecord.TRANSIENT_FAILURE - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True # If a CoverageRecord is a persistent failure, we don't try again... for r in [record, record2]: r.status = CoverageRecord.PERSISTENT_FAILURE - assert False == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is False # ...unless the feed updates. record.timestamp = datetime_utc(1970, 1, 1, 1, 1, 1) - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True def test_follow_one_link(self, opds_importer_fixture: OPDSImporterFixture): data, db, session = ( @@ -2020,10 +1803,13 @@ def test_follow_one_link(self, opds_importer_fixture: OPDSImporterFixture): opds_importer_fixture.db, opds_importer_fixture.db.session, ) - + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name + ) monitor = OPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=OPDSImporter, ) feed = data.content_server_mini_feed @@ -2046,14 +1832,14 @@ def follow(): assert 2 == session.query(Edition).count() editions = session.query(Edition).all() - data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) + data_source = DataSource.lookup(session, data_source_name) for edition in editions: record, ignore = CoverageRecord.add_for( edition, data_source, CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) record.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -2092,14 +1878,17 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): opds_importer_fixture.db.session, ) # Check coverage records are created. 
- + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://root-url/index.xml", + data_source_name=data_source_name, + ) monitor = OPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=DoomedOPDSImporter, ) - db.default_collection().external_account_id = "http://root-url/index.xml" - data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) + data_source = DataSource.lookup(session, data_source_name) feed = data.content_server_mini_feed @@ -2120,10 +1909,10 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): editions[0].primary_identifier, data_source, operation=CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) assert CoverageRecord.SUCCESS == record.status - assert None == record.exception + assert record.exception is None # The edition's primary identifier has some cover links whose # relative URL have been resolved relative to the Collection's @@ -2156,7 +1945,7 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): identifier, data_source, operation=CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) assert "Utter failure!" in failure.exception @@ -2164,13 +1953,7 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): # import_one_feed assert 2 == len(failures) - def test_run_once(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) - + def test_run_once(self, db: DatabaseTransactionFixture): class MockOPDSImportMonitor(OPDSImportMonitor): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -2188,9 +1971,14 @@ def import_one_feed(self, feed): self.imports.append(feed) return [object(), object()], {"identifier": "Failure"} + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name + ) + monitor = MockOPDSImportMonitor( - session, - collection=db.default_collection(), + db.session, + collection=collection, import_class=OPDSImporter, ) @@ -2208,20 +1996,19 @@ def import_one_feed(self, feed): # The TimestampData returned by run_once does not include any # timing information; that's provided by run(). - assert None == progress.start - assert None == progress.finish + assert progress.start is None + assert progress.finish is None - def test_update_headers(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, + def test_update_headers(self, db: DatabaseTransactionFixture): + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name ) # Test the _update_headers helper method. monitor = OPDSImportMonitor( - session, - collection=db.default_collection(), + db.session, + collection=collection, import_class=OPDSImporter, ) @@ -2269,16 +2056,17 @@ def test_retry(self, opds_importer_fixture: OPDSImporterFixture): feed = data.content_server_mini_feed feed_url = "https://example.com/feed.opds" - # After we overrode the value of configuration setting we can instantiate OPDSImportMonitor. - # It'll load new "Max retry count"'s value from the database. 
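
# ---------------------------------------------------------------------------
# Editor's sketch (not project code): how a connection_max_retry_count
# setting conventionally maps onto requests/urllib3 -- the mechanism the
# retry test here exercises. This is the standard library-level pattern,
# not the project's actual wiring:
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


def session_with_retries(retry_count: int) -> requests.Session:
    session = requests.Session()
    adapter = HTTPAdapter(max_retries=Retry(total=retry_count, backoff_factor=0))
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session
# ---------------------------------------------------------------------------
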
- DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - "connection_max_retry_count", - retry_count, + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", + data_source_name=data_source_name, + settings={"connection_max_retry_count": retry_count}, ) + + # The importer takes its retry count from the collection settings. monitor = OPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=OPDSImporter, ) @@ -2307,7 +2095,9 @@ def __init__(self, db: DatabaseTransactionFixture): self.db = db self.session = db.session self.collection = db.collection( - protocol=OPDSAPI.label(), data_source_name="OPDS" + protocol=OPDSAPI.label(), + data_source_name="OPDS", + external_account_id="http://opds.example.com/feed", ) self.api = OPDSAPI(self.session, self.collection) diff --git a/tests/core/test_opds_validate.py b/tests/core/test_opds_validate.py index 68510b376..a459cdebf 100644 --- a/tests/core/test_opds_validate.py +++ b/tests/core/test_opds_validate.py @@ -19,11 +19,16 @@ def test_opds2_schema( db: DatabaseTransactionFixture, opds_files_fixture: OPDSFilesFixture, ): - db.default_collection().protocol = ExternalIntegration.OPDS2_IMPORT - db.default_collection().data_source = DataSource.FEEDBOOKS + collection = db.collection( + protocol=ExternalIntegration.OPDS2_IMPORT, + data_source_name=DataSource.FEEDBOOKS, + settings={ + "external_account_id": "http://example.com/feed", + }, + ) validator = OPDS2SchemaValidation( db.session, - collection=db.default_collection(), + collection=collection, import_class=OPDS2Importer, parser=RWPMManifestParser(OPDS2FeedParserFactory()), ) @@ -38,15 +43,18 @@ def test_odl2_schema( db: DatabaseTransactionFixture, opds_files_fixture: OPDSFilesFixture, ): - db.default_collection().integration_configuration.settings_dict = { - "username": "username", - "password": "password", - } - db.default_collection().protocol = ExternalIntegration.ODL2 - db.default_collection().data_source = DataSource.FEEDBOOKS + collection = db.collection( + protocol=ExternalIntegration.ODL2, + data_source_name=DataSource.FEEDBOOKS, + settings={ + "username": "username", + "password": "password", + "external_account_id": "http://example.com/feed", + }, + ) validator = ODL2SchemaValidation( db.session, - collection=db.default_collection(), + collection=collection, import_class=ODL2Importer, parser=RWPMManifestParser(ODLFeedParserFactory()), ) diff --git a/tests/core/test_patron_activity_sync.py b/tests/core/test_patron_activity_sync.py index 0648a8ef4..406edf8c3 100644 --- a/tests/core/test_patron_activity_sync.py +++ b/tests/core/test_patron_activity_sync.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Optional, cast +from typing import cast from unittest.mock import call, patch import pytest @@ -31,12 +31,12 @@ class TestPatronActivitySync: def test_item_query(self, sync_fixture: PatronSyncFixture): db = sync_fixture.db - work: Optional[Work] = db.work( + work: Work | None = db.work( with_license_pool=True, with_open_access_download=True ) assert work is not None - pool: Optional[LicensePool] = work.active_license_pool() + pool: LicensePool | None = work.active_license_pool() assert pool is not None patron1: Patron = db.patron() # 0 loans, holds or tokens diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index d732068e6..18344d44b 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -1204,7 +1204,10 @@ def 
test_success(self, db: DatabaseTransactionFixture): collection = get_one(db.session, Collection) assert "New Collection" == collection.name assert "url" == collection.integration_configuration.settings_dict["url"] - assert "acctid" == collection.external_account_id + assert ( + "acctid" + == collection.integration_configuration.settings_dict["external_account_id"] + ) assert ( "username" == collection.integration_configuration.settings_dict["username"] ) @@ -1997,7 +2000,7 @@ def test_do_run( work = db.work(with_license_pool=True) work2 = db.work(with_license_pool=True) wcr = WorkCoverageRecord - decoys = [wcr.QUALITY_OPERATION, wcr.GENERATE_MARC_OPERATION] + decoys = [wcr.QUALITY_OPERATION, wcr.SUMMARY_OPERATION] # Set up some coverage records. for operation in decoys + [wcr.UPDATE_SEARCH_INDEX_OPERATION]: @@ -2041,7 +2044,7 @@ def test_do_run(self, db: DatabaseTransactionFixture): work = db.work() work2 = db.work() wcr = WorkCoverageRecord - decoys = [wcr.QUALITY_OPERATION, wcr.GENERATE_MARC_OPERATION] + decoys = [wcr.QUALITY_OPERATION, wcr.SUMMARY_OPERATION] # Set up some coverage records. for operation in decoys + [wcr.UPDATE_SEARCH_INDEX_OPERATION]: diff --git a/tests/core/test_selftest.py b/tests/core/test_selftest.py index 910b0d04c..49ea4cf14 100644 --- a/tests/core/test_selftest.py +++ b/tests/core/test_selftest.py @@ -6,7 +6,7 @@ """ import datetime -from typing import Generator, Optional +from collections.abc import Generator from unittest.mock import MagicMock from sqlalchemy.orm import Session @@ -38,7 +38,7 @@ def test_success_representation(self, db: DatabaseTransactionFixture): ) # A SelfTestResult may have an associated Collection. - db.default_collection().name = "CollectionA" + db.default_collection().integration_configuration.name = "CollectionA" result.collection = db.default_collection() assert ( "" @@ -104,7 +104,7 @@ def test_run_self_tests(self, db: DatabaseTransactionFixture): """ class Tester(HasSelfTests): - integration: Optional[ExternalIntegration] + integration: ExternalIntegration | None def __init__(self, extra_arg=None): """This constructor works.""" diff --git a/tests/core/util/test_log.py b/tests/core/util/test_log.py new file mode 100644 index 000000000..f5af553b9 --- /dev/null +++ b/tests/core/util/test_log.py @@ -0,0 +1,53 @@ +import pytest +from _pytest.logging import LogCaptureFixture + +from core.service.logging.configuration import LogLevel +from core.util.log import LoggerMixin, log_elapsed_time + + +class MockClass(LoggerMixin): + @classmethod + @log_elapsed_time(log_level=LogLevel.info, message_prefix="Test") + def test_method(cls): + pass + + @log_elapsed_time( + log_level=LogLevel.debug, message_prefix="Test 12345", skip_start=True + ) + def test_method_2(self): + pass + + +def test_log_elapsed_time_cls(caplog: LogCaptureFixture): + caplog.set_level(LogLevel.info.value) + + MockClass.test_method() + assert len(caplog.records) == 2 + + [first, second] = caplog.records + assert first.name == "tests.core.util.test_log.MockClass" + assert first.message == "Test: Starting..." + assert first.levelname == LogLevel.info.value + + assert second.name == "tests.core.util.test_log.MockClass" + assert "Test: Completed. 
(elapsed time:" in second.message + assert second.levelname == LogLevel.info.value + + +def test_log_elapsed_time_instance(caplog: LogCaptureFixture): + caplog.set_level(LogLevel.debug.value) + + MockClass().test_method_2() + assert len(caplog.records) == 1 + [record] = caplog.records + assert record.name == "tests.core.util.test_log.MockClass" + assert "Test 12345: Completed. (elapsed time:" in record.message + assert record.levelname == LogLevel.debug.value + + +def test_log_elapsed_time_invalid(caplog: LogCaptureFixture): + caplog.set_level(LogLevel.info.value) + + with pytest.raises(RuntimeError): + log_elapsed_time(log_level=LogLevel.info, message_prefix="Test")(lambda: None)() + assert len(caplog.records) == 0 diff --git a/tests/core/util/test_mock_web_server.py b/tests/core/util/test_mock_web_server.py index 4c866a5dd..ca71728b2 100644 --- a/tests/core/util/test_mock_web_server.py +++ b/tests/core/util/test_mock_web_server.py @@ -1,189 +1,7 @@ -import logging -import threading -from http.server import BaseHTTPRequestHandler, HTTPServer -from typing import Dict, List, Optional, Tuple - import pytest from core.util.http import HTTP, RequestNetworkException - - -class MockAPIServerRequest: - """A request made to a server.""" - - headers: Dict[str, str] - payload: bytes - method: str - path: str - - def __init__(self): - self.headers = {} - self.payload = b"" - self.method = "GET" - self.path = "/" - - -class MockAPIServerResponse: - """A response returned from a server.""" - - status_code: int - content: bytes - headers: Dict[str, str] - close_obnoxiously: bool - - def __init__(self): - self.status_code = 200 - self.content = b"" - self.headers = {} - self.close_obnoxiously = False - - def set_content(self, data: bytes): - """A convenience method that automatically sets the correct content length for data.""" - self.content = data - self.headers["content-length"] = str(len(data)) - - -class MockAPIServerRequestHandler(BaseHTTPRequestHandler): - """Basic request handler.""" - - def _send_everything(self, _response: MockAPIServerResponse): - if _response.close_obnoxiously: - return - - self.send_response(_response.status_code) - for key in _response.headers.keys(): - _value = _response.headers.get(key) - if _value: - self.send_header(key, _value) - - self.end_headers() - self.wfile.write(_response.content) - self.wfile.flush() - - def _read_everything(self) -> MockAPIServerRequest: - _request = MockAPIServerRequest() - _request.method = self.command - for k in self.headers.keys(): - header = self.headers.get(k, None) - if header is not None: - _request.headers[k] = header - _request.path = self.path - _readable = int(self.headers.get("Content-Length") or 0) - if _readable > 0: - _request.payload = self.rfile.read(_readable) - return _request - - def _handle_everything(self): - _request = self._read_everything() - _response = self.server.mock_api_server.dequeue_response(_request) - if _response is None: - logging.error( - f"failed to find a response for {_request.method} {_request.path}" - ) - raise AssertionError( - f"No available response for {_request.method} {_request.path}!" 
- ) - self._send_everything(_response) - - def do_GET(self): - logging.info("GET") - self._handle_everything() - - def do_POST(self): - logging.info("POST") - self._handle_everything() - - def do_PUT(self): - logging.info("PUT") - self._handle_everything() - - def version_string(self) -> str: - return "" - - def date_time_string(self, timestamp: Optional[int] = 0) -> str: - return "Sat, 1 January 2000 00:00:00 UTC" - - -class MockAPIInternalServer(HTTPServer): - mock_api_server: "MockAPIServer" - - def __init__(self, server_address: Tuple[str, int], bind_and_activate: bool): - super().__init__(server_address, MockAPIServerRequestHandler, bind_and_activate) - self.allow_reuse_address = True - - -class MockAPIServer: - """Embedded web server.""" - - _address: str - _port: int - _server: HTTPServer - _server_thread: threading.Thread - _responses: Dict[str, Dict[str, List[MockAPIServerResponse]]] - _requests: List[MockAPIServerRequest] - - def __init__(self, address: str, port: int): - self._address = address - self._port = port - self._server = MockAPIInternalServer( - (self._address, self._port), bind_and_activate=True - ) - self._server.mock_api_server = self - self._server_thread = threading.Thread(target=self._server.serve_forever) - self._responses = {} - self._requests = [] - - def start(self) -> None: - self._server_thread.start() - - def stop(self) -> None: - self._server.shutdown() - self._server.server_close() - self._server_thread.join(timeout=10) - - def enqueue_response( - self, request_method: str, request_path: str, response: MockAPIServerResponse - ): - _by_method = self._responses.get(request_method) or {} - _by_path = _by_method.get(request_path) or [] - _by_path.append(response) - _by_method[request_path] = _by_path - self._responses[request_method] = _by_method - - def dequeue_response( - self, request: MockAPIServerRequest - ) -> Optional[MockAPIServerResponse]: - self._requests.append(request) - _by_method = self._responses.get(request.method) or {} - _by_path = _by_method.get(request.path) or [] - if len(_by_path) > 0: - return _by_path.pop(0) - return None - - def address(self) -> str: - return self._address - - def port(self) -> int: - return self._port - - def url(self, path: str) -> str: - return f"http://{self.address()}:{self.port()}{path}" - - def requests(self) -> List[MockAPIServerRequest]: - return list(self._requests) - - -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse class TestMockAPIServer: @@ -224,11 +42,8 @@ def test_server_post(self, mock_web_server: MockAPIServer): def test_server_get_no_response(self, mock_web_server: MockAPIServer): url = mock_web_server.url("/x/y/z") - try: - HTTP.request_with_timeout("GET", url) - except RequestNetworkException: - return - raise AssertionError("Failed to fail!") + with pytest.raises(RequestNetworkException): + HTTP.request_with_timeout("GET", url, timeout=1, backoff_factor=0) def test_server_get_dies(self, mock_web_server: MockAPIServer): _r = MockAPIServerResponse() @@ -236,8 +51,5 @@ def test_server_get_dies(self, mock_web_server: MockAPIServer): mock_web_server.enqueue_response("GET", 
"/x/y/z", _r) url = mock_web_server.url("/x/y/z") - try: - HTTP.request_with_timeout("GET", url) - except RequestNetworkException: - return - raise AssertionError("Failed to fail!") + with pytest.raises(RequestNetworkException): + HTTP.request_with_timeout("GET", url, timeout=1, backoff_factor=0) diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index 07dd2af3f..432a10dd8 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -1,7 +1,8 @@ import logging import re +from collections.abc import Generator from datetime import datetime -from typing import Generator +from typing import Any from unittest import mock from unittest.mock import MagicMock @@ -14,7 +15,7 @@ from requests_mock import Mocker from core.config import Configuration -from core.model import create, get_one, get_one_or_create +from core.model import Hold, create, get_one, get_one_or_create from core.model.configuration import ConfigurationSetting from core.model.constants import NotificationConstants from core.model.devicetokens import DeviceToken, DeviceTokenTypes @@ -272,68 +273,73 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): work1: Work = db.work(with_license_pool=True) work2: Work = db.work(with_license_pool=True) p1 = work1.active_license_pool() + assert p1 is not None p2 = work2.active_license_pool() - if p1 and p2: # mypy complains if we don't do this - hold1, _ = p1.on_hold_to(patron1, position=0) - hold2, _ = p2.on_hold_to(patron2, position=0) - - with mock.patch("core.util.notifications.messaging") as messaging: + assert p2 is not None + hold1, _ = p1.on_hold_to(patron1, position=0) + hold2, _ = p2.on_hold_to(patron2, position=0) + + with mock.patch("core.util.notifications.messaging") as mock_messaging: + # Mock the notification method to return the kwargs passed to it + # so that we can make sure we are making the expected calls + mock_messaging.Notification.side_effect = lambda **kwargs: kwargs PushNotifications.send_holds_notifications([hold1, hold2]) assert ( hold1.patron_last_notified == hold2.patron_last_notified == utc_now().date() ) loans_api = "http://localhost/default/loans" - assert messaging.Message.call_count == 3 - assert messaging.Message.call_args_list == [ - mock.call( - token="test-token-1", - notification=messaging.Notification( - title=f'Your hold on "{work1.title}" is available!', - ), - data=dict( - title=f'Your hold on "{work1.title}" is available!', - event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold1.patron.external_identifier, - authorization_identifier=hold1.patron.authorization_identifier, - identifier=hold1.license_pool.identifier.identifier, - type=hold1.license_pool.identifier.type, - library=hold1.patron.library.short_name, - ), - ), - mock.call( - token="test-token-2", - notification=messaging.Notification( - title=f'Your hold on "{work1.title}" is available!', - ), - data=dict( - title=f'Your hold on "{work1.title}" is available!', - event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold1.patron.external_identifier, - authorization_identifier=hold1.patron.authorization_identifier, - identifier=hold1.license_pool.identifier.identifier, - type=hold1.license_pool.identifier.type, - library=hold1.patron.library.short_name, - ), - ), - mock.call( - token="test-token-3", - notification=messaging.Notification( - title=f'Your hold on "{work2.title}" is 
available!', - ), - data=dict( - title=f'Your hold on "{work2.title}" is available!', - event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold2.patron.external_identifier, - identifier=hold2.license_pool.identifier.identifier, - type=hold2.license_pool.identifier.type, - library=hold2.patron.library.short_name, - ), - ), - ] + + def assert_message_call( + actual: Any, + token: str, + work: Work, + hold: Hold, + include_auth_id: bool = True, + ) -> None: + data = dict( + title="Your hold is available!", + body=f'Your hold on "{work.title}" is available!', + event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, + loans_endpoint=loans_api, + identifier=hold.license_pool.identifier.identifier, + type=hold.license_pool.identifier.type, + library=hold.patron.library.short_name, + external_identifier=hold.patron.external_identifier, + ) + + if include_auth_id: + data["authorization_identifier"] = hold.patron.authorization_identifier + + notification = dict( + title=data["title"], + body=data["body"], + ) + + assert actual == mock.call( + token=token, + notification=notification, + data=data, + ) + + # We should have sent 3 messages, one for each token + assert mock_messaging.Message.call_count == 3 + + # We should have sent 2 notifications, one for each patron. + # Because patron1 has 2 tokens, they will get the same notification for + # each token. + assert mock_messaging.Notification.call_count == 2 + + [ + message_call1, + message_call2, + message_call3, + ] = mock_messaging.Message.call_args_list + assert_message_call(message_call1, "test-token-1", work1, hold1) + assert_message_call(message_call2, "test-token-2", work1, hold1) + assert_message_call( + message_call3, "test-token-3", work2, hold2, include_auth_id=False + ) def test_send_messages( self, diff --git a/tests/core/util/test_uuid.py b/tests/core/util/test_uuid.py new file mode 100644 index 000000000..03bc27aa0 --- /dev/null +++ b/tests/core/util/test_uuid.py @@ -0,0 +1,40 @@ +from uuid import UUID + +import pytest + +from core.util.uuid import uuid_decode, uuid_encode + + +@pytest.mark.parametrize( + "uuid,expected", + [ + ("804184d9-ac4f-4cd3-8ad0-a362d71a7431", "gEGE2axPTNOK0KNi1xp0MQ"), + ("e34f3186-c563-4211-a52a-3a866b214963", "408xhsVjQhGlKjqGayFJYw"), + ("c4b0e2a0-9e4a-4b0e-8f4e-2d6d9d5a8a1e", "xLDioJ5KSw6PTi1tnVqKHg"), + ("55ff6224-8ced-41f8-9fb2-eda74657ff56", "Vf9iJIztQfifsu2nRlf_Vg"), + ], +) +def test_uuid_encode_decode(uuid: str, expected: str): + # Create a UUID object from the string + uuid_obj = UUID(uuid) + + # Test that we can encode the uuid and get the expected result + encoded = uuid_encode(uuid_obj) + assert len(encoded) == 22 + assert encoded == expected + + # Test that we can round-trip the encoded string back to a UUID + decoded = uuid_decode(encoded) + assert isinstance(decoded, UUID) + assert str(decoded) == uuid + assert decoded == uuid_obj + + +def test_uuid_decode_error(): + # Invalid length + with pytest.raises(ValueError): + uuid_decode("gE") + + # Invalid characters + with pytest.raises(ValueError): + uuid_decode("/~") diff --git a/tests/core/util/test_xml_parser.py b/tests/core/util/test_xml_parser.py index d7aad4670..9b077f6c0 100644 --- a/tests/core/util/test_xml_parser.py +++ b/tests/core/util/test_xml_parser.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Optional - from lxml.etree import _Element from core.util.xmlparser import XMLProcessor @@ -17,9 +15,7 @@ def __init__(self, xpath_expression: str) -> None: 
def xpath_expression(self) -> str: return self._xpath_expression - def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> _Element: + def process_one(self, tag: _Element, namespaces: dict[str, str] | None) -> _Element: return tag diff --git a/tests/customlists/conftest.py b/tests/customlists/conftest.py new file mode 100644 index 000000000..12f2b7f9e --- /dev/null +++ b/tests/customlists/conftest.py @@ -0,0 +1,3 @@ +pytest_plugins = [ + "tests.fixtures.webserver", +] diff --git a/tests/customlists/test_explain.py b/tests/customlists/test_explain.py index 8628c42c6..efc92fd06 100644 --- a/tests/customlists/test_explain.py +++ b/tests/customlists/test_explain.py @@ -1,5 +1,4 @@ from pathlib import Path -from typing import List from customlists.customlist_explain import CustomListImportExplainer @@ -46,8 +45,8 @@ def test_explain_simple_report(self, tmpdir): ] ).execute() - text_expected: List[str] = open(output_path).readlines() - text_received: List[str] = open(tmpdir.join("output.csv")).readlines() + text_expected: list[str] = open(output_path).readlines() + text_received: list[str] = open(tmpdir.join("output.csv")).readlines() assert len(text_expected) == len(text_received) for i in range(0, len(text_expected)): assert text_expected[i] == text_received[i] diff --git a/tests/customlists/test_export.py b/tests/customlists/test_export.py index 2495cc089..f7befb879 100644 --- a/tests/customlists/test_export.py +++ b/tests/customlists/test_export.py @@ -1,5 +1,4 @@ import json -import logging from pathlib import Path import pytest @@ -9,20 +8,7 @@ CustomListExportFailed, CustomListExports, ) -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse - - -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse class TestExports: diff --git a/tests/customlists/test_import.py b/tests/customlists/test_import.py index ed05e2fa9..7c176f97d 100644 --- a/tests/customlists/test_import.py +++ b/tests/customlists/test_import.py @@ -1,7 +1,5 @@ import json -import logging from pathlib import Path -from typing import List import pytest @@ -11,20 +9,7 @@ CustomListReport, CustomListsReport, ) -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse - - -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse class TestImports: @@ -385,10 +370,10 @@ def test_import_cannot_update_custom_list( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: 
list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -511,10 +496,10 @@ def test_import_cannot_update_existing_list( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -623,10 +608,10 @@ def test_import_dry_run(self, mock_web_server: MockAPIServer, tmpdir): schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 1 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -738,10 +723,10 @@ def test_import_error_collection_missing( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "The collection 'B2' appears to be missing on the importing CM" @@ -859,10 +844,10 @@ def test_import_updates_and_includes_csrf( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 1 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -978,10 +963,10 @@ def test_import_updates_with_missing_collection( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "The collection 'B2' appears to be missing on the importing CM" @@ -1188,10 +1173,10 @@ def test_import_bad_book_identifier(self, mock_web_server: MockAPIServer, tmpdir schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = 
list(report.problems()) assert 3 == len(problems) assert ( "The collection 'B2' appears to be missing on the importing CM" diff --git a/tests/finland/conftest.py b/tests/finland/conftest.py deleted file mode 100644 index 7efef71ed..000000000 --- a/tests/finland/conftest.py +++ /dev/null @@ -1,10 +0,0 @@ -from pytest import register_assert_rewrite - -register_assert_rewrite("tests.fixtures.database") -register_assert_rewrite("tests.fixtures.files") -register_assert_rewrite("tests.fixtures.vendor_id") - -pytest_plugins = [ - "tests.fixtures.api_controller", - "tests.fixtures.database", -] diff --git a/tests/fixtures/announcements.py b/tests/fixtures/announcements.py index 61bf5084e..9d85281e3 100644 --- a/tests/fixtures/announcements.py +++ b/tests/fixtures/announcements.py @@ -1,5 +1,4 @@ import datetime -from typing import Optional import pytest from sqlalchemy.orm import Session @@ -22,10 +21,10 @@ class AnnouncementFixture: def create_announcement( self, db: Session, - start: Optional[datetime.date] = None, - finish: Optional[datetime.date] = None, + start: datetime.date | None = None, + finish: datetime.date | None = None, content: str = "test", - library: Optional[Library] = None, + library: Library | None = None, ) -> Announcement: if start is None: start = self.today @@ -40,7 +39,7 @@ def create_announcement( return announcement def active_announcement( - self, db: Session, library: Optional[Library] = None + self, db: Session, library: Library | None = None ) -> Announcement: # This announcement is active. return self.create_announcement( @@ -52,7 +51,7 @@ def active_announcement( ) def expired_announcement( - self, db: Session, library: Optional[Library] = None + self, db: Session, library: Library | None = None ) -> Announcement: # This announcement expired yesterday. return self.create_announcement( @@ -64,7 +63,7 @@ def expired_announcement( ) def forthcoming_announcement( - self, db: Session, library: Optional[Library] = None + self, db: Session, library: Library | None = None ) -> Announcement: # This announcement should be displayed starting tomorrow. 
return self.create_announcement( diff --git a/tests/fixtures/api_admin.py b/tests/fixtures/api_admin.py index 2f0df6d32..081841cc9 100644 --- a/tests/fixtures/api_admin.py +++ b/tests/fixtures/api_admin.py @@ -1,13 +1,12 @@ from contextlib import contextmanager -from typing import List import flask import pytest from api.admin.controller import setup_admin_controllers from api.app import initialize_admin +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from core.integration.goals import Goals from core.model import create from core.model.admin import Admin, AdminRole @@ -22,7 +21,7 @@ class AdminControllerFixture: admin: Admin manager: CirculationManager - BOOKS: List[WorkSpec] = [] + BOOKS: list[WorkSpec] = [] def __init__(self, controller_fixture: ControllerFixture): self.ctrl = controller_fixture diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index cffdf7175..ae4d01ba3 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -2,8 +2,9 @@ import datetime import logging +from collections.abc import Callable from contextlib import contextmanager -from typing import Any, Callable, Optional +from typing import Any import flask import pytest @@ -11,8 +12,9 @@ from api.adobe_vendor_id import AuthdataUtility from api.app import app +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager, CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.integration.registry.patron_auth import PatronAuthRegistry from api.lanes import create_default_lanes from api.simple_authentication import SimpleAuthenticationProvider @@ -57,7 +59,7 @@ class ControllerFixture: """A test that requires a functional app server.""" app: PalaceFlask - authdata: Optional[AuthdataUtility] + authdata: AuthdataUtility | None collection: Collection collections: list[Collection] controller: CirculationManagerController diff --git a/tests/fixtures/api_odl.py b/tests/fixtures/api_odl.py index 9bdef0ad8..00a6decc4 100644 --- a/tests/fixtures/api_odl.py +++ b/tests/fixtures/api_odl.py @@ -3,7 +3,7 @@ import datetime import json import uuid -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any import pytest from jinja2 import Template @@ -24,10 +24,10 @@ class LicenseHelper: def __init__( self, - identifier: Optional[str] = None, - checkouts: Optional[int] = None, - concurrency: Optional[int] = None, - expires: Optional[Union[datetime.datetime, str]] = None, + identifier: str | None = None, + checkouts: int | None = None, + concurrency: int | None = None, + expires: datetime.datetime | str | None = None, ) -> None: """Initialize a new instance of LicenseHelper class. 
@@ -37,12 +37,12 @@ def __init__( :param expires: Date & time when a license expires """ self.identifier: str = identifier if identifier else f"urn:uuid:{uuid.uuid1()}" - self.checkouts: Optional[int] = checkouts - self.concurrency: Optional[int] = concurrency + self.checkouts: int | None = checkouts + self.concurrency: int | None = concurrency if isinstance(expires, datetime.datetime): self.expires = expires.isoformat() else: - self.expires: Optional[str] = expires # type: ignore + self.expires: str | None = expires # type: ignore class LicenseInfoHelper: @@ -53,12 +53,12 @@ def __init__( license: LicenseHelper, available: int, status: str = "available", - left: Optional[int] = None, + left: int | None = None, ) -> None: """Initialize a new instance of LicenseInfoHelper class.""" self.license: LicenseHelper = license self.status: str = status - self.left: Optional[int] = left + self.left: int | None = left self.available: int = available def __str__(self) -> str: @@ -110,7 +110,7 @@ class MockGet: def __init__(self): self.responses = [] - def get(self, *args: Any, **kwargs: Any) -> Tuple[int, Dict[str, str], bytes]: + def get(self, *args: Any, **kwargs: Any) -> tuple[int, dict[str, str], bytes]: return 200, {}, self.responses.pop(0) def add(self, item: LicenseInfoHelper | str | bytes) -> None: @@ -169,12 +169,12 @@ def __init__( self.feed_template = feed_template def __call__( - self, licenses: List[LicenseInfoHelper] - ) -> Tuple[ - List[Edition], - List[LicensePool], - List[Work], - Dict[str, List[CoverageFailure]], + self, licenses: list[LicenseInfoHelper] + ) -> tuple[ + list[Edition], + list[LicensePool], + list[Work], + dict[str, list[CoverageFailure]], ]: feed_licenses = [l.license for l in licenses] for _license in licenses: @@ -187,7 +187,7 @@ def __call__( return self.importer.import_from_feed(feed) def get_templated_feed( - self, files: APIFilesFixture, filename: str, licenses: List[LicenseHelper] + self, files: APIFilesFixture, filename: str, licenses: list[LicenseHelper] ) -> str: """Get the test ODL feed with specific licensing information. 
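Note: the `tests/fixtures/api_odl.py` changes above are annotation-only (`Optional[X]` becomes `X | None`), so the helpers behave as before. A minimal sketch of how they compose, assuming `LicenseHelper`, `LicenseInfoHelper`, and `MockGet` are importable from the fixture module exactly as defined above; the identifier, counts, and dates are illustrative:

import datetime

from tests.fixtures.api_odl import LicenseHelper, LicenseInfoHelper, MockGet

# A license permitting 2 concurrent checkouts with a fixed expiry; per the
# __init__ shown above, a datetime expiry is stored as an ISO-8601 string.
license = LicenseHelper(
    identifier="urn:uuid:00000000-0000-0000-0000-000000000001",
    checkouts=52,
    concurrency=2,
    expires=datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc),
)
info = LicenseInfoHelper(license=license, available=2, status="available")

# MockGet queues canned responses; each get() call pops the next queued item
# and returns it with a 200 status and empty headers.
mock_get = MockGet()
mock_get.add(info)
status, headers, body = mock_get.get("http://license.example.com/status")
assert status == 200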
diff --git a/tests/fixtures/api_routes.py b/tests/fixtures/api_routes.py index f1010e456..e9823dbad 100644 --- a/tests/fixtures/api_routes.py +++ b/tests/fixtures/api_routes.py @@ -1,12 +1,13 @@ import logging -from typing import Any, Generator, Optional +from collections.abc import Generator +from typing import Any import flask import pytest from werkzeug.exceptions import MethodNotAllowed from api import routes -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.api_controller import ControllerFixture from tests.fixtures.database import DatabaseTransactionFixture @@ -136,8 +137,8 @@ def __init__( self.original_app = self.routes.app self.resolver = self.original_app.url_map.bind("", "/") - self.controller: Optional[CirculationManagerController] = None - self.real_controller: Optional[CirculationManagerController] = None + self.controller: CirculationManagerController | None = None + self.real_controller: CirculationManagerController | None = None self.routes.app = app # type: ignore def set_controller_name(self, name: str): diff --git a/tests/fixtures/authenticator.py b/tests/fixtures/authenticator.py index 8794e9865..da97b8cea 100644 --- a/tests/fixtures/authenticator.py +++ b/tests/fixtures/authenticator.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple, Type +from typing import Optional import pytest @@ -20,7 +20,7 @@ IntegrationLibraryConfigurationFixture, ) -AuthProviderFixture = Tuple[ +AuthProviderFixture = tuple[ IntegrationConfiguration, Optional[IntegrationLibraryConfiguration] ] @@ -37,9 +37,9 @@ def __init__( def __call__( self, protocol: str, - library: Optional[Library], - settings_dict: Optional[Dict[str, str]] = None, - library_settings_dict: Optional[Dict[str, str]] = None, + library: Library | None, + settings_dict: dict[str, str] | None = None, + library_settings_dict: dict[str, str] | None = None, ) -> AuthProviderFixture: settings_dict = settings_dict or {} library_settings_dict = library_settings_dict or {} @@ -76,7 +76,7 @@ class AuthProtocolFixture: def __init__(self, registry: PatronAuthRegistry): self.registry = registry - def __call__(self, protocol: Type[AuthenticationProviderType]) -> str: + def __call__(self, protocol: type[AuthenticationProviderType]) -> str: return self.registry.get_protocol(protocol, "") @@ -100,7 +100,7 @@ def __init__( def __call__( self, - library: Optional[Library] = None, + library: Library | None = None, test_identifier: str = "username1", test_password: str = "password1", ) -> AuthProviderFixture: @@ -136,7 +136,7 @@ def __init__( self.get_auth_protocol = get_auth_protocol def __call__( - self, library: Optional[Library] = None, **kwargs: str + self, library: Library | None = None, **kwargs: str ) -> AuthProviderFixture: if "url" not in kwargs: kwargs["url"] = "http://url.com/" @@ -169,7 +169,7 @@ def __init__( self.get_auth_protocol = get_auth_protocol def __call__( - self, library: Optional[Library] = None, **kwargs: str + self, library: Library | None = None, **kwargs: str ) -> AuthProviderFixture: if "url" not in kwargs: kwargs["url"] = "url.com" @@ -202,7 +202,7 @@ def __init__( self.get_auth_protocol = get_auth_protocol def __call__( - self, library: Optional[Library] = None, **kwargs: str + self, library: Library | None = None, **kwargs: str ) -> AuthProviderFixture: if "service_provider_xml_metadata" not in kwargs: 
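Each authenticator fixture above returns an `AuthProviderFixture`, i.e. a `(IntegrationConfiguration, Optional[IntegrationLibraryConfiguration])` tuple. A hedged sketch of consuming one in a test follows; the fixture name `create_simple_auth_integration` is hypothetical (only the `__call__` signature with `library`, `test_identifier`, and `test_password` appears above), and the expectation that the library-scoped half is created only when a `Library` is passed is an assumption:

# Hypothetical test consuming an AuthProviderFixture; the fixture name is an
# assumption, while its call signature comes from the diff above.
def test_simple_auth_setup(create_simple_auth_integration, db):
    integration, library_config = create_simple_auth_integration(
        library=db.default_library(),
        test_identifier="username1",
        test_password="password1",
    )
    # The integration-level configuration is always created.
    assert integration is not None
    # Assumed: the library-scoped half exists because a library was passed.
    assert library_config is not None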
kwargs["service_provider_xml_metadata"] = CORRECT_XML_WITH_ONE_SP diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index e1f3393cf..da161daaa 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -7,8 +7,9 @@ import tempfile import time import uuid +from collections.abc import Generator, Iterable from textwrap import dedent -from typing import Generator, Iterable, List, Optional, Tuple +from typing import Any import pytest import sqlalchemy @@ -98,7 +99,7 @@ def __init__(self, engine: Engine, connection: Connection): self._connection = connection @staticmethod - def _get_database_connection() -> Tuple[Engine, Connection]: + def _get_database_connection() -> tuple[Engine, Connection]: url = Configuration.database_url() engine = SessionManager.engine(url) connection = engine.connect() @@ -139,12 +140,12 @@ class DatabaseTransactionFixture: """A fixture representing a single transaction. The transaction is automatically rolled back.""" _database: DatabaseFixture - _default_library: Optional[Library] - _default_collection: Optional[Collection] + _default_library: Library | None + _default_collection: Collection | None _session: Session _transaction: Transaction _counter: int - _isbns: List[str] + _isbns: list[str] def __init__( self, database: DatabaseFixture, session: Session, transaction: Transaction @@ -169,10 +170,9 @@ def _make_default_library(self) -> Library: "Default Collection", protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS", + external_account_id="http://opds.example.com/feed", ) - collection.integration_configuration.for_library(library.id, create=True) - if collection not in library.collections: - library.collections.append(collection) + collection.libraries.append(library) return library @staticmethod @@ -207,7 +207,7 @@ def transaction(self) -> Transaction: def session(self) -> Session: return self._session - def default_collection(self): + def default_collection(self) -> Collection: """A Collection that will only be created once throughout a given test. @@ -241,9 +241,9 @@ def fresh_str(self) -> str: def library( self, - name: Optional[str] = None, - short_name: Optional[str] = None, - settings: Optional[LibrarySettings] = None, + name: str | None = None, + short_name: str | None = None, + settings: LibrarySettings | None = None, ) -> Library: # Just a dummy key used for testing. 
key_string = """\ @@ -296,19 +296,20 @@ def collection( username=None, password=None, data_source_name=None, + settings: dict[str, Any] | None = None, ) -> Collection: name = name or self.fresh_str() - collection, ignore = get_one_or_create(self.session, Collection, name=name) - collection.external_account_id = external_account_id - integration = collection.create_external_integration(protocol) - integration.goal = ExternalIntegration.LICENSE_GOAL - config = collection.create_integration_configuration(protocol) - config.goal = Goals.LICENSE_GOAL - config.settings_dict = { - "url": url, - "username": username, - "password": password, - } + collection, _ = Collection.by_name_and_protocol(self.session, name, protocol) + settings = settings or {} + if url: + settings["url"] = url + if username: + settings["username"] = username + if password: + settings["password"] = password + if external_account_id: + settings["external_account_id"] = external_account_id + collection.integration_configuration.settings_dict = settings if data_source_name: collection.data_source = data_source_name @@ -722,13 +723,14 @@ def integration_configuration( goal=goal, name=(name or random_string(16)), ) - if libraries and not isinstance(libraries, list): - libraries = [libraries] - else: + + if libraries is None: libraries = [] - for library in libraries: - integration.for_library(library.id, create=True) + if not isinstance(libraries, list): + libraries = [libraries] + + integration.libraries.extend(libraries) integration.settings_dict = kwargs return integration @@ -971,7 +973,7 @@ def __init__(self, db: DatabaseTransactionFixture): self.db = db def __call__( - self, protocol: Optional[str], goal: Goals, settings_dict: Optional[dict] = None + self, protocol: str | None, goal: Goals, settings_dict: dict | None = None ) -> IntegrationConfiguration: integration, _ = create( self.db.session, @@ -984,7 +986,7 @@ def __call__( return integration def discovery_service( - self, protocol: Optional[str] = None, url: Optional[str] = None + self, protocol: str | None = None, url: str | None = None ) -> IntegrationConfiguration: registry = DiscoveryRegistry() if protocol is None: @@ -1018,7 +1020,7 @@ def __call__( self, library: Library, parent: IntegrationConfiguration, - settings_dict: Optional[dict] = None, + settings_dict: dict | None = None, ) -> IntegrationLibraryConfiguration: settings_dict = settings_dict or {} integration, _ = create( diff --git a/tests/fixtures/flask.py b/tests/fixtures/flask.py index 50a53ae42..670b7d908 100644 --- a/tests/fixtures/flask.py +++ b/tests/fixtures/flask.py @@ -1,4 +1,4 @@ -from typing import Generator +from collections.abc import Generator import pytest from flask.ctx import RequestContext diff --git a/tests/fixtures/library.py b/tests/fixtures/library.py index c17f59966..e4056b49e 100644 --- a/tests/fixtures/library.py +++ b/tests/fixtures/library.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import pytest @@ -38,9 +38,9 @@ def __init__(self, db: DatabaseTransactionFixture) -> None: def library( self, - name: Optional[str] = None, - short_name: Optional[str] = None, - settings: Optional[LibrarySettings] = None, + name: str | None = None, + short_name: str | None = None, + settings: LibrarySettings | None = None, ) -> Library: library = self.db.library(name=name, short_name=short_name, settings=settings) if isinstance(settings, MockLibrarySettings): diff --git a/tests/fixtures/odl.py 
b/tests/fixtures/odl.py index 420e342ae..ffa3e3dea 100644 --- a/tests/fixtures/odl.py +++ b/tests/fixtures/odl.py @@ -1,6 +1,7 @@ import json import types -from typing import Any, Callable, Optional, Tuple, Type +from collections.abc import Callable +from typing import Any import pytest from _pytest.monkeypatch import MonkeyPatch @@ -17,7 +18,6 @@ Patron, Representation, Work, - get_one_or_create, ) from core.model.configuration import ExternalIntegration from core.util.http import HTTP @@ -53,7 +53,7 @@ def _url_for(patched_self, *args, **kwargs): "&".join([f"{key}={val}" for key, val in list(kwargs.items())]), ) - def __call__(self, api: Type[BaseODLAPI]): + def __call__(self, api: type[BaseODLAPI]): # We monkeypatch the ODLAPI class to intercept HTTP requests and responses # these monkeypatched methods are staticmethods on this class. They take # a patched_self argument, which is the instance of the ODLAPI class that @@ -91,26 +91,18 @@ def library(self): def collection(self, library, api_class=ODLAPI): """Create a mock ODL collection to use in tests.""" integration_protocol = api_class.label() - collection, ignore = get_one_or_create( + collection, _ = Collection.by_name_and_protocol( self.db.session, - Collection, - name=f"Test {api_class.__name__} Collection", - create_method_kwargs=dict( - external_account_id="http://odl", - ), + f"Test {api_class.__name__} Collection", + integration_protocol, ) - integration = collection.create_external_integration( - protocol=integration_protocol - ) - config = collection.create_integration_configuration(integration_protocol) - config.settings_dict = { + collection.integration_configuration.settings_dict = { "username": "a", "password": "b", - "url": "http://metadata", + "external_account_id": "http://odl", Collection.DATA_SOURCE_NAME_SETTING: "Feedbooks", } - config.for_library(library.id, create=True) - library.collections.append(collection) + collection.libraries.append(library) return collection def work(self, collection): @@ -179,7 +171,7 @@ def checkout( pool: LicensePool, db: DatabaseTransactionFixture, loan_url: str, - ) -> Callable[[], Tuple[LoanInfo, Any]]: + ) -> Callable[[], tuple[LoanInfo, Any]]: """Create a function that, when evaluated, performs a checkout.""" def c(): @@ -240,19 +232,17 @@ def __init__( self.patron = patron self.pool = license.license_pool - def checkin( - self, patron: Optional[Patron] = None, pool: Optional[LicensePool] = None - ): + def checkin(self, patron: Patron | None = None, pool: LicensePool | None = None): patron = patron or self.patron pool = pool or self.pool return self.fixture.checkin(self.api, patron=patron, pool=pool)() def checkout( self, - loan_url: Optional[str] = None, - patron: Optional[Patron] = None, - pool: Optional[LicensePool] = None, - ) -> Tuple[LoanInfo, Any]: + loan_url: str | None = None, + patron: Patron | None = None, + pool: LicensePool | None = None, + ) -> tuple[LoanInfo, Any]: patron = patron or self.patron pool = pool or self.pool loan_url = loan_url or self.db.fresh_url() @@ -287,10 +277,10 @@ def __init__( patched(ODL2API) def collection( - self, library: Library, api_class: Type[ODL2API] = ODL2API + self, library: Library, api_class: type[ODL2API] = ODL2API ) -> Collection: collection = super().collection(library, api_class) - collection.name = "Test ODL2 Collection" + collection.integration_configuration.name = "Test ODL2 Collection" collection.integration_configuration.protocol = ExternalIntegration.ODL2 return collection diff --git a/tests/fixtures/s3.py 
b/tests/fixtures/s3.py index 60c10e568..fced64664 100644 --- a/tests/fixtures/s3.py +++ b/tests/fixtures/s3.py @@ -2,17 +2,7 @@ import functools import sys -from types import TracebackType -from typing import ( - TYPE_CHECKING, - BinaryIO, - List, - Literal, - NamedTuple, - Optional, - Protocol, - Type, -) +from typing import TYPE_CHECKING, BinaryIO, NamedTuple, Protocol from unittest.mock import MagicMock import pytest @@ -32,7 +22,7 @@ class MockS3ServiceUpload(NamedTuple): key: str content: bytes - media_type: Optional[str] + media_type: str | None class MockMultipartS3ContextManager(MultipartS3ContextManager): @@ -42,14 +32,14 @@ def __init__( bucket: str, key: str, url: str, - media_type: Optional[str] = None, + media_type: str | None = None, ) -> None: self.parent = parent self.key = key self.bucket = bucket self.media_type = media_type self.content = b"" - self.content_parts: List[bytes] = [] + self.content_parts: list[bytes] = [] self._complete = False self._url = url self._exception = None @@ -57,22 +47,19 @@ def __init__( def __enter__(self) -> Self: return self - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Literal[False]: + def upload_part(self, content: bytes) -> None: + self.content_parts.append(content) + self.content += content + + def _upload_complete(self) -> None: if self.content: self._complete = True self.parent.uploads.append( MockS3ServiceUpload(self.key, self.content, self.media_type) ) - return False - def upload_part(self, content: bytes) -> None: - self.content_parts.append(content) - self.content += content + def _upload_abort(self) -> None: + ... class MockS3Service(S3Service): @@ -84,20 +71,20 @@ def __init__( url_template: str, ) -> None: super().__init__(client, region, bucket, url_template) - self.uploads: List[MockS3ServiceUpload] = [] - self.mocked_multipart_upload: Optional[MockMultipartS3ContextManager] = None + self.uploads: list[MockS3ServiceUpload] = [] + self.mocked_multipart_upload: MockMultipartS3ContextManager | None = None def store_stream( self, key: str, stream: BinaryIO, - content_type: Optional[str] = None, - ) -> Optional[str]: + content_type: str | None = None, + ) -> str | None: self.uploads.append(MockS3ServiceUpload(key, stream.read(), content_type)) return self.generate_url(key) def multipart( - self, key: str, content_type: Optional[str] = None + self, key: str, content_type: str | None = None ) -> MultipartS3ContextManager: self.mocked_multipart_upload = MockMultipartS3ContextManager( self, self.bucket, key, self.generate_url(key), content_type @@ -108,10 +95,10 @@ def multipart( class S3ServiceProtocol(Protocol): def __call__( self, - client: Optional[S3Client] = None, - region: Optional[str] = None, - bucket: Optional[str] = None, - url_template: Optional[str] = None, + client: S3Client | None = None, + region: str | None = None, + bucket: str | None = None, + url_template: str | None = None, ) -> S3Service: ... 
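The reworked `MockS3Service` records uploads locally instead of calling S3, so tests can assert on exactly what was stored. A minimal sketch, assuming `generate_url()` inherited from `S3Service` fills `url_template` with the key, and that the parent multipart context manager calls `_upload_complete()` on a clean exit; the client, bucket, region, and template values are illustrative:

from io import BytesIO
from unittest.mock import MagicMock

from tests.fixtures.s3 import MockS3Service

service = MockS3Service(
    client=MagicMock(),
    region="us-east-1",
    bucket="test-bucket",
    url_template="https://test-bucket.s3.us-east-1.amazonaws.com/{key}",  # assumed shape
)

# store_stream() appends a MockS3ServiceUpload and returns the generated URL.
url = service.store_stream("covers/book.png", BytesIO(b"png-bytes"), "image/png")
[upload] = service.uploads
assert upload.key == "covers/book.png"
assert upload.content == b"png-bytes"
assert upload.media_type == "image/png"

# Multipart uploads accumulate parts; on exit, the parent context manager is
# assumed to invoke _upload_complete(), which records the combined content.
with service.multipart("exports/feed.xml", "application/xml") as upload_ctx:
    upload_ctx.upload_part(b"<feed/>")
assert service.uploads[-1].content == b"<feed/>"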
diff --git a/tests/fixtures/search.py b/tests/fixtures/search.py index 0936d273a..bb5429094 100644 --- a/tests/fixtures/search.py +++ b/tests/fixtures/search.py @@ -1,6 +1,6 @@ import logging import os -from typing import Iterable, List +from collections.abc import Iterable import pytest from opensearchpy import OpenSearch @@ -24,7 +24,7 @@ class ExternalSearchFixture: integration: ExternalIntegration db: DatabaseTransactionFixture search: OpenSearch - _indexes_created: List[str] + _indexes_created: list[str] def __init__(self): self._indexes_created = [] diff --git a/tests/fixtures/tls_server.py b/tests/fixtures/tls_server.py index 26ca6ce2c..57adbcf97 100644 --- a/tests/fixtures/tls_server.py +++ b/tests/fixtures/tls_server.py @@ -3,10 +3,11 @@ import select import ssl from collections import deque +from collections.abc import Generator from concurrent.futures import ThreadPoolExecutor from pathlib import Path from socket import AF_INET, SOCK_STREAM, socket -from typing import Any, Deque, Generator +from typing import Any, Deque import pytest diff --git a/tests/fixtures/webserver.py b/tests/fixtures/webserver.py new file mode 100644 index 000000000..b9060b993 --- /dev/null +++ b/tests/fixtures/webserver.py @@ -0,0 +1,186 @@ +import threading +from collections.abc import Generator +from http.server import BaseHTTPRequestHandler, HTTPServer + +import pytest + +from core.util.log import LoggerMixin + + +class MockAPIServerRequest: + """A request made to a server.""" + + headers: dict[str, str] + payload: bytes + method: str + path: str + + def __init__(self) -> None: + self.headers = {} + self.payload = b"" + self.method = "GET" + self.path = "/" + + +class MockAPIServerResponse: + """A response returned from a server.""" + + status_code: int + content: bytes + headers: dict[str, str] + close_obnoxiously: bool + + def __init__(self) -> None: + self.status_code = 200 + self.content = b"" + self.headers = {} + self.close_obnoxiously = False + + def set_content(self, data: bytes) -> None: + """A convenience method that automatically sets the correct content length for data.""" + self.content = data + self.headers["content-length"] = str(len(data)) + + +class MockAPIServerRequestHandler(BaseHTTPRequestHandler, LoggerMixin): + """Basic request handler.""" + + def _send_everything(self, _response: MockAPIServerResponse) -> None: + if _response.close_obnoxiously: + return + + self.send_response(_response.status_code) + for key in _response.headers.keys(): + _value = _response.headers.get(key) + if _value: + self.send_header(key, _value) + + self.end_headers() + self.wfile.write(_response.content) + self.wfile.flush() + + def _read_everything(self) -> MockAPIServerRequest: + _request = MockAPIServerRequest() + _request.method = self.command + for k in self.headers.keys(): + header = self.headers.get(k, None) + if header is not None: + _request.headers[k] = header + _request.path = self.path + _readable = int(self.headers.get("Content-Length") or 0) + if _readable > 0: + _request.payload = self.rfile.read(_readable) + return _request + + def _handle_everything(self) -> None: + _request = self._read_everything() + assert isinstance(self.server, MockAPIInternalServer) + _response = self.server.mock_api_server.dequeue_response(_request) + if _response is None: + self.log.error( + f"failed to find a response for {_request.method} {_request.path}" + ) + raise AssertionError( + f"No available response for {_request.method} {_request.path}!" 
+ ) + self._send_everything(_response) + + def do_GET(self) -> None: + self.log.info("GET") + self._handle_everything() + + def do_POST(self) -> None: + self.log.info("POST") + self._handle_everything() + + def do_PUT(self) -> None: + self.log.info("PUT") + self._handle_everything() + + def version_string(self) -> str: + return "" + + def date_time_string(self, timestamp: int | None = 0) -> str: + return "Sat, 1 January 2000 00:00:00 UTC" + + +class MockAPIInternalServer(HTTPServer): + mock_api_server: "MockAPIServer" + + def __init__(self, server_address: tuple[str, int], bind_and_activate: bool): + super().__init__(server_address, MockAPIServerRequestHandler, bind_and_activate) + self.allow_reuse_address = True + + +class MockAPIServer(LoggerMixin): + """Embedded web server.""" + + _address: str + _port: int + _server: HTTPServer + _server_thread: threading.Thread + _responses: dict[str, dict[str, list[MockAPIServerResponse]]] + _requests: list[MockAPIServerRequest] + + def __init__(self, address: str, port: int): + self._address = address + self._port = port + self._server = MockAPIInternalServer( + (self._address, self._port), bind_and_activate=True + ) + self._server.mock_api_server = self + self._server_thread = threading.Thread(target=self._server.serve_forever) + self._responses = {} + self._requests = [] + + def start(self) -> None: + self.log.info(f"starting mock web server on {self.address()}:{self.port()}") + self._server_thread.start() + + def stop(self) -> None: + self.log.info( + f"shutting down mock web server on {self.address()}:{self.port()}" + ) + self._server.shutdown() + self._server.server_close() + self._server_thread.join(timeout=10) + + def enqueue_response( + self, request_method: str, request_path: str, response: MockAPIServerResponse + ) -> None: + _by_method = self._responses.get(request_method) or {} + _by_path = _by_method.get(request_path) or [] + _by_path.append(response) + _by_method[request_path] = _by_path + self._responses[request_method] = _by_method + + def dequeue_response( + self, request: MockAPIServerRequest + ) -> MockAPIServerResponse | None: + self._requests.append(request) + _by_method = self._responses.get(request.method) or {} + _by_path = _by_method.get(request.path) or [] + if len(_by_path) > 0: + return _by_path.pop(0) + return None + + def address(self) -> str: + return self._address + + def port(self) -> int: + return self._port + + def url(self, path: str) -> str: + return f"http://{self.address()}:{self.port()}{path}" + + def requests(self) -> list[MockAPIServerRequest]: + return list(self._requests) + + +@pytest.fixture +def mock_web_server() -> Generator[MockAPIServer, None, None]: + """A test fixture that yields a usable mock web server for the lifetime of the test.""" + _server = MockAPIServer("127.0.0.1", 10256) + _server.start() + yield _server + _server.stop() diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index 99bef013a..c537d93b1 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -3,13 +3,13 @@ import json import random import string +from collections.abc import Generator from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Protocol, Union +from typing import TYPE_CHECKING, Any, Protocol, cast import pytest import pytest_alembic from pytest_alembic.config import Config -from sqlalchemy import inspect from core.model import json_serializer from tests.fixtures.database import ApplicationFixture, DatabaseFixture @@ -62,7 +62,7 @@ def 
alembic_engine(database: DatabaseFixture) -> Engine: @pytest.fixture def alembic_runner( - alembic_config: Union[Dict[str, Any], alembic.config.Config, Config], + alembic_config: dict[str, Any] | alembic.config.Config | Config, alembic_engine: Engine, ) -> Generator[MigrationContext, None, None]: """ @@ -76,13 +76,13 @@ def alembic_runner( class RandomName(Protocol): - def __call__(self, length: Optional[int] = None) -> str: + def __call__(self, length: int | None = None) -> str: ... @pytest.fixture def random_name() -> RandomName: - def fixture(length: Optional[int] = None) -> str: + def fixture(length: int | None = None) -> str: if length is None: length = 10 return "".join(random.choices(string.ascii_lowercase, k=length)) @@ -94,8 +94,8 @@ class CreateLibrary(Protocol): def __call__( self, connection: Connection, - name: Optional[str] = None, - short_name: Optional[str] = None, + name: str | None = None, + short_name: str | None = None, ) -> int: ... @@ -104,34 +104,27 @@ def __call__( def create_library(random_name: RandomName) -> CreateLibrary: def fixture( connection: Connection, - name: Optional[str] = None, - short_name: Optional[str] = None, + name: str | None = None, + short_name: str | None = None, ) -> int: if name is None: name = random_name() if short_name is None: short_name = random_name() - inspector = inspect(connection) - columns = [column["name"] for column in inspector.get_columns("libraries")] - args = { "name": name, "short_name": short_name, } - # See if we need to include public and private keys - if "public_key" in columns: - args["public_key"] = random_name() - args["private_key"] = random_name() + args["public_key"] = random_name() + args["private_key"] = random_name() - # See if we need to include a settings dict - if "settings_dict" in columns: - settings_dict = { - "website": "http://library.com", - "help_web": "http://library.com/support", - } - args["settings_dict"] = json_serializer(settings_dict) + settings_dict = { + "website": "http://library.com", + "help_web": "http://library.com/support", + } + args["settings_dict"] = json_serializer(settings_dict) keys = ",".join(args.keys()) values = ",".join([f"'{value}'" for value in args.values()]) @@ -150,9 +143,7 @@ class CreateCollection(Protocol): def __call__( self, connection: Connection, - name: Optional[str] = None, - external_integration_id: Optional[int] = None, - external_account_id: Optional[str] = None, + integration_configuration_id: int | None = None, ) -> int: ... 
@@ -161,16 +152,11 @@ def __call__( def create_collection(random_name: RandomName) -> CreateCollection: def fixture( connection: Connection, - name: Optional[str] = None, - external_integration_id: Optional[int] = None, - external_account_id: Optional[str] = None, + integration_configuration_id: int | None = None, ) -> int: - if name is None: - name = random_name() collection = connection.execute( - "INSERT INTO collections (name, external_account_id, external_integration_id) VALUES" - + "(%s, %s, %s) returning id", - (name, external_account_id, external_integration_id), + "INSERT INTO collections (integration_configuration_id) VALUES (%s) returning id", + integration_configuration_id, ).fetchone() assert collection is not None assert isinstance(collection.id, int) @@ -183,9 +169,9 @@ class CreateExternalIntegration(Protocol): def __call__( self, connection: Connection, - protocol: Optional[str] = None, - goal: Optional[str] = None, - name: Optional[str] = None, + protocol: str | None = None, + goal: str | None = None, + name: str | None = None, ) -> int: ... @@ -194,9 +180,9 @@ def __call__( def create_external_integration(random_name: RandomName) -> CreateExternalIntegration: def fixture( connection: Connection, - protocol: Optional[str] = None, - goal: Optional[str] = None, - name: Optional[str] = None, + protocol: str | None = None, + goal: str | None = None, + name: str | None = None, ) -> int: protocol = protocol or random_name() goal = goal or random_name() @@ -215,10 +201,10 @@ class CreateConfigSetting(Protocol): def __call__( self, connection: Connection, - key: Optional[str] = None, - value: Optional[str] = None, - integration_id: Optional[int] = None, - library_id: Optional[int] = None, + key: str | None = None, + value: str | None = None, + integration_id: int | None = None, + library_id: int | None = None, associate_library: bool = False, ) -> int: ... @@ -228,10 +214,10 @@ def __call__( def create_config_setting() -> CreateConfigSetting: def fixture( connection: Connection, - key: Optional[str] = None, - value: Optional[str] = None, - integration_id: Optional[int] = None, - library_id: Optional[int] = None, + key: str | None = None, + value: str | None = None, + integration_id: int | None = None, + library_id: int | None = None, associate_library: bool = False, ) -> int: if type(value) in (tuple, list, dict): @@ -258,3 +244,206 @@ def fixture( return setting.id return fixture + + +class CreateIntegrationConfiguration(Protocol): + def __call__( + self, + connection: Connection, + name: str, + protocol: str, + goal: str, + settings: dict[str, Any] | None = None, + ) -> int: + ... + + +@pytest.fixture +def create_integration_configuration() -> CreateIntegrationConfiguration: + def fixture( + connection: Connection, + name: str, + protocol: str, + goal: str, + settings: dict[str, Any] | None = None, + ) -> int: + if settings is None: + settings = {} + + settings_str = json_serializer(settings) + + integration_configuration = connection.execute( + "INSERT INTO integration_configurations (name, protocol, goal, settings, self_test_results, context) " + "VALUES (%s, %s, %s, %s, '{}', '{}') returning id", + name, + protocol, + goal, + settings_str, + ).fetchone() + assert integration_configuration is not None + assert isinstance(integration_configuration.id, int) + return integration_configuration.id + + return fixture + + +class CreateEdition(Protocol): + def __call__( + self, + connection: Connection, + title: str, + medium: str, + primary_identifier_id: int, + ) -> int: + ... 
+ + +@pytest.fixture +def create_edition() -> CreateEdition: + def fixture( + connection: Connection, title: str, medium: str, primary_identifier_id: int + ) -> int: + edition = connection.execute( + "INSERT INTO editions (title, medium, primary_identifier_id) VALUES (%s, %s, %s) returning id", + title, + medium, + primary_identifier_id, + ).fetchone() + assert edition is not None + return cast(int, edition.id) + + return fixture + + +class CreateIdentifier: + def __call__( + self, + connection: Connection, + identifier: str | None = None, + type: str | None = None, + ) -> int: + identifier = identifier or self.random_name() + type = type or self.random_name() + identifier_row = connection.execute( + "INSERT INTO identifiers (identifier, type) VALUES (%s, %s) returning id", + identifier, + type, + ).fetchone() + assert identifier_row is not None + assert isinstance(identifier_row.id, int) + return identifier_row.id + + def __init__(self, random_name: RandomName) -> None: + self.random_name = random_name + + +@pytest.fixture +def create_identifier(random_name: RandomName) -> CreateIdentifier: + return CreateIdentifier(random_name) + + +class CreateLicensePool(Protocol): + def __call__( + self, + connection: Connection, + collection_id: int, + identifier_id: int | None = None, + should_track_playtime: bool | None = False, + ) -> int: + ... + + +@pytest.fixture +def create_license_pool() -> CreateLicensePool: + def fixture( + connection: Connection, + collection_id: int, + identifier_id: int | None = None, + should_track_playtime: bool | None = False, + ) -> int: + licensepool = connection.execute( + "INSERT into licensepools (collection_id, identifier_id, should_track_playtime) VALUES (%(id)s, %(identifier_id)s, %(track)s) returning id", + id=collection_id, + identifier_id=identifier_id, + track=should_track_playtime, + ).fetchone() + assert licensepool is not None + return cast(int, licensepool.id) + + return fixture + + +class CreateLane: + def __call__( + self, + connection: Connection, + library_id: int, + name: str | None = None, + priority: int = 0, + inherit_parent_restrictions: bool = False, + include_self_in_grouped_feed: bool = False, + visible: bool = True, + ) -> int: + name = name or self.random_name() + lane = connection.execute( + "INSERT INTO lanes " + "(library_id, display_name, priority, size, inherit_parent_restrictions, " + "include_self_in_grouped_feed, visible) " + " VALUES (%s, %s, %s, 0, %s, %s, %s) returning id", + library_id, + name, + priority, + inherit_parent_restrictions, + include_self_in_grouped_feed, + visible, + ).fetchone() + assert lane is not None + assert isinstance(lane.id, int) + return lane.id + + def __init__(self, random_name: RandomName) -> None: + self.random_name = random_name + + +@pytest.fixture +def create_lane(random_name: RandomName) -> CreateLane: + return CreateLane(random_name) + + +class CreateCoverageRecord: + def __call__( + self, + connection: Connection, + operation: str | None = None, + identifier_id: int | None = None, + collection_id: int | None = None, + ) -> int: + if identifier_id is None: + identifier_id = self.create_identifier(connection) + + if operation is None: + operation = self.random_name() + + row = connection.execute( + "INSERT INTO coveragerecords (operation, identifier_id, collection_id, timestamp) " + "VALUES (%s, %s, %s, '2021-01-01') returning id", + operation, + identifier_id, + collection_id, + ).first() + assert row is not None + assert isinstance(row.id, int) + return row.id + + def __init__( + self, 
create_identifier: CreateIdentifier, random_name: RandomName + ) -> None: + self.create_identifier = create_identifier + self.random_name = random_name + + +@pytest.fixture +def create_coverage_record( + create_identifier: CreateIdentifier, random_name: RandomName +) -> CreateCoverageRecord: + return CreateCoverageRecord(create_identifier, random_name) diff --git a/tests/migration/test_20230510_a9ed3f76d649.py b/tests/migration/test_20230510_a9ed3f76d649.py deleted file mode 100644 index e7bbb0cb7..000000000 --- a/tests/migration/test_20230510_a9ed3f76d649.py +++ /dev/null @@ -1,231 +0,0 @@ -from __future__ import annotations - -import json -from typing import TYPE_CHECKING - -from sqlalchemy import inspect - -if TYPE_CHECKING: - from pytest_alembic import MigrationContext - from sqlalchemy.engine import Engine - - from tests.migration.conftest import ( - CreateConfigSetting, - CreateExternalIntegration, - CreateLibrary, - ) - - -def assert_tables_exist(alembic_engine: Engine) -> None: - # We should have the tables for this migration - insp = inspect(alembic_engine) - assert "integration_configurations" in insp.get_table_names() - assert "integration_library_configurations" in insp.get_table_names() - assert "integration_errors" in insp.get_table_names() - - # We should have the enum defined in this migration - with alembic_engine.connect() as connection: - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'goals'") - assert result.rowcount == 1 - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'status'") - assert result.rowcount == 1 - - -def assert_tables_dont_exist(alembic_engine: Engine) -> None: - # We should not have the tables for this migration - insp = inspect(alembic_engine) - assert "integration_configurations" not in insp.get_table_names() - assert "integration_library_configurations" not in insp.get_table_names() - assert "integration_errors" not in insp.get_table_names() - - # We should not have the enum defined in this migration - with alembic_engine.connect() as connection: - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'goals'") - assert result.rowcount == 0 - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'status'") - assert result.rowcount == 0 - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, -) -> None: - # Migrate to just before our migration - alembic_runner.migrate_down_to("a9ed3f76d649") - assert_tables_exist(alembic_engine) - - # Migrate down past our migration, running the downgrade migration - alembic_runner.migrate_down_one() - assert_tables_dont_exist(alembic_engine) - - # Insert configuration settings for testing - with alembic_engine.connect() as connection: - # Set up two libraries - library = create_library(connection) - library2 = create_library(connection) - - # Set up four integrations - sip_integration = create_external_integration( - connection, "api.sip", "patron_auth", "Integration 1" - ) - millenium_integration = create_external_integration( - connection, "api.millenium_patron", "patron_auth", "Integration 2" - ) - simple_integration = create_external_integration( - connection, "api.simple_authentication", "patron_auth", "Integration 3" - ) - unrelated_integration = create_external_integration( - connection, "unrelated", "other_goal", "Integration 4" - ) - - # Add configuration settings for the sip 
integration - create_config_setting(connection, "setting1", "value1", sip_integration) - create_config_setting(connection, "url", "sip url", sip_integration) - create_config_setting( - connection, "institution_id", "institution", sip_integration - ) - create_config_setting( - connection, - "self_test_results", - json.dumps({"test": "test"}), - sip_integration, - ) - create_config_setting( - connection, "patron status block", "false", sip_integration - ) - create_config_setting( - connection, "identifier_barcode_format", "", sip_integration - ) - create_config_setting( - connection, "institution_id", "bar", sip_integration, library - ) - - # Add configuration settings for the millenium integration - create_config_setting(connection, "setting2", "value2", millenium_integration) - create_config_setting( - connection, "url", "https://url.com", millenium_integration - ) - create_config_setting( - connection, "verify_certificate", "false", millenium_integration - ) - create_config_setting( - connection, "use_post_requests", "true", millenium_integration - ) - create_config_setting( - connection, - "identifier_blacklist", - json.dumps(["a", "b", "c"]), - millenium_integration, - ) - create_config_setting( - connection, - "library_identifier_field", - "foo", - millenium_integration, - library, - ) - - # Add configuration settings for the simple integration - create_config_setting(connection, "test_identifier", "123", simple_integration) - create_config_setting(connection, "test_password", "456", simple_integration) - - # Associate the millenium integration with the library - connection.execute( - "INSERT INTO externalintegrations_libraries (library_id, externalintegration_id) VALUES (%s, %s)", - (library, millenium_integration), - ) - - # Associate the simple integration with library 2 - connection.execute( - "INSERT INTO externalintegrations_libraries (library_id, externalintegration_id) VALUES (%s, %s)", - (library2, simple_integration), - ) - - # Migrate back up, running our upgrade migration - alembic_runner.migrate_up_one() - assert_tables_exist(alembic_engine) - - # Check that the configuration settings were migrated correctly - with alembic_engine.connect() as connection: - # Check that we have the correct number of integrations - integrations = connection.execute( - "SELECT * FROM integration_configurations", - ) - assert integrations.rowcount == 3 - - # Check that the sip integration was migrated correctly - # The unknown setting 'setting1' was dropped, self test results were migrated, and the patron status block - # setting was renamed, based on the field alias. - sip_result = connection.execute( - "SELECT protocol, goal, settings, self_test_results FROM integration_configurations WHERE name = %s", - ("Integration 1",), - ).fetchone() - assert sip_result is not None - assert sip_result[0] == "api.sip" - assert sip_result[1] == "PATRON_AUTH_GOAL" - assert sip_result[2] == { - "patron_status_block": False, - "url": "sip url", - } - assert sip_result[3] == {"test": "test"} - - # Check that the millenium integration was migrated correctly - # The unknown setting 'setting2' was dropped, the list and bool values were serialized correctly, and - # the empty self test results were migrated as an empty dict. 
- millenium_result = connection.execute( - "SELECT protocol, goal, settings, self_test_results, id FROM integration_configurations WHERE name = %s", - ("Integration 2",), - ).fetchone() - assert millenium_result is not None - assert millenium_result[0] == "api.millenium_patron" - assert millenium_result[1] == "PATRON_AUTH_GOAL" - assert millenium_result[2] == { - "url": "https://url.com", - "verify_certificate": False, - "use_post_requests": True, - "identifier_blacklist": ["a", "b", "c"], - } - assert millenium_result[3] == {} - - # Check that the simple integration was migrated correctly - simple_result = connection.execute( - "SELECT protocol, goal, settings, self_test_results, id FROM integration_configurations WHERE name = %s", - ("Integration 3",), - ).fetchone() - assert simple_result is not None - assert simple_result[0] == "api.simple_authentication" - assert simple_result[1] == "PATRON_AUTH_GOAL" - assert simple_result[2] == { - "test_identifier": "123", - "test_password": "456", - } - assert simple_result[3] == {} - - # Check that we have the correct number of library integrations - # The SIP integration has library settings, but no association with a library, so no - # library integration was created for it. And the simple auth integration has a library - # association, but no library settings, so we do create a integration with no settings for it. - integrations = connection.execute( - "SELECT parent_id, library_id, settings FROM integration_library_configurations ORDER BY library_id asc", - ) - assert integrations.rowcount == 2 - - # Check that the millenium integration was migrated correctly - [ - millenium_library_integration, - simple_library_integration, - ] = integrations.fetchall() - assert millenium_library_integration is not None - assert millenium_library_integration[0] == millenium_result[4] - assert millenium_library_integration[1] == library - assert millenium_library_integration[2] == { - "library_identifier_field": "foo", - } - - assert simple_library_integration is not None - assert simple_library_integration[0] == simple_result[4] - assert simple_library_integration[1] == library2 - assert simple_library_integration[2] == {} diff --git a/tests/migration/test_20230512_5a425ebe026c.py b/tests/migration/test_20230512_5a425ebe026c.py deleted file mode 100644 index a66465688..000000000 --- a/tests/migration/test_20230512_5a425ebe026c.py +++ /dev/null @@ -1,123 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Callable, Optional - -import pytest - -if TYPE_CHECKING: - from pytest_alembic import MigrationContext - from sqlalchemy.engine import Connection, Engine - - from tests.migration.conftest import CreateConfigSetting, CreateExternalIntegration - - -@pytest.fixture -def create_test_settings( - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, -) -> Callable[..., int]: - def fixture( - connection: Connection, - url: str, - post: Optional[str] = None, - set_post: bool = True, - ) -> int: - integration = create_external_integration( - connection, protocol="api.millenium_patron" - ) - create_config_setting( - connection, integration_id=integration, key="url", value=url - ) - if set_post: - create_config_setting( - connection, - integration_id=integration, - key="use_post_requests", - value=post, - ) - - return integration - - return fixture - - -def assert_setting(connection: Connection, integration_id: int, value: str) -> None: - result = connection.execute( - "SELECT cs.value FROM 
configurationsettings cs join externalintegrations ei ON cs.external_integration_id = ei.id WHERE ei.id=%(id)s and cs.key='use_post_requests'", - id=integration_id, - ) - row = result.fetchone() - assert row is not None - assert row.value == value - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_test_settings: Callable[..., int], -) -> None: - alembic_runner.migrate_down_to("5a425ebe026c") - - # Test down migration - with alembic_engine.connect() as connection: - integration = create_test_settings( - connection, "https://vlc.thepalaceproject.org" - ) - - alembic_runner.migrate_down_one() - - with alembic_engine.connect() as connection: - assert_setting(connection, integration, "false") - - # Test up migration - with alembic_engine.connect() as connection: - integration_dev = create_test_settings( - connection, "http://vlc.dev.palaceproject.io/api", "false" - ) - integration_staging = create_test_settings( - connection, "https://vlc.staging.palaceproject.io/PATRONAPI", "false" - ) - integration_local1 = create_test_settings( - connection, "localhost:6500/PATRONAPI", "false" - ) - integration_local2 = create_test_settings( - connection, "http://localhost:6500/api", "false" - ) - integration_prod = create_test_settings( - connection, "https://vlc.thepalaceproject.org/anything...", "false" - ) - integration_other = create_test_settings( - connection, "https://vendor.millenium.com/PATRONAPI", "false" - ) - integration_null = create_test_settings( - connection, "http://vlc.dev.palaceproject.io/api" - ) - integration_missing = create_test_settings( - connection, "http://vlc.dev.palaceproject.io/api", set_post=False - ) - - alembic_runner.migrate_up_one() - - with alembic_engine.connect() as connection: - assert_setting(connection, integration, "true") - assert_setting(connection, integration_dev, "true") - assert_setting(connection, integration_staging, "true") - assert_setting(connection, integration_local1, "true") - assert_setting(connection, integration_local2, "true") - assert_setting(connection, integration_prod, "true") - assert_setting(connection, integration_other, "false") - assert_setting(connection, integration_null, "true") - assert_setting(connection, integration_missing, "true") - - alembic_runner.migrate_down_one() - - with alembic_engine.connect() as connection: - assert_setting(connection, integration, "false") - assert_setting(connection, integration_dev, "false") - assert_setting(connection, integration_staging, "false") - assert_setting(connection, integration_local1, "false") - assert_setting(connection, integration_local2, "false") - assert_setting(connection, integration_prod, "false") - assert_setting(connection, integration_other, "false") - assert_setting(connection, integration_null, "false") - assert_setting(connection, integration_missing, "false") diff --git a/tests/migration/test_20230531_0af587ff8595.py b/tests/migration/test_20230531_0af587ff8595.py deleted file mode 100644 index 26451e2bc..000000000 --- a/tests/migration/test_20230531_0af587ff8595.py +++ /dev/null @@ -1,142 +0,0 @@ -from dataclasses import dataclass -from typing import Any, Dict - -from pytest_alembic import MigrationContext -from sqlalchemy.engine import Connection, Engine - -from tests.migration.conftest import ( - CreateCollection, - CreateConfigSetting, - CreateExternalIntegration, - CreateLibrary, -) - - -@dataclass -class IntegrationConfiguration: - name: str - goal: str - id: int - settings: Dict[str, Any] - library_settings: Dict[int, 
Dict[str, Any]] - - -def query_integration_configurations( - connection: Connection, goal: str, name: str -) -> IntegrationConfiguration: - result = connection.execute( - "select id, name, protocol, goal, settings from integration_configurations where goal=%s and name=%s", - (goal, name), - ).fetchone() - assert result is not None - - library_results = connection.execute( - "select library_id, settings from integration_library_configurations where parent_id=%s", - result.id, - ).fetchall() - - library_settings = {lr.library_id: lr.settings for lr in library_results} - return IntegrationConfiguration( - result.name, result.goal, result.id, result.settings, library_settings - ) - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, - create_collection: CreateCollection, -) -> None: - """Test the migration of configurationsettings to integration_configurations for the licenses type goals""" - # alembic_runner.set_revision("a9ed3f76d649") - alembic_runner.migrate_down_to("a9ed3f76d649") - with alembic_engine.connect() as connection: - library_id = create_library(connection) - integration_id = create_external_integration( - connection, "Axis 360", "licenses", "Test B&T" - ) - create_config_setting(connection, "username", "username", integration_id) - create_config_setting(connection, "password", "password", integration_id) - create_config_setting(connection, "url", "http://url", integration_id) - create_config_setting( - connection, - "default_loan_duration", - "77", - integration_id, - library_id, - associate_library=True, - ) - create_collection(connection, "Test B&T", integration_id, "ExternalAccountID") - - # Fake value, never used - create_config_setting( - connection, "external_account_id", "external_account_id", integration_id - ) - - alembic_runner.migrate_up_to("0af587ff8595") - - with alembic_engine.connect() as connection: - configuration = query_integration_configurations( - connection, "LICENSE_GOAL", "Test B&T" - ) - - assert configuration.settings == { - "username": "username", - "password": "password", - "url": "http://url", - "external_account_id": "ExternalAccountID", - } - assert configuration.library_settings == { - library_id: {"default_loan_duration": 77} - } - - -def test_key_rename( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, - create_collection: CreateCollection, -) -> None: - alembic_runner.migrate_down_to("a9ed3f76d649") - with alembic_engine.connect() as connection: - integration_id = create_external_integration( - connection, "Overdrive", "licenses", "Test Overdrive" - ) - create_config_setting( - connection, "overdrive_website_id", "website", integration_id - ) - create_config_setting( - connection, "overdrive_client_key", "overdrive_client_key", integration_id - ) - create_config_setting( - connection, - "overdrive_client_secret", - "overdrive_client_secret", - integration_id, - ) - create_collection( - connection, "Test Overdrive", integration_id, "ExternalAccountID" - ) - - # Fake value, never used - create_config_setting( - connection, "external_account_id", "external_account_id", integration_id - ) - - alembic_runner.migrate_up_to("0af587ff8595") - - with alembic_engine.connect() as connection: - configuration = 
query_integration_configurations( - connection, "LICENSE_GOAL", "Test Overdrive" - ) - - assert configuration.settings == { - "overdrive_website_id": "website", - "overdrive_client_key": "overdrive_client_key", - "overdrive_client_secret": "overdrive_client_secret", - "external_account_id": "ExternalAccountID", - } diff --git a/tests/migration/test_20230711_3d380776c1bf.py b/tests/migration/test_20230711_3d380776c1bf.py deleted file mode 100644 index f90b3c969..000000000 --- a/tests/migration/test_20230711_3d380776c1bf.py +++ /dev/null @@ -1,75 +0,0 @@ -from __future__ import annotations - -import json -from typing import TYPE_CHECKING - -from pytest_alembic import MigrationContext -from sqlalchemy import inspect -from sqlalchemy.engine import Engine - -if TYPE_CHECKING: - from tests.migration.conftest import CreateConfigSetting, CreateLibrary - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_config_setting: CreateConfigSetting, - create_library: CreateLibrary, -) -> None: - alembic_runner.migrate_down_to("3d380776c1bf") - - # Test down migration - assert inspect(alembic_engine).has_table("announcements") - alembic_runner.migrate_down_one() - assert not inspect(alembic_engine).has_table("announcements") - - a1 = { - "content": "This is a test library announcement", - "id": "13ab12b8-2e86-449d-b58d-7f3a944d4093", - "start": "1990-07-01", - "finish": "1990-07-31", - } - a2 = { - "content": "This is another test library announcement", - "id": "23e0ff93-42f6-4333-8d74-4b162237bd5c", - "start": "2022-02-20", - "finish": "2022-02-21", - } - a3 = { - "content": "This is a test global announcement", - "id": "171208b0-d9bc-433f-a957-444fd32e2993", - "start": "2025-01-01", - "finish": "2025-01-02", - } - - # Test up migration - with alembic_engine.connect() as connection: - library = create_library(connection) - - # Create some library announcements - create_config_setting( - connection, "announcements", json.dumps([a1, a2]), library_id=library - ) - - # Create some global announcements - create_config_setting(connection, "global_announcements", json.dumps([a3])) - - # Run the migration - alembic_runner.migrate_up_one() - - # Make sure settings are migrated into table correctly - with alembic_engine.connect() as connection: - announcements = connection.execute( - "SELECT * FROM announcements order by start" - ).all() - assert len(announcements) == 3 - for actual, expected in zip(announcements, [a1, a2, a3]): - assert str(actual.id) == expected["id"] - assert actual.content == expected["content"] - assert str(actual.start) == expected["start"] - assert str(actual.finish) == expected["finish"] - - assert announcements[0].library_id == library - assert announcements[1].library_id == library - assert announcements[2].library_id is None diff --git a/tests/migration/test_20230719_b3749bac3e55.py b/tests/migration/test_20230719_b3749bac3e55.py deleted file mode 100644 index f25f86007..000000000 --- a/tests/migration/test_20230719_b3749bac3e55.py +++ /dev/null @@ -1,64 +0,0 @@ -import json - -from pytest_alembic import MigrationContext -from sqlalchemy import inspect -from sqlalchemy.engine import Engine - -from tests.migration.conftest import CreateConfigSetting, CreateLibrary - - -def column_exists(engine: Engine, table_name: str, column_name: str) -> bool: - inspector = inspect(engine) - columns = [column["name"] for column in inspector.get_columns(table_name)] - return column_name in columns - - -def test_migration( - alembic_runner: MigrationContext, - 
alembic_engine: Engine, - create_config_setting: CreateConfigSetting, - create_library: CreateLibrary, -) -> None: - alembic_runner.migrate_down_to("b3749bac3e55") - - # Make sure settings column exists - assert column_exists(alembic_engine, "libraries", "settings_dict") - - # Test down migration, make sure settings column is dropped - alembic_runner.migrate_down_one() - assert not column_exists(alembic_engine, "libraries", "settings_dict") - - # Create a library with some configuration settings - with alembic_engine.connect() as connection: - library = create_library(connection) - create_config_setting( - connection, "website", "https://foo.bar", library_id=library - ) - create_config_setting( - connection, "help_web", "https://foo.bar/helpme", library_id=library - ) - create_config_setting( - connection, "logo", "https://foo.bar/logo.png", library_id=library - ) - create_config_setting(connection, "key-pair", "foo", library_id=library) - create_config_setting(connection, "foo", "foo", library_id=library) - create_config_setting( - connection, - "enabled_entry_points", - json.dumps(["xyz", "abc"]), - library_id=library, - ) - - # Run the up migration, and make sure settings column is added - alembic_runner.migrate_up_one() - assert column_exists(alembic_engine, "libraries", "settings_dict") - - # Make sure settings are migrated into table correctly - with alembic_engine.connect() as connection: - result = connection.execute("select settings_dict from libraries").fetchone() - assert result is not None - settings_dict = result.settings_dict - assert len(settings_dict) == 3 - assert settings_dict["website"] == "https://foo.bar" - assert settings_dict["help_web"] == "https://foo.bar/helpme" - assert settings_dict["enabled_entry_points"] == ["xyz", "abc"] diff --git a/tests/migration/test_20230905_2b672c6fb2b9.py b/tests/migration/test_20230905_2b672c6fb2b9.py deleted file mode 100644 index fa3e94a60..000000000 --- a/tests/migration/test_20230905_2b672c6fb2b9.py +++ /dev/null @@ -1,167 +0,0 @@ -import json -from typing import Any, Dict - -import pytest -from pytest_alembic import MigrationContext -from sqlalchemy.engine import Connection, Engine - -from tests.migration.conftest import CreateLibrary - - -class CreateConfiguration: - def __call__( - self, - connection: Connection, - goal: str, - protocol: str, - name: str, - settings: Dict[str, Any], - ) -> int: - integration_configuration = connection.execute( - "INSERT INTO integration_configurations (goal, protocol, name, settings, self_test_results) VALUES (%s, %s, %s, %s, '{}') returning id", - goal, - protocol, - name, - json.dumps(settings), - ).fetchone() - assert integration_configuration is not None - assert isinstance(integration_configuration.id, int) - return integration_configuration.id - - -@pytest.fixture -def create_integration_configuration() -> CreateConfiguration: - return CreateConfiguration() - - -def fetch_config(connection: Connection, _id: int) -> Dict[str, Any]: - integration_config = connection.execute( - "SELECT settings FROM integration_configurations where id=%s", _id - ).fetchone() - assert integration_config is not None - assert isinstance(integration_config.settings, dict) - return integration_config.settings - - -def fetch_library_config( - connection: Connection, parent_id: int, library_id: int -) -> Dict[str, Any]: - integration_lib_config = connection.execute( - "SELECT parent_id, settings FROM integration_library_configurations where parent_id=%s and library_id=%s", - parent_id, - library_id, - ).fetchone() 
- assert integration_lib_config is not None - assert isinstance(integration_lib_config.settings, dict) - return integration_lib_config.settings - - -MIGRATION_UID = "2b672c6fb2b9" - - -def test_settings_coersion( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_integration_configuration: CreateConfiguration, -) -> None: - alembic_runner.migrate_down_to(MIGRATION_UID) - alembic_runner.migrate_down_one() - - with alembic_engine.connect() as connection: - config_id = create_integration_configuration( - connection, - "LICENSE_GOAL", - "Axis 360", - "axis-test-1", - dict( - verify_certificate="true", - loan_limit="20", - default_reservation_period="12", - key="value", - ), - ) - - # Test 2 library configs, to the same parent - library_id = create_library(connection) - library_id2 = create_library(connection) - - library_settings = dict( - hold_limit="30", - max_retry_count="2", - ebook_loan_duration="10", - default_loan_duration="11", - unchanged="value", - ) - connection.execute( - "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", - library_id, - config_id, - json.dumps(library_settings), - ) - library_settings = dict( - hold_limit="31", - max_retry_count="3", - ebook_loan_duration="", - default_loan_duration="12", - unchanged="value1", - ) - connection.execute( - "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", - library_id2, - config_id, - json.dumps(library_settings), - ) - - other_config_settings = dict( - verify_certificate="true", - loan_limit="20", - default_reservation_period="12", - key="value", - ) - other_config_id = create_integration_configuration( - connection, "PATRON_AUTH_GOAL", "Other", "other-test", other_config_settings - ) - connection.execute( - "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", - library_id2, - other_config_id, - json.dumps(other_config_settings), - ) - - alembic_runner.migrate_up_one() - - axis_config = fetch_config(connection, config_id) - assert axis_config["verify_certificate"] == True - assert axis_config["loan_limit"] == 20 - assert axis_config["default_reservation_period"] == 12 - # Unknown settings remain as-is - assert axis_config["key"] == "value" - - odl_config = fetch_library_config( - connection, parent_id=config_id, library_id=library_id - ) - assert odl_config["hold_limit"] == 30 - assert odl_config["max_retry_count"] == 2 - assert odl_config["ebook_loan_duration"] == 10 - assert odl_config["default_loan_duration"] == 11 - # Unknown settings remain as-is - assert odl_config["unchanged"] == "value" - - odl_config2 = fetch_library_config( - connection, parent_id=config_id, library_id=library_id2 - ) - assert odl_config2["hold_limit"] == 31 - assert odl_config2["max_retry_count"] == 3 - assert odl_config2["ebook_loan_duration"] is None - assert odl_config2["default_loan_duration"] == 12 - # Unknown settings remain as-is - assert odl_config2["unchanged"] == "value1" - - # Other integration is unchanged - other_config = fetch_config(connection, other_config_id) - assert other_config == other_config_settings - other_library_config = fetch_library_config( - connection, parent_id=other_config_id, library_id=library_id2 - ) - assert other_library_config == other_config_settings diff --git a/tests/migration/test_20231101_2d72d6876c52.py b/tests/migration/test_20231101_2d72d6876c52.py new file mode 100644 index 000000000..56a803898 --- 
/dev/null +++ b/tests/migration/test_20231101_2d72d6876c52.py @@ -0,0 +1,263 @@ +from typing import Any + +import pytest +from pytest_alembic import MigrationContext +from sqlalchemy import inspect +from sqlalchemy.engine import Connection, Engine +from sqlalchemy.exc import IntegrityError + +from core.model import json_serializer +from tests.migration.conftest import ( + CreateConfigSetting, + CreateExternalIntegration, + CreateLibrary, +) + + +def create_integration_configuration( + connection: Connection, + name: str, + protocol: str, + goal: str, + settings: dict[str, Any] | None = None, +) -> int: + if settings is None: + settings = {} + + settings_str = json_serializer(settings) + + integration_configuration = connection.execute( + "INSERT INTO integration_configurations (name, protocol, goal, settings, self_test_results) " + "VALUES (%s, %s, %s, %s, '{}') returning id", + name, + protocol, + goal, + settings_str, + ).fetchone() + assert integration_configuration is not None + assert isinstance(integration_configuration.id, int) + return integration_configuration.id + + +def create_integration_library_configuration( + connection: Connection, + integration_id: int, + library_id: int, + settings: dict[str, Any] | None = None, +) -> None: + if settings is None: + settings = {} + + settings_str = json_serializer(settings) + connection.execute( + "INSERT INTO integration_library_configurations (parent_id, library_id, settings) " + "VALUES (%s, %s, %s)", + integration_id, + library_id, + settings_str, + ) + + +def create_collection_library( + connection: Connection, collection_id: int, library_id: int +) -> None: + connection.execute( + "INSERT INTO collections_libraries (collection_id, library_id) " + "VALUES (%s, %s)", + collection_id, + library_id, + ) + + +def create_collection( + connection: Connection, + name: str, + integration_configuration_id: int, + external_account_id: str | None = None, + external_integration_id: int | None = None, + parent_id: int | None = None, +) -> int: + collection = connection.execute( + "INSERT INTO collections " + "(name, external_account_id, integration_configuration_id, external_integration_id, parent_id) VALUES " + "(%s, %s, %s, %s, %s) " + "returning id", + name, + external_account_id, + integration_configuration_id, + external_integration_id, + parent_id, + ).fetchone() + assert collection is not None + assert isinstance(collection.id, int) + return collection.id + + +def column_exists(engine: Engine, table_name: str, column_name: str) -> bool: + inspector = inspect(engine) + columns = [column["name"] for column in inspector.get_columns(table_name)] + return column_name in columns + + +def test_migration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("2d72d6876c52") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + # Test setup, create all the data we need for the migration + library_1 = create_library(connection, "library_1") + library_2 = create_library(connection, "library_2") + + integration_1_settings = {"data_source": "integration_1"} + integration_1 = create_integration_configuration( + connection, + "integration_1", + "OPDS Import", + "LICENSE_GOAL", + settings=integration_1_settings, + ) + + integration_2_settings = { + "overdrive_website_id": "2", + "overdrive_client_key": "3", + "overdrive_client_secret": 
"4", + } + integration_2 = create_integration_configuration( + connection, + "collection_2", + "Overdrive", + "LICENSE_GOAL", + settings=integration_2_settings, + ) + integration_3_settings: dict[str, str] = {} + integration_3 = create_integration_configuration( + connection, + "collection_1", + "Overdrive", + "LICENSE_GOAL", + settings=integration_3_settings, + ) + + external_1 = create_external_integration(connection) + external_2 = create_external_integration(connection) + external_3 = create_external_integration(connection) + + create_config_setting( + connection, "token_auth_endpoint", "http://token.com/auth", external_1 + ) + + collection_1 = create_collection( + connection, "collection_1", integration_1, "http://test.com", external_1 + ) + collection_2 = create_collection( + connection, "collection_2", integration_2, "1", external_2 + ) + collection_3 = create_collection( + connection, "collection_3", integration_3, "5656", external_3, collection_2 + ) + + create_integration_library_configuration(connection, integration_1, library_1) + create_integration_library_configuration(connection, integration_1, library_2) + create_collection_library(connection, collection_1, library_1) + create_collection_library(connection, collection_1, library_2) + + create_integration_library_configuration(connection, integration_2, library_2) + create_collection_library(connection, collection_2, library_2) + + # Test that the collections_libraries table has the correct foreign key constraints + with pytest.raises(IntegrityError) as excinfo: + create_collection_library(connection, 99, 99) + assert "violates foreign key constraint" in str(excinfo.value) + + # Make sure we have the data we expect before we run the migration + integration_1_actual = connection.execute( + "select name, settings from integration_configurations where id = (%s)", + integration_1, + ).fetchone() + assert integration_1_actual is not None + assert integration_1_actual.name == "integration_1" + assert integration_1_actual.settings == integration_1_settings + assert ( + column_exists(alembic_engine, "integration_configurations", "context") + is False + ) + + integration_2_actual = connection.execute( + "select name, settings from integration_configurations where id = (%s)", + integration_2, + ).fetchone() + assert integration_2_actual is not None + assert integration_2_actual.name == "collection_2" + assert integration_2_actual.settings == integration_2_settings + assert ( + column_exists(alembic_engine, "integration_configurations", "context") + is False + ) + + # Run the migration + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as connection: + # Make sure the migration updated the integration name, added the context column, and updated the settings + # column to contain the external_account_id + integration_1_actual = connection.execute( + "select name, settings, context from integration_configurations where id = (%s)", + integration_1, + ).fetchone() + assert integration_1_actual is not None + assert integration_1_actual.name == "collection_1" + assert integration_1_actual.settings != integration_1_settings + assert integration_1_actual.settings == { + "data_source": "integration_1", + "external_account_id": "http://test.com", + } + assert integration_1_actual.context == { + "token_auth_endpoint": "http://token.com/auth" + } + + integration_2_actual = connection.execute( + "select name, settings, context from integration_configurations where id = (%s)", + integration_2, + ).fetchone() + assert 
integration_2_actual is not None + assert integration_2_actual.name == "collection_2" + assert integration_2_actual.settings != integration_2_settings + assert integration_2_actual.settings == { + "overdrive_website_id": "2", + "overdrive_client_key": "3", + "overdrive_client_secret": "4", + "external_account_id": "1", + } + assert integration_2_actual.context == {} + + integration_3_actual = connection.execute( + "select name, settings, context from integration_configurations where id = (%s)", + integration_3, + ).fetchone() + assert integration_3_actual is not None + assert integration_3_actual.name == "collection_3" + assert integration_3_actual.settings != integration_3_settings + assert integration_3_actual.settings == { + "external_account_id": "5656", + } + assert integration_3_actual.context == {} + + # The foreign key constraints have been removed from the collections_libraries table + create_collection_library(connection, 99, 99) + + # If we try to run the migration, it will fail when it tries to add back the foreign key constraints + with pytest.raises(IntegrityError): + alembic_runner.migrate_down_one() + + # But if we remove the data that violates the foreign key constraints, the migration will run successfully + with alembic_engine.connect() as connection: + connection.execute( + "delete from collections_libraries where collection_id = 99 and library_id = 99" + ) + alembic_runner.migrate_down_one() diff --git a/tests/migration/test_20231121_1e46a5bc33b5.py b/tests/migration/test_20231121_1e46a5bc33b5.py new file mode 100644 index 000000000..f58ab1718 --- /dev/null +++ b/tests/migration/test_20231121_1e46a5bc33b5.py @@ -0,0 +1,181 @@ +import pytest +from pytest_alembic import MigrationContext +from sqlalchemy.engine import Engine + +from api.integration.registry.catalog_services import CatalogServicesRegistry +from core.integration.base import integration_settings_load +from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings +from tests.migration.conftest import ( + CreateConfigSetting, + CreateExternalIntegration, + CreateLibrary, +) + + +def test_migration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("1e46a5bc33b5") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + lib_1_id = create_library(conn, "Test Library 1") + lib_2_id = create_library(conn, "Test Library 2") + ext_id = create_external_integration( + conn, + protocol="MARC Export", + goal="ils_catalog", + name="MARC Export Test", + ) + + create_config_setting( + conn, "marc_update_frequency", "8", ext_id, lib_1_id, associate_library=True + ) + create_config_setting( + conn, + "marc_organization_code", + "org1", + ext_id, + lib_1_id, + associate_library=True, + ) + create_config_setting( + conn, "include_summary", "true", ext_id, lib_1_id, associate_library=True + ) + + create_config_setting( + conn, + "marc_organization_code", + "org2", + ext_id, + lib_2_id, + associate_library=True, + ) + create_config_setting( + conn, + "marc_web_client_url", + "http://web.com", + ext_id, + lib_2_id, + associate_library=True, + ) + create_config_setting( + conn, + "include_simplified_genres", + "true", + ext_id, + lib_2_id, + associate_library=True, + ) + + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as conn: + rows = conn.execute( + "select id, protocol, 
goal, settings from integration_configurations where name='MARC Export Test'" + ).all() + assert len(rows) == 1 + + integration = rows[0] + + protocol_cls = CatalogServicesRegistry()[integration.protocol] + assert protocol_cls == MARCExporter + settings = integration_settings_load( + protocol_cls.settings_class(), integration.settings + ) + assert isinstance(settings, MarcExporterSettings) + assert settings.update_frequency == 8 + + rows = conn.execute( + "select library_id, settings from integration_library_configurations where parent_id = %s order by library_id", + integration.id, + ).all() + assert len(rows) == 2 + [library_1_integration, library_2_integration] = rows + + assert library_1_integration.library_id == lib_1_id + assert library_2_integration.library_id == lib_2_id + + library_1_settings = integration_settings_load( + protocol_cls.library_settings_class(), library_1_integration.settings + ) + assert isinstance(library_1_settings, MarcExporterLibrarySettings) + assert library_1_settings.organization_code == "org1" + assert library_1_settings.include_summary is True + + library_2_settings = integration_settings_load( + protocol_cls.library_settings_class(), library_2_integration.settings + ) + assert isinstance(library_2_settings, MarcExporterLibrarySettings) + assert library_2_settings.organization_code == "org2" + assert library_2_settings.web_client_url == "http://web.com" + assert library_2_settings.include_genres is True + + +def test_different_update_frequency( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("1e46a5bc33b5") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + lib_1_id = create_library(conn, "Test Library 1") + lib_2_id = create_library(conn, "Test Library 2") + ext_id = create_external_integration( + conn, + protocol="MARC Export", + goal="ils_catalog", + name="MARC Export Test", + ) + + create_config_setting( + conn, "marc_update_frequency", "8", ext_id, lib_1_id, associate_library=True + ) + + create_config_setting( + conn, + "marc_update_frequency", + "12", + ext_id, + lib_2_id, + associate_library=True, + ) + + with pytest.raises(RuntimeError) as excinfo: + alembic_runner.migrate_up_one() + + assert "Found different update frequencies for different libraries (8/12)." 
in str( + excinfo.value + ) + + +def test_unknown_protocol( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("1e46a5bc33b5") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + ext_id = create_external_integration( + conn, + protocol="unknown", + goal="ils_catalog", + name="MARC Export Test", + ) + + with pytest.raises(RuntimeError) as excinfo: + alembic_runner.migrate_up_one() + + assert "Unknown catalog service" in str(excinfo.value) diff --git a/tests/migration/test_20231124_1c14468b74ce.py b/tests/migration/test_20231124_1c14468b74ce.py new file mode 100644 index 000000000..1e197ead2 --- /dev/null +++ b/tests/migration/test_20231124_1c14468b74ce.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pytest_alembic import MigrationContext + from sqlalchemy.engine import Engine + + from tests.migration.conftest import ( + CreateCollection, + CreateEdition, + CreateIdentifier, + CreateIntegrationConfiguration, + CreateLicensePool, + ) + +MIGRATION_UID = "1c14468b74ce" + + +def test_migration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_collection: CreateCollection, + create_integration_configuration: CreateIntegrationConfiguration, + create_edition: CreateEdition, + create_identifier: CreateIdentifier, + create_license_pool: CreateLicensePool, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_UID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + ic_id_incorrect_protocol = create_integration_configuration( + connection, + "configuration_badprotocol", + "OPDS 1.0", + "LICENSE_GOAL", + {}, + ) + collection_id_incorrect_protocol = create_collection( + connection, + integration_configuration_id=ic_id_incorrect_protocol, + ) + + ic_id1 = create_integration_configuration( + connection, "configuration1", "OPDS for Distributors", "LICENSE_GOAL", {} + ) + collection_id = create_collection( + connection, integration_configuration_id=ic_id1 + ) + + identifier_id1 = create_identifier(connection, "identifier-1", "type") + edition_id1 = create_edition(connection, "title", "Audio", identifier_id1) + lp1_id = create_license_pool( + connection, + collection_id, + identifier_id=identifier_id1, + should_track_playtime=False, + ) + + # Should not update because of incorrect medium + identifier_id2 = create_identifier(connection, "identifier-2", "type") + edition_id2 = create_edition(connection, "title", "Book", identifier_id2) + lp2_id = create_license_pool( + connection, + collection_id, + identifier_id=identifier_id2, + should_track_playtime=False, + ) + + # Should not update because of incorrect collection protocol + lp3_id = create_license_pool( + connection, + collection_id_incorrect_protocol, + identifier_id=identifier_id1, + should_track_playtime=False, + ) + + # Should update this one as well + identifier_id3 = create_identifier(connection, "identifier-3", "other-type") + edition_id3 = create_edition(connection, "title-1", "Audio", identifier_id3) + lp4_id = create_license_pool( + connection, + collection_id, + identifier_id=identifier_id3, + should_track_playtime=False, + ) + + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as connection: + should_track = connection.execute( + "select should_track_playtime from licensepools order 
by id" + ).all() + assert should_track == [(True,), (False,), (False,), (True,)] diff --git a/tests/migration/test_20231206_e06f965879ab.py b/tests/migration/test_20231206_e06f965879ab.py new file mode 100644 index 000000000..5f0aeee5b --- /dev/null +++ b/tests/migration/test_20231206_e06f965879ab.py @@ -0,0 +1,223 @@ +from unittest.mock import MagicMock, call + +import pytest +from _pytest.logging import LogCaptureFixture +from pytest_alembic import MigrationContext +from sqlalchemy import inspect +from sqlalchemy.engine import Connection, Engine + +from core.service.container import container_instance +from core.service.storage.s3 import S3Service +from tests.migration.conftest import ( + CreateCoverageRecord, + CreateIdentifier, + CreateLane, + CreateLibrary, +) + + +class CreateCachedMarcFile: + def __call__( + self, + connection: Connection, + url: str | None, + library_id: int | None = None, + lane_id: int | None = None, + ) -> tuple[int, int]: + if library_id is None: + library_id = self.create_library(connection) + + if lane_id is None: + lane_id = self.create_lane(connection, library_id) + + representation_id = self.representation(connection, url) + + row = connection.execute( + "INSERT INTO cachedmarcfiles (representation_id, start_time, end_time, lane_id, library_id) " + "VALUES (%s, %s, %s, %s, %s) returning id", + (representation_id, "2021-01-01", "2021-01-02", library_id, lane_id), + ).first() + assert row is not None + file_id = row.id + + return representation_id, file_id + + def representation(self, connection: Connection, url: str | None) -> int: + row = connection.execute( + "INSERT INTO representations (media_type, url) " + "VALUES ('application/marc', %s) returning id", + url, + ).first() + assert row is not None + assert isinstance(row.id, int) + return row.id + + def __init__( + self, + create_library: CreateLibrary, + create_lane: CreateLane, + ) -> None: + self.create_library = create_library + self.create_lane = create_lane + + +@pytest.fixture +def create_cachedmarcfile( + create_library: CreateLibrary, + create_lane: CreateLane, + create_identifier: CreateIdentifier, +) -> CreateCachedMarcFile: + return CreateCachedMarcFile(create_library, create_lane) + + +MIGRATION_ID = "e06f965879ab" + + +def test_migration_no_s3_integration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_cachedmarcfile: CreateCachedMarcFile, + caplog: LogCaptureFixture, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + container = container_instance() + with container.storage.public.override(None): + # If there is no public s3 integration, and no cachedmarcfiles in the database, the migration should succeed + alembic_runner.migrate_up_one() + + alembic_runner.migrate_down_one() + # If there is no public s3 integration, but there are cachedmarcfiles in the database, the migration should fail + with alembic_engine.connect() as connection: + create_cachedmarcfile(connection, "http://s3.amazonaws.com/test-bucket/1.mrc") + + with pytest.raises(RuntimeError) as excinfo, container.storage.public.override( + None + ): + alembic_runner.migrate_up_one() + + assert ( + "There are cachedmarcfiles in the database, but no public s3 storage configured!" 
+ in str(excinfo.value) + ) + + +def test_migration_bucket_url_different( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_cachedmarcfile: CreateCachedMarcFile, + caplog: LogCaptureFixture, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + container = container_instance() + mock_storage = MagicMock(spec=S3Service) + + # If the generated URL doesn't match the original URL, the migration should fail + mock_storage.bucket = "test-bucket" + mock_storage.generate_url.return_value = ( + "http://s3.amazonaws.com/test-bucket/different-url.mrc" + ) + + with alembic_engine.connect() as connection: + create_cachedmarcfile(connection, "http://s3.amazonaws.com/test-bucket/1.mrc") + + with pytest.raises(RuntimeError) as excinfo, container.storage.public.override( + mock_storage + ): + alembic_runner.migrate_up_one() + + assert "URL mismatch" in str(excinfo.value) + + +def test_migration_success( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_lane: CreateLane, + caplog: LogCaptureFixture, + create_cachedmarcfile: CreateCachedMarcFile, + create_coverage_record: CreateCoverageRecord, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + library_id = create_library(connection, "test-library") + lane_id = create_lane(connection, library_id, "test-lane") + + url1 = "http://s3.amazonaws.com/test-bucket/1.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=url1, + ) + url2 = "http://test-bucket.us-west-2.s3.amazonaws.com/2.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=url2, + ) + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=None, + ) + url3 = "https://test-bucket.s3.us-west-2.amazonaws.com/test-1/2023-02-17%2006%3A38%3A01.837167%2B00%3A00-2023-03-21%2005%3A41%3A28.262257%2B00%3A00/Fiction.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=url3, + ) + unrelated_representation = create_cachedmarcfile.representation( + connection, "http://s3.amazonaws.com/test-bucket/4.mrc" + ) + + create_coverage_record(connection, "generate-marc") + unrelated_coverage_record = create_coverage_record(connection) + + mock_storage = MagicMock(spec=S3Service) + mock_storage.bucket = "test-bucket" + mock_storage.generate_url.side_effect = [url1, url2, url3] + + container = container_instance() + with container.storage.public.override(mock_storage): + alembic_runner.migrate_up_one() + + # We should have checked that the generated url is the same and deleted the files from s3 + assert mock_storage.generate_url.call_count == 3 + assert mock_storage.delete.call_count == 3 + assert mock_storage.delete.call_args_list == [ + call("1.mrc"), + call("2.mrc"), + call( + "test-1/2023-02-17 06:38:01.837167+00:00-2023-03-21 05:41:28.262257+00:00/Fiction.mrc" + ), + ] + + # But the representations and coveragerecords should still be there + with alembic_engine.connect() as connection: + assert connection.execute("SELECT id FROM representations").rowcount == 5 + assert connection.execute("SELECT id FROM coveragerecords").rowcount == 2 + + # The next migration takes care of those + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as connection: + # The representation and coveragerecord that were not associated should still be there + assert 
connection.execute("SELECT id FROM representations").fetchall() == [ + (unrelated_representation,) + ] + assert connection.execute("SELECT id FROM coveragerecords").fetchall() == [ + (unrelated_coverage_record,) + ] + + # Cachedmarcfiles should be gone + inspector = inspect(connection) + assert inspector.has_table("cachedmarcfiles") is False diff --git a/tests/mocks/search.py b/tests/mocks/search.py index 194ddb557..eebf8f399 100644 --- a/tests/mocks/search.py +++ b/tests/mocks/search.py @@ -1,7 +1,7 @@ from __future__ import annotations +from collections.abc import Iterable from enum import Enum -from typing import Dict, Iterable, List, Optional from unittest.mock import MagicMock from opensearch_dsl import MultiSearch, Search @@ -32,29 +32,29 @@ class SearchServiceFailureMode(Enum): class SearchServiceFake(SearchService): """A search service that doesn't speak to a real service.""" - _documents_by_index: Dict[str, List[dict]] + _documents_by_index: dict[str, list[dict]] _failing: SearchServiceFailureMode _search_client: Search _multi_search_client: MultiSearch - _indexes_created: List[str] - _document_submission_attempts: List[dict] + _indexes_created: list[str] + _document_submission_attempts: list[dict] def __init__(self): self.base_name = "test_index" self._failing = SearchServiceFailureMode.NOT_FAILING self._documents_by_index = {} - self._read_pointer: Optional[str] = None - self._write_pointer: Optional[SearchWritePointer] = None + self._read_pointer: str | None = None + self._write_pointer: SearchWritePointer | None = None self._search_client = Search(using=MagicMock()) self._multi_search_client = MultiSearch(using=MagicMock()) self._indexes_created = [] self._document_submission_attempts = [] @property - def document_submission_attempts(self) -> List[dict]: + def document_submission_attempts(self) -> list[dict]: return self._document_submission_attempts - def indexes_created(self) -> List[str]: + def indexes_created(self) -> list[str]: return self._indexes_created def _fail_if_necessary(self): @@ -64,17 +64,17 @@ def _fail_if_necessary(self): def set_failing_mode(self, mode: SearchServiceFailureMode): self._failing = mode - def documents_for_index(self, index_name: str) -> List[dict]: + def documents_for_index(self, index_name: str) -> list[dict]: self._fail_if_necessary() if not (index_name in self._documents_by_index): return [] return self._documents_by_index[index_name] - def documents_all(self) -> List[dict]: + def documents_all(self) -> list[dict]: self._fail_if_necessary() - results: List[dict] = [] + results: list[dict] = [] for documents in self._documents_by_index.values(): for document in documents: results.append(document) @@ -93,11 +93,11 @@ def write_pointer_name(self) -> str: self._fail_if_necessary() return f"{self.base_name}-search-write" - def read_pointer(self) -> Optional[str]: + def read_pointer(self) -> str | None: self._fail_if_necessary() return self._read_pointer - def write_pointer(self) -> Optional[SearchWritePointer]: + def write_pointer(self) -> SearchWritePointer | None: self._fail_if_necessary() return self._write_pointer @@ -131,7 +131,7 @@ def index_set_mapping(self, revision: SearchSchemaRevision) -> None: def index_submit_documents( self, pointer: str, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: self._fail_if_necessary() _should_fail = False @@ -145,7 +145,7 @@ def index_submit_documents( ) if _should_fail: - results: List[SearchServiceFailedDocument] = [] + results: 
list[SearchServiceFailedDocument] = [] for document in documents: self._document_submission_attempts.append(document) if self._failing == SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS: @@ -208,7 +208,7 @@ def is_pointer_empty(*args): return False -def fake_hits(works: List[Work]): +def fake_hits(works: list[Work]): return [ Hit( { @@ -237,14 +237,14 @@ def __init__( _db, url, test_search_term, revision_directory, version, SearchServiceFake() ) - self._mock_multi_works: List[Dict] = [] + self._mock_multi_works: list[dict] = [] self._mock_count_works = 0 - self._queries: List[tuple] = [] + self._queries: list[tuple] = [] - def mock_query_works(self, works: List[Work]): + def mock_query_works(self, works: list[Work]): self.mock_query_works_multi(works) - def mock_query_works_multi(self, works: List[Work], *args: List[Work]): + def mock_query_works_multi(self, works: list[Work], *args: list[Work]): self._mock_multi_works = [fake_hits(works)] self._mock_multi_works.extend([fake_hits(arg_works) for arg_works in args]) diff --git a/tox.ini b/tox.ini index 34ffa9983..b95fd8cbe 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{38,39,310,311}-{api,core}-docker +envlist = py{310,311}-{api,core}-docker skipsdist = true [testenv] @@ -76,8 +76,6 @@ ports = [gh-actions] python = - 3.8: py38 - 3.9: py39 3.10: py310 3.11: py311